code stringlengths 3–1.05M | repo_name stringlengths 5–104 | path stringlengths 4–251 | language stringclasses 1 value | license stringclasses 15 values | size int64 3–1.05M
---|---|---|---|---|---|
import os
## helper functions ##
def _ip_ok(handler):
using('basic')
# reject ip if not local
# this will not work if there is a proxy server
# because the proxy is seen as local
ip = handler.client_address[0]
return ip[:7]=='192.168' or ip[:9]=='127.0.0.1'
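# Illustrative note (a sketch, not part of the original module):
# handler.client_address is an (ip, port) tuple, so e.g.
# ('192.168.1.20', 54321) or ('127.0.0.1', 8080) both pass this check.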
def _send_html(message):
using('upload','send')
form_template = upload_template % ('pic_form','uploadPic')
html = html_template % (form_template,message)
send(
200,
{ 'content-type': 'text/html; charset=utf-8' },
html
)
def _defang_name(name):
from re import sub
from config import unwanted_chars
return sub(unwanted_chars,'',name)
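# Illustrative usage (sketch; assumes config.unwanted_chars is a regex such
# as r'[^\w.\-]' -- the actual pattern lives in config.py):
#     _defang_name("my pic (1).jpg")  # -> "mypic1.jpg" with that pattern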
def _upload_dir():
using('dirs')
return os.path.join( serverRoot, 'uploads')
## get() and post() ##
def get():
using('basic')
if path=='/uploadPic': _send_html('')
def post():
using('basic', 'upload')
if path=='/uploadPic':
# possible errors
        if not _ip_ok(handler):
            _send_html(
                "Only accepting requests from local network."
            )
            return  # stop: do not fall through to the upload below
        if upload_ext.lower()!='jpg':
            _send_html( "only accepting jpg files" )
            return  # stop: wrong extension
# upload it
absfile = os.path.join(
_upload_dir(),
_defang_name(upload_filename)
)
if upload(absfile):
_send_html(
'Upload OK. Find picture at ' + absfile
)
else:
_send_html(
'Could not upload '+_defang_name(upload_filename)
)
html_template = """
<!doctype html>
<!-- generated with Python's string format from a template -->
<html>
<head>
<title>Upload a Picture</title>
</head><body>
<h1>Upload A Picture</h1>
<div>%s</div>
<p>This will upload a JPG picture to the uploads subdirectory,
provided you are working from a local network.</p>
<p>%s</p>
</body></html>
"""
| J-Adrian-Zimmer/ProgrammableServer | expanders/uploadPic.py | Python | mit | 1,825 |
# coding: utf-8
BACKUP_NAME = 'gae_backup_'
# back up entity kinds.
# BACKUP_KINDS = '*' # back up all kinds, except entities whose kind starts with '_'.
# BACKUP_KINDS = 'Model1'
# BACKUP_KINDS = ['Model1', 'Model2']
BACKUP_KINDS = '*'
# backup filesystem: 'gs' for Google Cloud Storage, or blank for Blobstore
BACKUP_FILESYSTEM = 'gs'
BACKUP_GS_BUCKET_NAME = 'gae-backup-python-test.appspot.com'
# Delete backup files after n days
BACKUP_EXPIRE_DAYS = 3
| t4kash/gae_backup_python | test/gae_backup_python/config.py | Python | mit | 447 |
import abc
import inspect
import logging
import random
import sys
from collections import namedtuple
log = logging.getLogger("edx.courseware")
# This is a tuple for holding scores, either from problems or sections.
# The 'section' field holds either the name of the problem or the name of the section.
Score = namedtuple("Score", "earned possible graded section module_id")
def aggregate_scores(scores, section_name="summary"):
"""
scores: A list of Score objects
returns: A tuple (all_total, graded_total).
all_total: A Score representing the total score summed over all input scores
graded_total: A Score representing the score summed over all graded input scores
"""
total_correct_graded = sum(score.earned for score in scores if score.graded)
total_possible_graded = sum(score.possible for score in scores if score.graded)
total_correct = sum(score.earned for score in scores)
total_possible = sum(score.possible for score in scores)
#regardless of whether or not it is graded
all_total = Score(
total_correct,
total_possible,
False,
section_name,
None
)
#selecting only graded things
graded_total = Score(
total_correct_graded,
total_possible_graded,
True,
section_name,
None
)
return all_total, graded_total
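# Illustrative usage (a sketch, not part of the original module):
#
#     _scores = [Score(5, 10, True, "HW 1", None),
#                Score(3, 5, False, "Practice", None)]
#     _all, _graded = aggregate_scores(_scores, "summary")
#     # _all == Score(8, 15, False, "summary", None)
#     # _graded == Score(5, 10, True, "summary", None)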
def invalid_args(func, argdict):
"""
Given a function and a dictionary of arguments, returns a set of arguments
from argdict that aren't accepted by func
"""
args, _, keywords, _ = inspect.getargspec(func)
if keywords:
return set() # All accepted
return set(argdict) - set(args)
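# Illustrative usage (sketch): for def f(a, b): ..., invalid_args(f, {'a': 1,
# 'x': 2}) returns {'x'}; if f accepted **kwargs, it would return set().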
def grader_from_conf(conf):
"""
This creates a CourseGrader from a configuration (such as in course_settings.py).
The conf can simply be an instance of CourseGrader, in which case no work is done.
More commonly, the conf is a list of dictionaries. A WeightedSubsectionsGrader
with AssignmentFormatGrader's or SingleSectionGrader's as subsections will be
generated. Every dictionary should contain the parameters for making either a
AssignmentFormatGrader or SingleSectionGrader, in addition to a 'weight' key.
"""
if isinstance(conf, CourseGrader):
return conf
subgraders = []
for subgraderconf in conf:
subgraderconf = subgraderconf.copy()
weight = subgraderconf.pop("weight", 0)
# NOTE: 'name' used to exist in SingleSectionGrader. We are deprecating SingleSectionGrader
# and converting everything into an AssignmentFormatGrader by adding 'min_count' and
# 'drop_count'. AssignmentFormatGrader does not expect 'name', so if it appears
# in bad_args, go ahead and remove it (this causes no errors). Eventually, SingleSectionGrader
# should be completely removed.
name = 'name'
try:
if 'min_count' in subgraderconf:
#This is an AssignmentFormatGrader
subgrader_class = AssignmentFormatGrader
elif name in subgraderconf:
#This is a SingleSectionGrader
subgrader_class = SingleSectionGrader
else:
raise ValueError("Configuration has no appropriate grader class.")
bad_args = invalid_args(subgrader_class.__init__, subgraderconf)
# See note above concerning 'name'.
if bad_args.issuperset({name}):
bad_args = bad_args - {name}
del subgraderconf[name]
if len(bad_args) > 0:
log.warning("Invalid arguments for a subgrader: %s", bad_args)
for key in bad_args:
del subgraderconf[key]
subgrader = subgrader_class(**subgraderconf)
subgraders.append((subgrader, subgrader.category, weight))
except (TypeError, ValueError) as error:
# Add info and re-raise
msg = ("Unable to parse grader configuration:\n " +
str(subgraderconf) +
"\n Error was:\n " + str(error))
raise ValueError(msg), None, sys.exc_info()[2]
return WeightedSubsectionsGrader(subgraders)
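# Example configuration (a hedged sketch based on the docstring above; real
# course_settings.py files may use additional keys):
#
#     _conf = [
#         {'type': "Homework", 'min_count': 12, 'drop_count': 2,
#          'short_label': "HW", 'weight': 0.15},
#         {'type': "Final", 'name': "Final Exam", 'weight': 0.40},
#     ]
#     grader = grader_from_conf(_conf)
#
# The first entry has 'min_count', so it becomes an AssignmentFormatGrader;
# the second has 'name', so it becomes a SingleSectionGrader.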
class CourseGrader(object):
"""
A course grader takes the totaled scores for each graded section (that a student has
started) in the course. From these scores, the grader calculates an overall percentage
grade. The grader should also generate information about how that score was calculated,
to be displayed in graphs or charts.
A grader has one required method, grade(), which is passed a grade_sheet. The grade_sheet
contains scores for all graded sections that the student has started. If a student has
a score of 0 for that section, it may be missing from the grade_sheet. The grade_sheet
is keyed by section format. Each value is a list of Score namedtuples for each section
that has the matching section format.
The grader outputs a dictionary with the following keys:
- percent: Contains a float value, which is the final percentage score for the student.
- section_breakdown: This is a list of dictionaries which provide details on sections
that were graded. These are used for display in a graph or chart. The format for a
section_breakdown dictionary is explained below.
- grade_breakdown: This is a list of dictionaries which provide details on the contributions
of the final percentage grade. This is a higher level breakdown, for when the grade is constructed
of a few very large sections (such as Homeworks, Labs, a Midterm, and a Final). The format for
a grade_breakdown is explained below. This section is optional.
A dictionary in the section_breakdown list has the following keys:
percent: A float percentage for the section.
label: A short string identifying the section. Preferably fixed-length. E.g. "HW 3".
detail: A string explanation of the score. E.g. "Homework 1 - Ohms Law - 83% (5/6)"
category: A string identifying the category. Items with the same category are grouped together
in the display (for example, by color).
prominent: A boolean value indicating that this section should be displayed as more prominent
than other items.
A dictionary in the grade_breakdown list has the following keys:
percent: A float percentage in the breakdown. All percents should add up to the final percentage.
detail: A string explanation of this breakdown. E.g. "Homework - 10% of a possible 15%"
category: A string identifying the category. Items with the same category are grouped together
in the display (for example, by color).
"""
__metaclass__ = abc.ABCMeta
@abc.abstractmethod
def grade(self, grade_sheet, generate_random_scores=False):
'''Given a grade sheet, return a dict containing grading information'''
raise NotImplementedError
class WeightedSubsectionsGrader(CourseGrader):
"""
This grader takes a list of tuples containing (grader, category_name, weight) and computes
a final grade by totalling the contribution of each sub grader and multiplying it by the
given weight. For example, the sections may be
[ (homeworkGrader, "Homework", 0.15), (labGrader, "Labs", 0.15), (midtermGrader, "Midterm", 0.30),
(finalGrader, "Final", 0.40) ]
All items in section_breakdown for each subgrader will be combined. A grade_breakdown will be
composed using the score from each grader.
Note that the sum of the weights is not taken into consideration. If the weights add up to
a value > 1, the student may end up with a percent > 100%. This allows for sections that
are extra credit.
"""
def __init__(self, sections):
self.sections = sections
def grade(self, grade_sheet, generate_random_scores=False):
total_percent = 0.0
section_breakdown = []
grade_breakdown = []
for subgrader, category, weight in self.sections:
subgrade_result = subgrader.grade(grade_sheet, generate_random_scores)
weighted_percent = subgrade_result['percent'] * weight
section_detail = u"{0} = {1:.2%} of a possible {2:.2%}".format(category, weighted_percent, weight)
total_percent += weighted_percent
section_breakdown += subgrade_result['section_breakdown']
grade_breakdown.append({'percent': weighted_percent, 'detail': section_detail, 'category': category})
return {'percent': total_percent,
'section_breakdown': section_breakdown,
'grade_breakdown': grade_breakdown}
class SingleSectionGrader(CourseGrader):
"""
This grades a single section with the format 'type' and the name 'name'.
If the name is not appropriate for the short_label or category, they each may
be specified individually.
"""
def __init__(self, type, name, short_label=None, category=None):
self.type = type
self.name = name
self.short_label = short_label or name
self.category = category or name
def grade(self, grade_sheet, generate_random_scores=False):
found_score = None
if self.type in grade_sheet:
for score in grade_sheet[self.type]:
if score.section == self.name:
found_score = score
break
if found_score or generate_random_scores:
if generate_random_scores: # for debugging!
earned = random.randint(2, 15)
possible = random.randint(earned, 15)
else: # We found the score
earned = found_score.earned
possible = found_score.possible
percent = earned / float(possible)
detail = u"{name} - {percent:.0%} ({earned:.3n}/{possible:.3n})".format(
name=self.name,
percent=percent,
earned=float(earned),
possible=float(possible)
)
else:
percent = 0.0
detail = u"{name} - 0% (?/?)".format(name=self.name)
breakdown = [{'percent': percent, 'label': self.short_label,
'detail': detail, 'category': self.category, 'prominent': True}]
return {'percent': percent,
'section_breakdown': breakdown,
#No grade_breakdown here
}
class AssignmentFormatGrader(CourseGrader):
"""
Grades all sections matching the format 'type' with an equal weight. A specified
number of lowest scores can be dropped from the calculation. The minimum number of
sections in this format must be specified (even if those sections haven't been
written yet).
min_count defines how many assignments are expected throughout the course. Placeholder
scores (of 0) will be inserted if the number of matching sections in the course is < min_count.
If the number of matching sections in the course is > min_count, min_count will be ignored.
show_only_average is to suppress the display of each assignment in this grader and instead
only show the total score of this grader in the breakdown.
hide_average is to suppress the display of the total score in this grader and instead
only show each assignment in this grader in the breakdown.
If there is only a single assignment in this grader, then it acts like a SingleSectionGrader
and returns only one entry for the grader. Since the assignment and the total are the same,
the total is returned but is not labeled as an average.
category should be presentable to the user, but may not appear. When the grade breakdown is
displayed, scores from the same category will be similar (for example, by color).
section_type is a string that is the type of a singular section. For example, for Labs it
would be "Lab". This defaults to be the same as category.
short_label is similar to section_type, but shorter. For example, for Homework it would be
"HW".
starting_index is the first number that will appear. For example, starting_index=3 and
min_count = 2 would produce the labels "Assignment 3", "Assignment 4"
"""
def __init__(self, type, min_count, drop_count, category=None, section_type=None, short_label=None,
show_only_average=False, hide_average=False, starting_index=1):
self.type = type
self.min_count = min_count
self.drop_count = drop_count
self.category = category or self.type
self.section_type = section_type or self.type
self.short_label = short_label or self.type
self.show_only_average = show_only_average
self.starting_index = starting_index
self.hide_average = hide_average
def grade(self, grade_sheet, generate_random_scores=False):
def total_with_drops(breakdown, drop_count):
'''calculates total score for a section while dropping lowest scores'''
#create an array of tuples with (index, mark), sorted by mark['percent'] descending
sorted_breakdown = sorted(enumerate(breakdown), key=lambda x: -x[1]['percent'])
# A list of the indices of the dropped scores
dropped_indices = []
if drop_count > 0:
dropped_indices = [x[0] for x in sorted_breakdown[-drop_count:]]
aggregate_score = 0
for index, mark in enumerate(breakdown):
if index not in dropped_indices:
aggregate_score += mark['percent']
if len(breakdown) - drop_count > 0:
aggregate_score /= len(breakdown) - drop_count
return aggregate_score, dropped_indices
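        # Illustrative note (sketch): with percents [1.0, 0.5, 0.0] and
        # drop_count=1, the 0.0 entry is dropped and the total is
        # (1.0 + 0.5) / 2 == 0.75, with dropped_indices == [2].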
#Figure the homework scores
scores = grade_sheet.get(self.type, [])
breakdown = []
for i in range(max(self.min_count, len(scores))):
if i < len(scores) or generate_random_scores:
if generate_random_scores: # for debugging!
earned = random.randint(2, 15)
possible = random.randint(earned, 15)
section_name = "Generated"
else:
earned = scores[i].earned
possible = scores[i].possible
section_name = scores[i].section
percentage = earned / float(possible)
summary_format = u"{section_type} {index} - {name} - {percent:.0%} ({earned:.3n}/{possible:.3n})"
summary = summary_format.format(
index=i + self.starting_index,
section_type=self.section_type,
name=section_name,
percent=percentage,
earned=float(earned),
possible=float(possible)
)
else:
percentage = 0
summary = u"{section_type} {index} Unreleased - 0% (?/?)".format(
index=i + self.starting_index,
section_type=self.section_type
)
short_label = u"{short_label} {index:02d}".format(
index=i + self.starting_index,
short_label=self.short_label
)
breakdown.append({'percent': percentage, 'label': short_label,
'detail': summary, 'category': self.category})
total_percent, dropped_indices = total_with_drops(breakdown, self.drop_count)
for dropped_index in dropped_indices:
breakdown[dropped_index]['mark'] = {'detail': u"The lowest {drop_count} {section_type} scores are dropped."
.format(drop_count=self.drop_count, section_type=self.section_type)}
if len(breakdown) == 1:
# if there is only one entry in a section, suppress the existing individual entry and the average,
# and just display a single entry for the section. That way it acts automatically like a
# SingleSectionGrader.
total_detail = u"{section_type} = {percent:.0%}".format(
percent=total_percent,
section_type=self.section_type,
)
total_label = u"{short_label}".format(short_label=self.short_label)
breakdown = [{'percent': total_percent, 'label': total_label,
'detail': total_detail, 'category': self.category, 'prominent': True}, ]
else:
total_detail = u"{section_type} Average = {percent:.0%}".format(
percent=total_percent,
section_type=self.section_type
)
total_label = u"{short_label} Avg".format(short_label=self.short_label)
if self.show_only_average:
breakdown = []
if not self.hide_average:
breakdown.append({'percent': total_percent, 'label': total_label,
'detail': total_detail, 'category': self.category, 'prominent': True})
return {'percent': total_percent,
'section_breakdown': breakdown,
#No grade_breakdown here
}
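    # Illustrative note (sketch): with type="Homework", min_count=3,
    # drop_count=1 and only two scores in the grade sheet, a third 0%
    # "Unreleased" placeholder is appended, the lowest of the three entries
    # is dropped, and the remaining two are averaged for total_percent.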
| B-MOOC/edx-platform | common/lib/xmodule/xmodule/graders.py | Python | agpl-3.0 | 17,288 |
#!/usr/bin/env python3
from lxml import etree
etree.set_default_parser(etree.HTMLParser())
import os
import subprocess
import requests
from urllib.parse import urljoin
from io import BytesIO
tmpdir = './tmp/'
indexes = [
'http://www.budget.gov.au/2014-15/content/bp1/html/index.htm',
'http://www.budget.gov.au/2014-15/content/bp2/html/index.htm',
'http://www.budget.gov.au/2014-15/content/bp3/html/index.htm',
'http://www.budget.gov.au/2014-15/content/bp4/html/index.htm' ]
chunk_size = 4096
def main():
pdfs = []
for index_uri in indexes:
print("up to:", index_uri)
data = requests.get(index_uri).content
et = etree.parse(BytesIO(data))
for elem in et.xpath('//a[contains(@href, ".pdf")]'):
href = elem.get('href')
if href.find('consolidated') == -1:
continue
idx = len(pdfs)
pdf = os.path.join(tmpdir, '%d.pdf' % (idx))
pdfs.append(pdf)
tmpf = pdf + '_tmp'
if os.access(pdf, os.R_OK):
print("skipping %d, already downloaded..." % (idx))
continue
print("getting:", pdf)
req = requests.get(urljoin(index_uri, href), stream=True)
with open(tmpf, 'wb') as fd:
for data in req.iter_content(chunk_size):
fd.write(data)
os.rename(tmpf, pdf)
cmd = [ 'pdftk' ] + pdfs + ['cat', 'output', 'budget2014.pdf']
print(cmd)
subprocess.call(cmd)
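    # Illustrative note (sketch): with two PDFs downloaded, the call above
    # resembles: pdftk ./tmp/0.pdf ./tmp/1.pdf cat output budget2014.pdf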
if __name__ == '__main__':
main()
| grahame/budget2014 | budget14.py | Python | apache-2.0 | 1,557 |
from django.apps import AppConfig
class BanConfig(AppConfig):
name = "django_sonic_screwdriver.apps.ban"
| rhazdon/django-sonic-screwdriver | django_sonic_screwdriver/apps/ban/apps.py | Python | mit | 111 |
## Copyright (c) 2015 Ryan Koesterer GNU General Public License v3
##
## This program is free software: you can redistribute it and/or modify
## it under the terms of the GNU General Public License as published by
## the Free Software Foundation, either version 3 of the License, or
## (at your option) any later version.
##
## This program is distributed in the hope that it will be useful,
## but WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
## GNU General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with this program. If not, see <http://www.gnu.org/licenses/>.
import pandas as pd
import numpy as np
from uga import Model
from uga import Parse
from uga import Variant
import pysam
from uga import Fxns
from Bio import bgzf
from uga import Process
import multiprocessing as mp
import sys
import os
import resource
import logging
import pickle
import glob
logging.basicConfig(format='%(asctime)s - %(processName)s - %(name)s - %(message)s',level=logging.DEBUG)
logger = logging.getLogger("RunSnvgroup")
def process_regions(regions_df, cfg, cpu, log):
regions_df = regions_df[regions_df['cpu'] == cpu].reset_index(drop=True)
if log:
try:
log_file = open(cfg['out'] + '.cpu' + str(cpu) + '.log','w')
except:
print(Process.Error("unable to initialize log file " + cfg['out'] + '.cpu' + str(cpu) + '.log').out)
return 1
stdout_orig = sys.stdout
sys.stdout = log_file
models_obj = {}
variants_found = False
model_written = {}
meta_written = {}
results_final_models = {}
results_final_models_headers = {}
results_final_meta = {}
meta_objs = {}
variants_files = {}
variant_ref = Variant.Ref()
model_loaded = {}
for n in cfg['model_order']:
model_written[n] = False
results_final_models[n] = pd.DataFrame({})
variants_files[n] = glob.glob(cfg['models'][n]['file'].replace('[CHR]','*'))
model_loaded[n] = False
for meta in cfg['meta_order']:
meta_written[meta] = False
results_final_meta[meta] = pd.DataFrame({})
meta_objs[meta] = getattr(Model,cfg['models'][cfg['meta'][meta].split('+')[0]]['fxn'].capitalize() + 'Meta')(tag = meta, meta = cfg['meta'][meta])
last_chr = None
for k in range(len(regions_df.index)):
meta_incl = []
region_written = False
results_region = pd.DataFrame({})
print('')
print('loading region ' + str(k+1) + '/' + str(len(regions_df.index)) + ' (' + regions_df['group_id'][k] + ": " + regions_df['region'][k] + ') ...')
for n in cfg['model_order']:
if not model_loaded[n] or (last_chr != regions_df['chr'][k] and len(variants_files[n]) > 1):
if not model_loaded[n]:
print("\nloading model for " + n if n != '___no_tag___' else "\nloading model")
else:
print("\nupdating model for " + n if n != '___no_tag___' else "\nupdating model")
try:
models_obj[n] = getattr(Model,cfg['models'][n]['fxn'].capitalize())(fxn=cfg['models'][n]['fxn'],
snvgroup_map=cfg['snvgroup_map'],
dep_var=cfg['models'][n]['dep_var'],
covars=cfg['models'][n]['covars'],
format=cfg['models'][n]['format'],
skat_wts=cfg['models'][n]['skat_wts'],
burden_wts=cfg['models'][n]['burden_wts'],
skat_method=cfg['models'][n]['skat_method'],
cmac=cfg['models'][n]['cmac'],
mafrange=cfg['models'][n]['mafrange'],
timeout=cfg['timeout'],
all_founders=cfg['models'][n]['all_founders'],
case_code=cfg['models'][n]['case_code'],
ctrl_code=cfg['models'][n]['ctrl_code'],
pheno=cfg['models'][n]['pheno'],
variants_file=cfg['models'][n]['file'].replace('[CHR]',str(regions_df['chr'][k])), # variants_file=cfg['models'][n]['file']
samples_file=cfg['models'][n]['sample'],
drop_file=cfg['models'][n]['drop'],
keep_file=cfg['models'][n]['keep'],
type=cfg['models'][n]['fxn'],
fid=cfg['models'][n]['fid'],
iid=cfg['models'][n]['iid'],
matid=cfg['models'][n]['matid'],
patid=cfg['models'][n]['patid'],
sex=cfg['models'][n]['sex'],
male=cfg['models'][n]['male'],
female=cfg['models'][n]['female'],
sep=cfg['models'][n]['sep'])
except Process.Error as err:
print(err.out)
return 1
model_loaded[n] = True
try:
models_obj[n].get_region(regions_df['region'][k], group_id=regions_df['group_id'][k])
except:
pass
try:
models_obj[n].get_snvgroup(cfg['buffer'], regions_df['group_id'][k])
except:
if not variants_found:
print(' (' + n + ') processed 0 variants')
pass
variants_found = True
if models_obj[n].variants.duplicated is not None:
print(' WARNING! The following duplicated variant identifiers were generated')
print('\n'.join([' ' + d for d in models_obj[n].variants.duplicated]))
if len(cfg['meta_order']) > 0:
if n == cfg['model_order'][0]:
variant_ref.load(models_obj[n].variants.info)
else:
variant_ref.update(models_obj[n].variants.info)
models_obj[n].variants.align(variant_ref)
try:
models_obj[n].filter(miss_thresh=cfg['models'][n]['miss'], maf_thresh=cfg['models'][n]['maf'], maxmaf_thresh=cfg['models'][n]['maxmaf'],
mac_thresh=cfg['models'][n]['mac'], rsq_thresh=cfg['models'][n]['rsq'], hwe_thresh=cfg['models'][n]['hwe'],
hwe_maf_thresh=cfg['models'][n]['hwe_maf'], allow_mono=cfg['models'][n]['allow_mono'])
except:
pass
try:
logger.debug("calc_model")
models_obj[n].calc_model()
except Process.Error as err:
print(err.out)
pass
if len(cfg['meta_order']) > 0:
if models_obj[n].results['err'][0] == 0:
meta_incl.append(n)
if not model_written[n]:
results_final_models[n] = models_obj[n].out
results_final_models_headers[n] = models_obj[n].out.columns.values
model_written[n] = True
else:
results_final_models[n] = results_final_models[n].append(models_obj[n].out, ignore_index=True)
if len(cfg['meta_order']) > 0:
models_obj[n].tag_results(n)
if not region_written:
results_region = models_obj[n].out
region_written = True
else:
results_region = results_region.merge(models_obj[n].out, how='outer')
analyzed = len(models_obj[n].variant_stats['filter'][models_obj[n].variant_stats['filter'] == 0])
status = ' (' + n + ') processed ' + str(models_obj[n].variants.info.shape[0]) + ' variants, ' + str(analyzed) + ' passed filters'
print(status)
sys.stdout.flush()
if len(cfg['meta_order']) > 0:
for meta in cfg['meta_order']:
meta_objs[meta].calc_meta(regions_df['chr'][k], regions_df['start'][k], regions_df['end'][k], regions_df['group_id'][k], models_obj[cfg['meta'][meta].split('+')[0]],meta_incl)
print(' processed meta analysis ' + meta + ' (' + "+".join([x for x in cfg['meta'][meta].split('+') if x in meta_incl]) + ')')
if not meta_written[meta]:
results_final_meta[meta] = meta_objs[meta].out.copy()
meta_written[meta] = True
else:
results_final_meta[meta] = results_final_meta[meta].merge(meta_objs[meta].out, how='outer')
last_chr = regions_df['chr'][k]
for n in cfg['model_order']:
pkl = open('/'.join(cfg['out'].split('/')[0:-1]) + '/' + cfg['out'].split('/')[-1] + '.cpu' + str(cpu) + '.' + n + '.pkl', "wb")
pickle.dump([results_final_models[n].sort_values(by=['chr','start']),models_obj[n].metadata,results_final_models_headers[n],models_obj[n].tbx_start,models_obj[n].tbx_end],pkl,protocol=2)
pkl.close()
if len(cfg['meta_order']) > 0:
for meta in cfg['meta_order']:
results_final_meta[meta] = results_final_meta[meta].sort_values(by=['chr','start'])
results_final_meta[meta]['chr'] = results_final_meta[meta]['chr'].astype(np.int64)
results_final_meta[meta]['start'] = results_final_meta[meta]['start'].astype(np.int64)
results_final_meta[meta]['end'] = results_final_meta[meta]['end'].astype(np.int64)
pkl = open('/'.join(cfg['out'].split('/')[0:-1]) + '/' + cfg['out'].split('/')[-1] + '.cpu' + str(cpu) + '.' + meta + '.pkl', "wb")
pickle.dump([results_final_meta[meta],meta_objs[meta].metadata,np.array(results_final_meta[meta].columns.values),meta_objs[meta].tbx_start,meta_objs[meta].tbx_end],pkl,protocol=2)
pkl.close()
if log:
sys.stdout = stdout_orig
log_file.close()
if variants_found:
return 0
else:
return -1
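# Note (sketch of the contract used below): process_regions returns 0 on
# success, -1 when no variants were found, and 1 on error; RunSnvgroup treats
# a return value of 1 from any cpu as fatal.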
def RunSnvgroup(args):
cfg = Parse.generate_snvgroup_cfg(args)
Parse.print_snvgroup_options(cfg)
if not cfg['debug']:
logging.disable(logging.CRITICAL)
regions_df = pd.read_table(cfg['region_file'], compression='gzip' if cfg['region_file'].split('.')[-1] == 'gz' else None)
regions_df = regions_df[regions_df['job'] == int(cfg['job'])].reset_index(drop=True)
return_values = {}
models_out = {}
bgzfiles = {}
print('')
for m in cfg['model_order']:
print("initializing out file for model " + m)
models_out[m] = cfg['out'] if m == '___no_tag___' else cfg['out'] + '.' + m
try:
bgzfiles[m] = bgzf.BgzfWriter(models_out[m] + '.gz', 'wb')
except:
print(Process.Error("failed to initialize bgzip format out file " + models_out[m] + '.gz').out)
return 1
if len(cfg['meta_order']) > 0:
for m in cfg['meta_order']:
print("initializing out file for meta " + m)
models_out[m] = cfg['out'] + '.' + m
try:
bgzfiles[m] = bgzf.BgzfWriter(models_out[m] + '.gz', 'wb')
except:
print(Process.Error("failed to initialize bgzip format out file " + models_out[m] + '.gz').out)
return 1
if cfg['cpus'] > 1:
pool = mp.Pool(cfg['cpus']-1)
for i in range(1,cfg['cpus']):
return_values[i] = pool.apply_async(process_regions, args=(regions_df,cfg,i,True,))
print("submitting job on cpu " + str(i) + " of " + str(cfg['cpus']))
pool.close()
print("executing job for cpu " + str(cfg['cpus']) + " of " + str(cfg['cpus']) + " via main process")
main_return = process_regions(regions_df,cfg,cfg['cpus'],True)
pool.join()
if 1 in [return_values[i].get() for i in return_values] or main_return == 1:
print(Process.Error("error detected, see log files").out)
return 1
else:
main_return = process_regions(regions_df,cfg,1,True)
if main_return == 1:
print(Process.Error("error detected, see log files").out)
return 1
for i in range(1,cfg['cpus']+1):
try:
logfile = open(cfg['out'] + '.cpu' + str(i) + '.log', 'r')
except:
print(Process.Error("failed to initialize log file " + cfg['out'] + '.cpu' + str(i) + '.log').out)
return 1
print(logfile.read())
logfile.close()
os.remove(cfg['out'] + '.cpu' + str(i) + '.log')
for m in cfg['model_order']:
written = False
for i in range(1,cfg['cpus']+1):
out_model_cpu = '/'.join(cfg['out'].split('/')[0:-1]) + '/' + cfg['out'].split('/')[-1] + '.cpu' + str(i) + '.' + m + '.pkl'
pkl = open(out_model_cpu,"rb")
results_final,metadata,results_header,tbx_start,tbx_end = pickle.load(pkl)
if not written:
bgzfiles[m].write(metadata)
bgzfiles[m].write("\t".join(results_header) + '\n')
written = True
if results_final.shape[0] > 0:
bgzfiles[m].write(results_final.replace({'None': 'NA'}).to_csv(index=False, sep='\t', header=False, na_rep='NA', float_format='%.5g', columns = results_header))
pkl.close()
os.remove(out_model_cpu)
bgzfiles[m].close()
print("indexing out file for model " + m if m != '___no_tag___' else "indexing out file")
try:
pysam.tabix_index(models_out[m] + '.gz',seq_col=0,start_col=tbx_start,end_col=tbx_end,force=True)
except:
print(Process.Error('failed to generate index for file ' + models_out[m] + '.gz').out)
return 1
if len(cfg['meta_order']) > 0:
for m in cfg['meta_order']:
written = False
for i in range(1,cfg['cpus']+1):
out_model_meta = '/'.join(cfg['out'].split('/')[0:-1]) + '/' + cfg['out'].split('/')[-1] + '.cpu' + str(i) + '.' + m + '.pkl'
pkl = open(out_model_meta,"rb")
results_final_meta,metadata,results_header,tbx_start,tbx_end = pickle.load(pkl)
if not written:
bgzfiles[m].write(metadata)
bgzfiles[m].write('\t'.join(results_header) + '\n')
written = True
if results_final_meta.shape[0] > 0:
bgzfiles[m].write(results_final_meta.replace({'None': 'NA'}).to_csv(index=False, sep='\t', header=False, na_rep='NA', float_format='%.5g', columns = results_header))
pkl.close()
os.remove(out_model_meta)
bgzfiles[m].close()
print("indexing out file for meta " + m)
try:
pysam.tabix_index(models_out[m] + '.gz',seq_col=0,start_col=tbx_start,end_col=tbx_end,force=True)
except:
print(Process.Error('failed to generate index for file ' + models_out[m] + '.gz').out)
return 1
print("process complete")
return 0
| rmkoesterer/uga | uga/RunSnvgroup.py | Python | gpl-3.0 | 13,158 |
from __future__ import print_function
import boto3
import sys
import hashlib
import botocore.exceptions
import json
import requests
print('Loading function')
with open('secret.txt', 'r') as f:
secret = f.read()
def hash_for(info):
return hashlib.sha256(info['email'] + info['name'] + secret).hexdigest()
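# Illustrative note (sketch): hash_for ties a request to the server-side
# secret, e.g. hash_for({'email': 'a@b.c', 'name': 'Al'}) yields a 64-char
# sha256 hexdigest; lambda_handler rejects users whose hash does not match.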
def lambda_handler(event, context):
print("Received event: " + json.dumps(event, indent=2))
shake_prefix = 'shakedowns/'
operation = event['operation']
s3 = boto3.resource('s3')
bucket = s3.Bucket('quarter-state')
res = {}
if operation == 'login':
url = "https://kencoder.auth0.com/tokeninfo"
token = event['token']
r = requests.post(url, data = {"id_token": token})
r.raise_for_status()
info = r.json()
user = {}
for k in ['picture', 'name', 'email']:
user[k] = info[k]
user['hash'] = hash_for(user)
res['user'] = user
res['status'] = 'ok'
else:
user = event['user']
res = {'user': user}
if user['hash'] != hash_for(user):
raise Exception("Invalid security")
if operation == 'gear':
r = requests.get("https://docs.google.com/spreadsheets/d/1pJUBDAWn6qBsSIU-SHnI6gHnprbKPPtrubU_toHvWJw/pub?output=tsv")
res['expected-gear'] = r.text
res['status'] = 'ok'
elif operation == 'store':
bucket.put_object(Key= shake_prefix + event['file'], Body=event['body'])
res['status'] = 'ok'
elif operation == 'get':
try:
# TODO figure out specific error
txt = s3.Object(bucket_name='quarter-state', key=shake_prefix + event['file']).get()['Body'].read()
res['shakedown'] = txt
except:
# res['errorMessage'] = str(sys.exc_info()[0])
# return res
pass
res['status'] = 'ok'
elif operation == 'list':
rs = bucket.objects.filter(Prefix=shake_prefix)
res['shakedowns'] = [f.key[len(shake_prefix):] for f in rs]
res['status'] = 'ok'
else:
raise Exception("Unknown operation " + operation)
return res
| KenCoder/quarter | lambda/lambda_function.py | Python | epl-1.0 | 2,243 |
"""
Copyright (c) 2014, Samsung Electronics Co.,Ltd.
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
The views and conclusions contained in the software and documentation are those
of the authors and should not be interpreted as representing official policies,
either expressed or implied, of Samsung Electronics Co.,Ltd..
"""
"""
cuda4py - CUDA cffi bindings and helper classes.
URL: https://github.com/ajkxyz/cuda4py
Original author: Alexey Kazantsev <a.kazantsev@samsung.com>
"""
"""
CUBLAS cffi bindings and helper classes.
"""
import cffi
import cuda4py._cffi as cuffi
from cuda4py._py import CU
#: ffi parser
ffi = None
#: Loaded shared library
lib = None
#: Error codes
CUBLAS_STATUS_SUCCESS = 0
CUBLAS_STATUS_NOT_INITIALIZED = 1
CUBLAS_STATUS_ALLOC_FAILED = 3
CUBLAS_STATUS_INVALID_VALUE = 7
CUBLAS_STATUS_ARCH_MISMATCH = 8
CUBLAS_STATUS_MAPPING_ERROR = 11
CUBLAS_STATUS_EXECUTION_FAILED = 13
CUBLAS_STATUS_INTERNAL_ERROR = 14
CUBLAS_STATUS_NOT_SUPPORTED = 15
CUBLAS_STATUS_LICENSE_ERROR = 16
#: Error descriptions
ERRORS = {
CUBLAS_STATUS_NOT_INITIALIZED: "CUBLAS_STATUS_NOT_INITIALIZED",
CUBLAS_STATUS_ALLOC_FAILED: "CUBLAS_STATUS_ALLOC_FAILED",
CUBLAS_STATUS_INVALID_VALUE: "CUBLAS_STATUS_INVALID_VALUE",
CUBLAS_STATUS_ARCH_MISMATCH: "CUBLAS_STATUS_ARCH_MISMATCH",
CUBLAS_STATUS_MAPPING_ERROR: "CUBLAS_STATUS_MAPPING_ERROR",
CUBLAS_STATUS_EXECUTION_FAILED: "CUBLAS_STATUS_EXECUTION_FAILED",
CUBLAS_STATUS_INTERNAL_ERROR: "CUBLAS_STATUS_INTERNAL_ERROR",
CUBLAS_STATUS_NOT_SUPPORTED: "CUBLAS_STATUS_NOT_SUPPORTED",
CUBLAS_STATUS_LICENSE_ERROR: "CUBLAS_STATUS_LICENSE_ERROR"
}
#: cublasOperation_t
CUBLAS_OP_N = 0
CUBLAS_OP_T = 1
CUBLAS_OP_C = 2
#: cublasDataType_t
CUBLAS_DATA_FLOAT = 0
CUBLAS_DATA_DOUBLE = 1
CUBLAS_DATA_HALF = 2
CUBLAS_DATA_INT8 = 3
#: cublasPointerMode_t
CUBLAS_POINTER_MODE_HOST = 0
CUBLAS_POINTER_MODE_DEVICE = 1
def _initialize(backends):
global lib
if lib is not None:
return
# C function definitions
# size_t instead of void* is used
# for convenience with python calls and numpy arrays.
src = """
typedef int cublasStatus_t;
typedef void *cublasHandle_t;
typedef int cublasOperation_t;
typedef int cublasPointerMode_t;
typedef int cublasDataType_t;
cublasStatus_t cublasCreate_v2(cublasHandle_t *handle);
cublasStatus_t cublasDestroy_v2(cublasHandle_t handle);
cublasStatus_t cublasSgemm_v2(
cublasHandle_t handle,
cublasOperation_t transa,
cublasOperation_t transb,
int m,
int n,
int k,
size_t alpha,
size_t A,
int lda,
size_t B,
int ldb,
size_t beta,
size_t C,
int ldc);
cublasStatus_t cublasDgemm_v2(
cublasHandle_t handle,
cublasOperation_t transa,
cublasOperation_t transb,
int m,
int n,
int k,
size_t alpha,
size_t A,
int lda,
size_t B,
int ldb,
size_t beta,
size_t C,
int ldc);
cublasStatus_t cublasSgemmEx(
cublasHandle_t handle,
cublasOperation_t transa,
cublasOperation_t transb,
int m,
int n,
int k,
size_t alpha,
size_t A,
cublasDataType_t Atype,
int lda,
size_t B,
cublasDataType_t Btype,
int ldb,
size_t beta,
size_t C,
cublasDataType_t Ctype,
int ldc);
cublasStatus_t cublasSetPointerMode_v2(cublasHandle_t handle,
cublasPointerMode_t mode);
"""
# Parse
global ffi
ffi = cffi.FFI()
ffi.cdef(src)
# Load library
for libnme in backends:
try:
lib = ffi.dlopen(libnme)
break
except OSError:
pass
else:
ffi = None
raise OSError("Could not load cublas library")
global ERRORS
for code, msg in ERRORS.items():
if code in CU.ERRORS:
s = " | " + msg
if s not in CU.ERRORS[code]:
CU.ERRORS[code] += s
else:
CU.ERRORS[code] = msg
def initialize(backends=("libcublas.so", "cublas64_65.dll")):
"""Loads shared library.
"""
cuffi.initialize()
global lib
if lib is not None:
return
with cuffi.lock:
_initialize(backends)
class CUBLAS(object):
"""CUBLAS functions can be invoked from this class.
"""
def __init__(self, context):
self._context = context
self._lib = None
context._add_ref(self)
initialize()
handle = ffi.new("cublasHandle_t *")
with context:
err = lib.cublasCreate_v2(handle)
if err:
self._handle = None
raise CU.error("cublasCreate_v2", err)
self._lib = lib # to hold the reference
self._handle = handle[0]
def __int__(self):
return self.handle
@property
def handle(self):
return self._handle
@property
def context(self):
return self._context
def set_pointer_mode(self, mode=CUBLAS_POINTER_MODE_DEVICE):
"""Sets the pointer mode used by the cuBLAS library.
Parameters:
mode: CUBLAS_POINTER_MODE_HOST or CUBLAS_POINTER_MODE_DEVICE
(the default cuBLAS mode is CUBLAS_POINTER_MODE_HOST).
"""
err = self._lib.cublasSetPointerMode_v2(self.handle, mode)
if err:
raise CU.error("cublasSetPointerMode_v2", err)
def sgemm(self, transA, transB,
rowsCountA, columnCountB, commonSideLength,
alpha, A, B, beta, C,
strideA=0, strideB=0, strideC=0):
"""Single precision (float) GEneral Matrix Multiplication.
Matrices are always in column order.
C = alpha * dot(A, B) + beta * C
C = alpha * dot(A^T, B) + beta * C
C = alpha * dot(A, B^T) + beta * C
C = alpha * dot(A^T, B^T) + beta * C
alpha, A, B, beta, C can be numpy array, Memory object,
cffi pointer or int.
Parameters:
transA: how matrix A is to be transposed
(CUBLAS_OP_N, CUBLAS_OP_T, CUBLAS_OP_C).
transB: how matrix B is to be transposed
(CUBLAS_OP_N, CUBLAS_OP_T, CUBLAS_OP_C).
rowsCountA: number of rows in matrix A.
columnCountB: number of columns in matrix B.
commonSideLength: length of the common side of the matrices.
alpha: the factor of matrix A.
A: matrix A.
B: matrix B.
beta: the factor of matrix C.
C: Buffer object storing matrix C.
strideA: leading dimension of matrix A:
clblasTrans: >= commonSideLength,
else: >= rowsCountA.
strideB: leading dimension of matrix B:
clblasTrans: >= columnCountB,
else: >= commonSideLength.
strideC: leading dimension of matrix C: >= rowsCountA.
Returns:
None.
"""
if not strideA:
strideA = commonSideLength if transA != CUBLAS_OP_N else rowsCountA
if not strideB:
strideB = (columnCountB if transB != CUBLAS_OP_N
else commonSideLength)
if not strideC:
strideC = rowsCountA
err = self._lib.cublasSgemm_v2(
self.handle, transA, transB, rowsCountA, columnCountB,
commonSideLength, CU.extract_ptr(alpha), A, strideA,
B, strideB, CU.extract_ptr(beta), C, strideC)
if err:
raise CU.error("cublasSgemm_v2", err)
def dgemm(self, transA, transB,
rowsCountA, columnCountB, commonSideLength,
alpha, A, B, beta, C,
strideA=0, strideB=0, strideC=0):
"""Double precision (double) GEneral Matrix Multiplication.
Matrices are always in column order.
C = alpha * dot(A, B) + beta * C
C = alpha * dot(A^T, B) + beta * C
C = alpha * dot(A, B^T) + beta * C
C = alpha * dot(A^T, B^T) + beta * C
alpha, A, B, beta, C can be numpy array, Memory object,
cffi pointer or int.
Parameters:
transA: how matrix A is to be transposed
(CUBLAS_OP_N, CUBLAS_OP_T, CUBLAS_OP_C).
transB: how matrix B is to be transposed
(CUBLAS_OP_N, CUBLAS_OP_T, CUBLAS_OP_C).
rowsCountA: number of rows in matrix A.
columnCountB: number of columns in matrix B.
commonSideLength: length of the common side of the matrices.
alpha: the factor of matrix A.
A: matrix A.
B: matrix B.
beta: the factor of matrix C.
C: Buffer object storing matrix C.
strideA: leading dimension of matrix A:
clblasTrans: >= commonSideLength,
else: >= rowsCountA.
strideB: leading dimension of matrix B:
clblasTrans: >= columnCountB,
else: >= commonSideLength.
strideC: leading dimension of matrix C: >= rowsCountA.
Returns:
None.
"""
if not strideA:
strideA = commonSideLength if transA != CUBLAS_OP_N else rowsCountA
if not strideB:
strideB = (columnCountB if transB != CUBLAS_OP_N
else commonSideLength)
if not strideC:
strideC = rowsCountA
err = self._lib.cublasDgemm_v2(
self.handle, transA, transB, rowsCountA, columnCountB,
commonSideLength, CU.extract_ptr(alpha), A, strideA,
B, strideB, CU.extract_ptr(beta), C, strideC)
if err:
raise CU.error("cublasDgemm_v2", err)
def sgemm_ex(self, transA, transB,
rowsCountA, columnCountB, commonSideLength,
alpha, A, B, beta, C,
strideA=0, strideB=0, strideC=0,
dtypeA=CUBLAS_DATA_HALF, dtypeB=CUBLAS_DATA_HALF,
dtypeC=CUBLAS_DATA_HALF):
"""Single precision (float) GEneral Matrix Multiplication
with support of different data types for each matrix.
Matrices are always in column order.
C = alpha * dot(A, B) + beta * C
C = alpha * dot(A^T, B) + beta * C
C = alpha * dot(A, B^T) + beta * C
C = alpha * dot(A^T, B^T) + beta * C
alpha, A, B, beta, C can be numpy array, Memory object,
cffi pointer or int.
Parameters:
transA: how matrix A is to be transposed
(CUBLAS_OP_N, CUBLAS_OP_T, CUBLAS_OP_C).
transB: how matrix B is to be transposed
(CUBLAS_OP_N, CUBLAS_OP_T, CUBLAS_OP_C).
rowsCountA: number of rows in matrix A.
columnCountB: number of columns in matrix B.
commonSideLength: length of the common side of the matrices.
alpha: the factor of matrix A.
A: matrix A.
B: matrix B.
beta: the factor of matrix C.
C: Buffer object storing matrix C.
strideA: leading dimension of matrix A:
clblasTrans: >= commonSideLength,
else: >= rowsCountA.
strideB: leading dimension of matrix B:
clblasTrans: >= columnCountB,
else: >= commonSideLength.
strideC: leading dimension of matrix C: >= rowsCountA.
dtypeA: data type of matrix A
(CUBLAS_DATA_FLOAT, CUBLAS_DATA_DOUBLE,
CUBLAS_DATA_HALF, CUBLAS_DATA_INT8).
dtypeB: data type of matrix B
(CUBLAS_DATA_FLOAT, CUBLAS_DATA_DOUBLE,
CUBLAS_DATA_HALF, CUBLAS_DATA_INT8).
dtypeC: data type of matrix C
(CUBLAS_DATA_FLOAT, CUBLAS_DATA_DOUBLE,
CUBLAS_DATA_HALF, CUBLAS_DATA_INT8).
Returns:
None.
"""
if not strideA:
strideA = commonSideLength if transA != CUBLAS_OP_N else rowsCountA
if not strideB:
strideB = (columnCountB if transB != CUBLAS_OP_N
else commonSideLength)
if not strideC:
strideC = rowsCountA
err = self._lib.cublasSgemmEx(
self.handle, transA, transB, rowsCountA, columnCountB,
commonSideLength, CU.extract_ptr(alpha), A, dtypeA, strideA,
B, dtypeB, strideB, CU.extract_ptr(beta), C, dtypeC, strideC)
if err:
raise CU.error("cublasSgemmEx", err)
@staticmethod
def gemm(dtype):
import numpy
if dtype == numpy.float32:
return CUBLAS.sgemm
if dtype == numpy.float64:
return CUBLAS.dgemm
if dtype == numpy.float16:
return CUBLAS.sgemm_ex
raise ValueError("Invalid dtype %s" % dtype)
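    # Illustrative usage (a hedged sketch; assumes a valid cuda4py context
    # `ctx` and device buffers a_buf, b_buf, c_buf allocated elsewhere):
    #
    #     import numpy
    #     blas = CUBLAS(ctx)
    #     blas.sgemm(CUBLAS_OP_N, CUBLAS_OP_N, m, n, k,
    #                numpy.ones(1, dtype=numpy.float32), a_buf, b_buf,
    #                numpy.zeros(1, dtype=numpy.float32), c_buf)
    #
    # Matrices are in column order, so m/n/k follow the docstring above.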
def _release(self):
if self._lib is not None and self.handle is not None:
self._lib.cublasDestroy_v2(self.handle)
self._handle = None
def __del__(self):
if self.context.handle is None:
raise SystemError("Incorrect destructor call order detected")
self._release()
self.context._del_ref(self)
| ajkxyz/cuda4py | src/cuda4py/blas/_cublas.py | Python | bsd-2-clause | 14,674 |
import unittest
from test_vrh import *
from test_spin import *
from test_composite import *
import os, sys
sys.path.insert(1,os.path.abspath('..'))
import burnman
from burnman import minerals
class TestRock(unittest.TestCase):
def test_rock(self):
amount_perovskite = 0.3
rock = burnman.composite( ( ( minerals.SLB_2005.mg_fe_perovskite(0.1), amount_perovskite ),
(minerals.SLB_2005.ferropericlase(0.2), 1.0-amount_perovskite) ) )
rock.set_method('slb2')
(fr,phases)=rock.unroll()
self.assertAlmostEqual(fr[0], 0.3, 2)
self.assertAlmostEqual(fr[1], 0.7, 2)
#class MyTest(unittest.TestCase):
# def test(self):
# self.assertEqual(3, 4)
#class CompareL2(unittest.TestCase):
#class VsVp(unittest.TestCase):
if __name__ == '__main__':
unittest.main()
| tjhei/burnman_old2 | tests/tests.py | Python | gpl-2.0 | 865 |
# -*- coding: utf-8 -*-
# ########################## Copyrights and license ############################
# #
# Copyright 2012 Steve English <steve.english@navetas.com> #
# Copyright 2012 Vincent Jacques <vincent@vincent-jacques.net> #
# Copyright 2012 Zearin <zearin@gonk.net> #
# Copyright 2013 AKFish <akfish@gmail.com> #
# Copyright 2013 Vincent Jacques <vincent@vincent-jacques.net> #
# #
# This file is part of PyGithub. http://jacquev6.github.com/PyGithub/ #
# #
# PyGithub is free software: you can redistribute it and/or modify it under #
# the terms of the GNU Lesser General Public License as published by the Free #
# Software Foundation, either version 3 of the License, or (at your option) #
# any later version. #
# #
# PyGithub is distributed in the hope that it will be useful, but WITHOUT ANY #
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS #
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more #
# details. #
# #
# You should have received a copy of the GNU Lesser General Public License #
# along with PyGithub. If not, see <http://www.gnu.org/licenses/>. #
# #
# ##############################################################################
import github.GithubObject
import github.PaginatedList
import github.Gist
import github.Repository
import github.NamedUser
import github.Plan
import github.Organization
import github.Event
class NamedUser(github.GithubObject.CompletableGithubObject):
"""
This class represents NamedUsers as returned for example by http://developer.github.com/v3/todo
"""
@property
def avatar_url(self):
"""
:type: string
"""
self._completeIfNotSet(self._avatar_url)
return self._avatar_url.value
@property
def bio(self):
"""
:type: string
"""
self._completeIfNotSet(self._bio)
return self._bio.value
@property
def blog(self):
"""
:type: string
"""
self._completeIfNotSet(self._blog)
return self._blog.value
@property
def collaborators(self):
"""
:type: integer
"""
self._completeIfNotSet(self._collaborators)
return self._collaborators.value
@property
def company(self):
"""
:type: string
"""
self._completeIfNotSet(self._company)
return self._company.value
@property
def contributions(self):
"""
:type: integer
"""
self._completeIfNotSet(self._contributions)
return self._contributions.value
@property
def created_at(self):
"""
:type: datetime.datetime
"""
self._completeIfNotSet(self._created_at)
return self._created_at.value
@property
def disk_usage(self):
"""
:type: integer
"""
self._completeIfNotSet(self._disk_usage)
return self._disk_usage.value
@property
def email(self):
"""
:type: string
"""
self._completeIfNotSet(self._email)
return self._email.value
@property
def events_url(self):
"""
:type: string
"""
self._completeIfNotSet(self._events_url)
return self._events_url.value
@property
def followers(self):
"""
:type: integer
"""
self._completeIfNotSet(self._followers)
return self._followers.value
@property
def followers_url(self):
"""
:type: string
"""
self._completeIfNotSet(self._followers_url)
return self._followers_url.value
@property
def following(self):
"""
:type: integer
"""
self._completeIfNotSet(self._following)
return self._following.value
@property
def following_url(self):
"""
:type: string
"""
self._completeIfNotSet(self._following_url)
return self._following_url.value
@property
def gists_url(self):
"""
:type: string
"""
self._completeIfNotSet(self._gists_url)
return self._gists_url.value
@property
def gravatar_id(self):
"""
:type: string
"""
self._completeIfNotSet(self._gravatar_id)
return self._gravatar_id.value
@property
def hireable(self):
"""
:type: bool
"""
self._completeIfNotSet(self._hireable)
return self._hireable.value
@property
def html_url(self):
"""
:type: string
"""
self._completeIfNotSet(self._html_url)
return self._html_url.value
@property
def id(self):
"""
:type: integer
"""
self._completeIfNotSet(self._id)
return self._id.value
@property
def location(self):
"""
:type: string
"""
self._completeIfNotSet(self._location)
return self._location.value
@property
def login(self):
"""
:type: string
"""
self._completeIfNotSet(self._login)
return self._login.value
@property
def name(self):
"""
:type: string
"""
self._completeIfNotSet(self._name)
return self._name.value
@property
def organizations_url(self):
"""
:type: string
"""
self._completeIfNotSet(self._organizations_url)
return self._organizations_url.value
@property
def owned_private_repos(self):
"""
:type: integer
"""
self._completeIfNotSet(self._owned_private_repos)
return self._owned_private_repos.value
@property
def plan(self):
"""
:type: :class:`github.Plan.Plan`
"""
self._completeIfNotSet(self._plan)
return self._plan.value
@property
def private_gists(self):
"""
:type: integer
"""
self._completeIfNotSet(self._private_gists)
return self._private_gists.value
@property
def public_gists(self):
"""
:type: integer
"""
self._completeIfNotSet(self._public_gists)
return self._public_gists.value
@property
def public_repos(self):
"""
:type: integer
"""
self._completeIfNotSet(self._public_repos)
return self._public_repos.value
@property
def received_events_url(self):
"""
:type: string
"""
self._completeIfNotSet(self._received_events_url)
return self._received_events_url.value
@property
def repos_url(self):
"""
:type: string
"""
self._completeIfNotSet(self._repos_url)
return self._repos_url.value
@property
def starred_url(self):
"""
:type: string
"""
self._completeIfNotSet(self._starred_url)
return self._starred_url.value
@property
def subscriptions_url(self):
"""
:type: string
"""
self._completeIfNotSet(self._subscriptions_url)
return self._subscriptions_url.value
@property
def total_private_repos(self):
"""
:type: integer
"""
self._completeIfNotSet(self._total_private_repos)
return self._total_private_repos.value
@property
def type(self):
"""
:type: string
"""
self._completeIfNotSet(self._type)
return self._type.value
@property
def updated_at(self):
"""
:type: datetime.datetime
"""
self._completeIfNotSet(self._updated_at)
return self._updated_at.value
@property
def url(self):
"""
:type: string
"""
self._completeIfNotSet(self._url)
return self._url.value
def get_events(self):
"""
:calls: `GET /users/:user/events <http://developer.github.com/v3/activity/events>`_
:rtype: :class:`github.PaginatedList.PaginatedList` of :class:`github.Event.Event`
"""
return github.PaginatedList.PaginatedList(
github.Event.Event,
self._requester,
self.url + "/events",
None
)
def get_followers(self):
"""
:calls: `GET /users/:user/followers <http://developer.github.com/v3/users/followers>`_
:rtype: :class:`github.PaginatedList.PaginatedList` of :class:`github.NamedUser.NamedUser`
"""
return github.PaginatedList.PaginatedList(
NamedUser,
self._requester,
self.url + "/followers",
None
)
def get_following(self):
"""
:calls: `GET /users/:user/following <http://developer.github.com/v3/users/followers>`_
:rtype: :class:`github.PaginatedList.PaginatedList` of :class:`github.NamedUser.NamedUser`
"""
return github.PaginatedList.PaginatedList(
NamedUser,
self._requester,
self.url + "/following",
None
)
def get_gists(self):
"""
:calls: `GET /users/:user/gists <http://developer.github.com/v3/gists>`_
:rtype: :class:`github.PaginatedList.PaginatedList` of :class:`github.Gist.Gist`
"""
return github.PaginatedList.PaginatedList(
github.Gist.Gist,
self._requester,
self.url + "/gists",
None
)
def get_keys(self):
"""
:calls: `GET /users/:user/keys <http://developer.github.com/v3/users/keys>`_
:rtype: :class:`github.PaginatedList.PaginatedList` of :class:`github.UserKey.UserKey`
"""
return github.PaginatedList.PaginatedList(
github.UserKey.UserKey,
self._requester,
self.url + "/keys",
None
)
def get_orgs(self):
"""
:calls: `GET /users/:user/orgs <http://developer.github.com/v3/orgs>`_
:rtype: :class:`github.PaginatedList.PaginatedList` of :class:`github.Organization.Organization`
"""
return github.PaginatedList.PaginatedList(
github.Organization.Organization,
self._requester,
self.url + "/orgs",
None
)
def get_public_events(self):
"""
:calls: `GET /users/:user/events/public <http://developer.github.com/v3/activity/events>`_
:rtype: :class:`github.PaginatedList.PaginatedList` of :class:`github.Event.Event`
"""
return github.PaginatedList.PaginatedList(
github.Event.Event,
self._requester,
self.url + "/events/public",
None
)
def get_public_received_events(self):
"""
:calls: `GET /users/:user/received_events/public <http://developer.github.com/v3/activity/events>`_
:rtype: :class:`github.PaginatedList.PaginatedList` of :class:`github.Event.Event`
"""
return github.PaginatedList.PaginatedList(
github.Event.Event,
self._requester,
self.url + "/received_events/public",
None
)
def get_received_events(self):
"""
:calls: `GET /users/:user/received_events <http://developer.github.com/v3/activity/events>`_
:rtype: :class:`github.PaginatedList.PaginatedList` of :class:`github.Event.Event`
"""
return github.PaginatedList.PaginatedList(
github.Event.Event,
self._requester,
self.url + "/received_events",
None
)
def get_repo(self, name):
"""
:calls: `GET /repos/:owner/:repo <http://developer.github.com/v3/repos>`_
:param name: string
:rtype: :class:`github.Repository.Repository`
"""
assert isinstance(name, str), name
headers, data = self._requester.requestJsonAndCheck(
"GET",
"/repos/" + self.login + "/" + name
)
return github.Repository.Repository(self._requester, headers, data, completed=True)
def get_repos(self, type=github.GithubObject.NotSet):
"""
:calls: `GET /users/:user/repos <http://developer.github.com/v3/repos>`_
:param type: string
:rtype: :class:`github.PaginatedList.PaginatedList` of :class:`github.Repository.Repository`
"""
assert type is github.GithubObject.NotSet or isinstance(type, str), type
url_parameters = dict()
if type is not github.GithubObject.NotSet:
url_parameters["type"] = type
return github.PaginatedList.PaginatedList(
github.Repository.Repository,
self._requester,
self.url + "/repos",
url_parameters
)
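    # Illustrative usage (sketch; assumes an authenticated github.Github
    # instance `g` created elsewhere):
    #
    #     user = g.get_user("octocat")
    #     for repo in user.get_repos(type="owner"):
    #         print(repo.name)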
def get_starred(self):
"""
:calls: `GET /users/:user/starred <http://developer.github.com/v3/activity/starring>`_
:rtype: :class:`github.PaginatedList.PaginatedList` of :class:`github.Repository.Repository`
"""
return github.PaginatedList.PaginatedList(
github.Repository.Repository,
self._requester,
self.url + "/starred",
None
)
def get_subscriptions(self):
"""
:calls: `GET /users/:user/subscriptions <http://developer.github.com/v3/activity/watching>`_
:rtype: :class:`github.PaginatedList.PaginatedList` of :class:`github.Repository.Repository`
"""
return github.PaginatedList.PaginatedList(
github.Repository.Repository,
self._requester,
self.url + "/subscriptions",
None
)
def get_watched(self):
"""
:calls: `GET /users/:user/watched <http://developer.github.com/v3/activity/starring>`_
:rtype: :class:`github.PaginatedList.PaginatedList` of :class:`github.Repository.Repository`
"""
return github.PaginatedList.PaginatedList(
github.Repository.Repository,
self._requester,
self.url + "/watched",
None
)
def has_in_following(self, following):
"""
:calls: `GET /users/:user/following/:target_user <http://developer.github.com/v3/users/followers/#check-if-one-user-follows-another>`_
:param following: :class:`github.NamedUser.NamedUser`
:rtype: bool
"""
assert isinstance(following, github.NamedUser.NamedUser), following
status, headers, data = self._requester.requestJson(
"GET",
self.url + "/following/" + following._identity
)
return status == 204
@property
def _identity(self):
return self.login
def _initAttributes(self):
self._avatar_url = github.GithubObject.NotSet
self._bio = github.GithubObject.NotSet
self._blog = github.GithubObject.NotSet
self._collaborators = github.GithubObject.NotSet
self._company = github.GithubObject.NotSet
self._contributions = github.GithubObject.NotSet
self._created_at = github.GithubObject.NotSet
self._disk_usage = github.GithubObject.NotSet
self._email = github.GithubObject.NotSet
self._events_url = github.GithubObject.NotSet
self._followers = github.GithubObject.NotSet
self._followers_url = github.GithubObject.NotSet
self._following = github.GithubObject.NotSet
self._following_url = github.GithubObject.NotSet
self._gists_url = github.GithubObject.NotSet
self._gravatar_id = github.GithubObject.NotSet
self._hireable = github.GithubObject.NotSet
self._html_url = github.GithubObject.NotSet
self._id = github.GithubObject.NotSet
self._location = github.GithubObject.NotSet
self._login = github.GithubObject.NotSet
self._name = github.GithubObject.NotSet
self._organizations_url = github.GithubObject.NotSet
self._owned_private_repos = github.GithubObject.NotSet
self._plan = github.GithubObject.NotSet
self._private_gists = github.GithubObject.NotSet
self._public_gists = github.GithubObject.NotSet
self._public_repos = github.GithubObject.NotSet
self._received_events_url = github.GithubObject.NotSet
self._repos_url = github.GithubObject.NotSet
self._starred_url = github.GithubObject.NotSet
self._subscriptions_url = github.GithubObject.NotSet
self._total_private_repos = github.GithubObject.NotSet
self._type = github.GithubObject.NotSet
self._updated_at = github.GithubObject.NotSet
self._url = github.GithubObject.NotSet
def _useAttributes(self, attributes):
if "avatar_url" in attributes: # pragma no branch
self._avatar_url = self._makeStringAttribute(attributes["avatar_url"])
if "bio" in attributes: # pragma no branch
self._bio = self._makeStringAttribute(attributes["bio"])
if "blog" in attributes: # pragma no branch
self._blog = self._makeStringAttribute(attributes["blog"])
if "collaborators" in attributes: # pragma no branch
self._collaborators = self._makeIntAttribute(attributes["collaborators"])
if "company" in attributes: # pragma no branch
self._company = self._makeStringAttribute(attributes["company"])
if "contributions" in attributes: # pragma no branch
self._contributions = self._makeIntAttribute(attributes["contributions"])
if "created_at" in attributes: # pragma no branch
self._created_at = self._makeDatetimeAttribute(attributes["created_at"])
if "disk_usage" in attributes: # pragma no branch
self._disk_usage = self._makeIntAttribute(attributes["disk_usage"])
if "email" in attributes: # pragma no branch
self._email = self._makeStringAttribute(attributes["email"])
if "events_url" in attributes: # pragma no branch
self._events_url = self._makeStringAttribute(attributes["events_url"])
if "followers" in attributes: # pragma no branch
self._followers = self._makeIntAttribute(attributes["followers"])
if "followers_url" in attributes: # pragma no branch
self._followers_url = self._makeStringAttribute(attributes["followers_url"])
if "following" in attributes: # pragma no branch
self._following = self._makeIntAttribute(attributes["following"])
if "following_url" in attributes: # pragma no branch
self._following_url = self._makeStringAttribute(attributes["following_url"])
if "gists_url" in attributes: # pragma no branch
self._gists_url = self._makeStringAttribute(attributes["gists_url"])
if "gravatar_id" in attributes: # pragma no branch
self._gravatar_id = self._makeStringAttribute(attributes["gravatar_id"])
if "hireable" in attributes: # pragma no branch
self._hireable = self._makeBoolAttribute(attributes["hireable"])
if "html_url" in attributes: # pragma no branch
self._html_url = self._makeStringAttribute(attributes["html_url"])
if "id" in attributes: # pragma no branch
self._id = self._makeIntAttribute(attributes["id"])
if "location" in attributes: # pragma no branch
self._location = self._makeStringAttribute(attributes["location"])
if "login" in attributes: # pragma no branch
self._login = self._makeStringAttribute(attributes["login"])
if "name" in attributes: # pragma no branch
self._name = self._makeStringAttribute(attributes["name"])
if "organizations_url" in attributes: # pragma no branch
self._organizations_url = self._makeStringAttribute(attributes["organizations_url"])
if "owned_private_repos" in attributes: # pragma no branch
self._owned_private_repos = self._makeIntAttribute(attributes["owned_private_repos"])
if "plan" in attributes: # pragma no branch
self._plan = self._makeClassAttribute(github.Plan.Plan, attributes["plan"])
if "private_gists" in attributes: # pragma no branch
self._private_gists = self._makeIntAttribute(attributes["private_gists"])
if "public_gists" in attributes: # pragma no branch
self._public_gists = self._makeIntAttribute(attributes["public_gists"])
if "public_repos" in attributes: # pragma no branch
self._public_repos = self._makeIntAttribute(attributes["public_repos"])
if "received_events_url" in attributes: # pragma no branch
self._received_events_url = self._makeStringAttribute(attributes["received_events_url"])
if "repos_url" in attributes: # pragma no branch
self._repos_url = self._makeStringAttribute(attributes["repos_url"])
if "starred_url" in attributes: # pragma no branch
self._starred_url = self._makeStringAttribute(attributes["starred_url"])
if "subscriptions_url" in attributes: # pragma no branch
self._subscriptions_url = self._makeStringAttribute(attributes["subscriptions_url"])
if "total_private_repos" in attributes: # pragma no branch
self._total_private_repos = self._makeIntAttribute(attributes["total_private_repos"])
if "type" in attributes: # pragma no branch
self._type = self._makeStringAttribute(attributes["type"])
if "updated_at" in attributes: # pragma no branch
self._updated_at = self._makeDatetimeAttribute(attributes["updated_at"])
if "url" in attributes: # pragma no branch
self._url = self._makeStringAttribute(attributes["url"])
| ArcherSys/ArcherSys | Lib/site-packages/github/NamedUser.py | Python | mit | 67,937 |
# Copyright 2010 by Dana Larose
# This file is part of crashRun.
# crashRun is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# crashRun is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with crashRun. If not, see <http://www.gnu.org/licenses/>.
from random import random
from random import randrange
from .Agent import ExperimentalHoboInfiltrationDroid41K
from .GameLevel import GameLevel
from .GameLevel import ItemChart
from . import MonsterFactory
from . import SubnetNode
from . import Terrain
from .Terrain import TerrainFactory
from .Terrain import DOWN_STAIRS
from .Terrain import FLOOR
from .Terrain import SECURITY_CAMERA
from .Terrain import TERMINAL
from .Terrain import UP_STAIRS
from .TowerFactory import TowerFactory
from .Util import AudioAlert
class OldComplexLevel(GameLevel):
def __init__(self, dm, level_num, length, width):
GameLevel.__init__(self, dm, level_num, length, width, 'old complex')
def __add_items_to_level(self):
_chart = ItemChart()
_chart.common_items[0] = ('shotgun shell', 7)
_chart.common_items[1] = ('medkit', 0)
_chart.common_items[2] = ('old fatigues', 0)
_chart.common_items[3] = ('flare', 0)
_chart.common_items[4] = ('baseball bat', 0)
_chart.common_items[5] = ('shotgun shell', 7)
_chart.common_items[6] = ('medkit', 0)
_chart.uncommon_items[0] = ('army helmet', 0)
_chart.uncommon_items[1] = ('amphetamine', 5)
_chart.uncommon_items[2] = ('combat boots', 0)
_chart.uncommon_items[3] = ('lockpick', 0)
_chart.uncommon_items[4] = ('stimpak', 0)
_chart.uncommon_items[5] = ('long leather coat', 0)
_chart.uncommon_items[6] = ('flashlight', 0)
_chart.uncommon_items[7] = ('throwing knife', 2)
_chart.uncommon_items[8] = ('instant coffee', 0)
_chart.uncommon_items[9] = ('leather gloves', 0)
_chart.rare_items[0] = ('grenade', 3)
_chart.rare_items[1] = ('C4 Charge', 0)
_chart.rare_items[2] = ('flak jacket', 0)
_chart.rare_items[3] = ('chainsaw', 0)
_chart.rare_items[4] = ('battery', 3)
_chart.rare_items[5] = ('grenade', 2)
_chart.rare_items[6] = ('battery', 2)
_chart.rare_items[7] = ('rubber boots', 0)
        _chart.rare_items[8] = ('flash bomb', 2)
        # the original assigned index 8 twice, silently dropping 'flash bomb';
        # the remaining entries are renumbered so both items can spawn
        _chart.rare_items[9] = ('Addidas sneakers', 0)
        _chart.rare_items[10] = ('machine gun clip', 0)
        _chart.rare_items[11] = ('9mm clip', 0)
        _chart.rare_items[12] = ('m1911a1', 0)
        _chart.rare_items[13] = ('taser', 0)
        for _ in range(randrange(5, 10)):
            self.add_item(_chart)
def __add_subnet_nodes(self):
_rnd = randrange(0,6)
if _rnd < 3:
self.subnet_nodes.append(SubnetNode.LameSubnetNode())
elif _rnd == 3:
self.subnet_nodes.append(SubnetNode.get_skill_node('Dance'))
elif _rnd == 4:
self.subnet_nodes.append(SubnetNode.StatBuilderNode())
else:
self.subnet_nodes.append(SubnetNode.get_skill_node())
self.subnet_nodes.append(SubnetNode.RobotGrandCentral())
def __get_monster(self, _monster_level):
if _monster_level < 4:
rnd = randrange(0, 8)
else:
rnd = randrange(4, 20)
if rnd in range(0,2):
return MonsterFactory.get_monster_by_name(self.dm,'feral dog', 0, 0)
elif rnd in range(2,4):
return MonsterFactory.get_monster_by_name(self.dm,'junkie', 0, 0)
elif rnd in range(4,6):
return MonsterFactory.get_monster_by_name(self.dm,'extra large cockroach', 0, 0)
elif rnd in range(6,8):
return MonsterFactory.get_monster_by_name(self.dm,'mutant rat', 0, 0)
elif rnd in range(8,10):
return MonsterFactory.get_monster_by_name(self.dm,'dust head', 0, 0)
elif rnd in range(10,12):
return MonsterFactory.get_monster_by_name(self.dm,'mutant mutt', 0, 0)
elif rnd in range(12,14):
return MonsterFactory.get_monster_by_name(self.dm,'damaged security bot', 0, 0)
elif rnd in range(14,17):
return MonsterFactory.get_monster_by_name(self.dm,'mutant', 0, 0)
elif rnd in range(17,19):
return MonsterFactory.get_monster_by_name(self.dm,'reanimated mailroom clerk', 0, 0)
else:
return MonsterFactory.get_monster_by_name(self.dm,'surveillance drone', 0, 0)
def add_monster(self):
_monster_level = self.level_num
if _monster_level > 2:
rnd = random()
if rnd < 0.05:
_monster_level += 3
elif rnd < 0.10:
_monster_level += 2
elif rnd < 0.20:
_monster_level += 1
elif rnd > 0.95:
_monster_level -= 1
GameLevel.add_monster(self, self.__get_monster(_monster_level))
def __add_monsters(self):
for j in range(randrange(15,31)):
self.add_monster()
def __bust_up_level(self):
maxDestruction = (500 - self.level_num) // 2
minDestruction = maxDestruction // 2
l = self.lvl_length - 1
w = self.lvl_width - 1
_tf = Terrain.TerrainFactory()
for x in range(randrange(minDestruction, maxDestruction)):
r = randrange(1,l)
c = randrange(1,w)
if self.map[r][c].get_type() not in (UP_STAIRS,DOWN_STAIRS,SECURITY_CAMERA,TERMINAL):
self.map[r][c] = _tf.get_terrain_tile(FLOOR)
def __generate_map(self):
_tower = TowerFactory(self.lvl_length, self.lvl_width, False, False)
self.map = _tower.gen_map()
self.entrance = _tower.upStairs
self.exit = _tower.downStairs
self.__bust_up_level()
def add_EHID41K(self):
_odds = float(self.level_num - 2) / 4
_r = random()
if _r < _odds:
self.dm.player.remember('EHID41K')
_droid = ExperimentalHoboInfiltrationDroid41K(self.dm, 0, 0)
GameLevel.add_monster(self, _droid)
def generate_level(self):
self.__generate_map()
for j in range(randrange(3,7)):
self.add_feature_to_map(Terrain.Terminal())
for j in range(randrange(3,7)):
_cam = Terrain.SecurityCamera(5, True)
self.cameras[j] = _cam
self.add_feature_to_map(_cam)
# add a few traps, maybe
if self.level_num > 2:
for j in range(3):
if randrange(4) == 0:
self.place_sqr(Terrain.ConcussionMine(), FLOOR)
self.__add_items_to_level()
self.__add_monsters()
self.__add_subnet_nodes()
if random() < 0.25:
self.map[self.exit[0]][self.exit[1]].activated = False
if not self.dm.player.has_memory('EHID41K'):
self.add_EHID41K()
def dispatch_security_bots(self):
for x in range(randrange(1,6)):
GameLevel.add_monster(self, MonsterFactory.get_monster_by_name(self.dm,'damaged security bot',0,0))
def begin_security_lockdown(self):
if self.security_active and not self.security_lockdown:
self.security_lockdown = True
self.disable_lifts()
self.dispatch_security_bots()
if self.dm.player.curr_level == self.level_num:
alert = AudioAlert(self.dm.player.row, self.dm.player.col, 'An alarm begins to sound.', '')
alert.show_alert(self.dm, False)
for _m in self.monsters:
_m.attitude = 'hostile'
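# Editor's note: a standalone sketch, not part of the original file, isolating
# the out-of-depth roll used by OldComplexLevel.add_monster() above: 5% of
# spawns are three levels deeper, 5% two deeper, 10% one deeper, 5% one
# shallower, and the remaining 75% use the level's own depth.
def _example_monster_level_roll(level_num):
    from random import random
    monster_level = level_num
    if monster_level > 2:
        rnd = random()
        if rnd < 0.05:
            monster_level += 3
        elif rnd < 0.10:
            monster_level += 2
        elif rnd < 0.20:
            monster_level += 1
        elif rnd > 0.95:
            monster_level -= 1
    return monster_level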
| DanaL/crashRun | src/OldComplex.py | Python | gpl-3.0 | 8,287 |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class KeyVaultKeyReference(Model):
"""Describes a reference to Key Vault Key.
All required parameters must be populated in order to send to Azure.
:param key_url: Required. The URL referencing a key encryption key in Key
Vault.
:type key_url: str
:param source_vault: Required. The relative URL of the Key Vault
containing the key.
:type source_vault:
~azure.mgmt.compute.v2016_04_30_preview.models.SubResource
"""
_validation = {
'key_url': {'required': True},
'source_vault': {'required': True},
}
_attribute_map = {
'key_url': {'key': 'keyUrl', 'type': 'str'},
'source_vault': {'key': 'sourceVault', 'type': 'SubResource'},
}
def __init__(self, *, key_url: str, source_vault, **kwargs) -> None:
super(KeyVaultKeyReference, self).__init__(**kwargs)
self.key_url = key_url
self.source_vault = source_vault
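# Editor's note: a construction sketch, not part of the original file. It
# assumes the sibling SubResource model from the same models package; the
# subscription, vault and key identifiers are hypothetical placeholders.
def _example_key_vault_key_reference():
    from azure.mgmt.compute.v2016_04_30_preview.models import SubResource
    vault = SubResource(
        id="/subscriptions/<sub>/resourceGroups/<rg>"
           "/providers/Microsoft.KeyVault/vaults/<vault>")
    return KeyVaultKeyReference(
        key_url="https://<vault>.vault.azure.net/keys/<key>/<version>",
        source_vault=vault)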
| lmazuel/azure-sdk-for-python | azure-mgmt-compute/azure/mgmt/compute/v2016_04_30_preview/models/key_vault_key_reference_py3.py | Python | mit | 1,446 |
#from __future__ import absolute_import
#from . import initializations
#from . import layers
#from . import models
#from . import regularizers
#from . import trainings
__version__ = '0.3.2'
| yhalk/vw_challenge_ECR | src/jetson/acol/__init__.py | Python | apache-2.0 | 191 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
]
operations = [
migrations.CreateModel(
name='StockSubscription',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('symbol', models.CharField(max_length=8)),
('lastNotified', models.DateTimeField(auto_now_add=True)),
('notificationsPerDay', models.IntegerField()),
],
),
]
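# Editor's note: for reference only, not part of the original migration. A
# sketch of the model that the CreateModel operation above corresponds to; it
# is left commented out because defining a real Model here would require
# Django's app registry, and the app/module placement is an assumption.
#
# class StockSubscription(models.Model):
#     symbol = models.CharField(max_length=8)
#     lastNotified = models.DateTimeField(auto_now_add=True)
#     notificationsPerDay = models.IntegerField()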
| bellisk/hypothesis-django-example | mysite/example/migrations/0001_initial.py | Python | mit | 632 |
def greater_than(left, right):
    """Binary max; used as the reducer when folding over all line products."""
    return left if left > right else right
def get_largest_product_in_number_line(number_line):
    """Return the largest product of four adjacent values in number_line."""
    current_product = reduce(lambda x, y: x * y, number_line[:4])
    largest_product = current_product
    for j in range(4, len(number_line)):
        # A divide/multiply sliding window (kept below for reference) was
        # abandoned: a zero in the grid raises ZeroDivisionError when it
        # slides out of the window, so each four-term window is recomputed
        # from scratch instead.
        # current_product /= number_line[j - 4]
        # current_product *= number_line[j]
        current_product = number_line[j] *\
                          number_line[j - 1] *\
                          number_line[j - 2] *\
                          number_line[j - 3]
        if current_product > largest_product:
            largest_product = current_product
    return largest_product
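# Editor's note: illustrative check, not part of the original file. For
# [1, 2, 3, 4, 5] the four-term windows are 1*2*3*4 = 24 and 2*3*4*5 = 120,
# so the function returns 120.
assert get_largest_product_in_number_line([1, 2, 3, 4, 5]) == 120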
row_width = 20
col_length = row_width
number_grid = []
for i in range(col_length):
line = raw_input().split()
number_grid.append(map(int, line))
# we've prepped the grid. It's time to process the rows, columns, and diagonals:
product = reduce(greater_than,
# horizontals:
[get_largest_product_in_number_line(number_grid[i])
for i in range(row_width)] +
# verticals:
[get_largest_product_in_number_line(
[number_grid[j][i]
for j in range(col_length)])
for i in range(row_width)] +
# backslash diagonals:
[get_largest_product_in_number_line(
[number_grid[j][j + i]
for j in range(col_length - i)])
for i in range(row_width - 3)] +
[get_largest_product_in_number_line(
[number_grid[j + i][j]
for j in range(row_width - i)])
for i in range(col_length - 3)] +
# forwardslash diagonals:
[get_largest_product_in_number_line(
[number_grid[j][i - j]
for j in range(i + 1)])
for i in range(3, row_width)] +
[get_largest_product_in_number_line(
[number_grid[j][i + row_width - 1 - j]
for j in range(i, row_width)])
for i in range(col_length - 3)])
print(product)
| PisoMojado/ProjectEuler- | src/euler/problems/problem11.py | Python | gpl-2.0 | 2,460 |
""" QDialog for "attaching" additional metadata from CSV file
"""
import logging
import os
from PyQt4 import QtCore, QtGui
import numpy as np
from ..ui_attach_md import Ui_AttachMd
from ..logger import qgis_log
from ..ts_driver.ts_manager import tsm
logger = logging.getLogger('tstools')
class AttachMetadata(QtGui.QDialog, Ui_AttachMd):
""" Plot symbology metadata attacher """
metadata_attached = QtCore.pyqtSignal()
def __init__(self, iface):
# Qt setup
self.iface = iface
QtGui.QDialog.__init__(self)
self.setupUi(self)
# Metadata file
self.metadata_file = os.getcwd()
self.metadata_header = True
self.metadata_delim = ','
self.md = None
self.colnames = None
# Finish setup
self.setup_gui()
def setup_gui(self):
""" Finish initializing GUI """
# Open metadata GUI
self.edit_metadata.setText(self.metadata_file)
self.edit_delim.setText(self.metadata_delim)
self.cbox_header.setChecked(QtCore.Qt.Checked if self.metadata_header
else QtCore.Qt.Unchecked)
self.but_browse.clicked.connect(self.find_metadata)
self.but_load.clicked.connect(self.load_metadata)
# Match buttons
self.match_buttons = QtGui.QButtonGroup()
self.match_buttons.addButton(self.rad_ID)
self.match_buttons.addButton(self.rad_date)
self.rad_ID.setChecked(True)
# Add metadata button
self.but_add_metadata.clicked.connect(self.add_metadata)
@QtCore.pyqtSlot()
def find_metadata(self):
""" Open QFileDialog to find a metadata file """
# Open QFileDialog
metadata = str(QtGui.QFileDialog.getOpenFileName(self,
'Locate metadata file',
self.metadata_file if os.path.isdir(self.metadata_file)
else os.path.dirname(self.metadata_file)))
if metadata != '':
self.edit_metadata.setText(metadata)
@QtCore.pyqtSlot()
def load_metadata(self):
""" Try to load metadata file specified by QLineEdit
Wraps `self.try_load_metadata` by handling rest of GUI after
success/failure
"""
# Try to open metadata file
success = self.try_load_metadata()
if success:
self.rad_ID.setEnabled(True)
self.rad_date.setEnabled(True)
self.table_metadata.setEnabled(True)
else:
self.rad_ID.setEnabled(False)
self.rad_date.setEnabled(False)
self.table_metadata.setEnabled(False)
return
# Load table with metadata
self.table_metadata.setSelectionBehavior(
QtGui.QAbstractItemView.SelectColumns)
self.table_metadata.setColumnCount(len(self.colnames))
self.table_metadata.setRowCount(self.md.shape[0])
self.table_metadata.setHorizontalHeaderLabels(self.colnames)
for (r, c), v in np.ndenumerate(self.md):
item = QtGui.QTableWidgetItem(str(v))
item.setTextAlignment(QtCore.Qt.AlignHCenter)
self.table_metadata.setItem(r, c, item)
self.table_metadata.selectColumn(0)
def try_load_metadata(self):
""" Try to load metadata file specified by QLineEdit """
# Get current value
metadata = str(self.edit_metadata.text())
# Get delimiter
delim = str(self.edit_delim.text())
# Get header indicator
header = (True if self.cbox_header.checkState() == QtCore.Qt.Checked
else False)
# Try to open
        try:
            md = np.genfromtxt(metadata, dtype=str, delimiter=delim,
                               skip_header=1 if header else 0,
                               autostrip=True)
        except Exception:
            qgis_log('Could not parse metadata file',
                     level=logging.WARNING, duration=5)
            # The original re-raised here, which made the `return False`
            # below unreachable and bypassed the caller's failure handling;
            # returning False keeps the documented contract.
            return False
if header:
            try:
                with open(metadata, 'r') as f:
                    # strip the trailing newline so the last column name is clean
                    colnames = f.readline().strip().split(delim)
            except Exception:
                qgis_log('Could not parse metadata header',
                         logging.WARNING, 5)
                return False
else:
colnames = ['Column ' + str(i + 1) for i in range(md.shape[1])]
if not len(colnames) == md.shape[1]:
msg = ('Metadata file has more column headers ({c})'
' than fields ({f})'.format(c=len(colnames), f=md.shape[1]))
qgis_log(msg, logging.WARNING, 5)
return False
self.metadata_file = metadata
self.md = md
self.colnames = list(colnames)
return True
@QtCore.pyqtSlot()
def add_metadata(self):
""" """
# Try to match metadata
ts_match_var = (tsm.ts.images['id'] if self.rad_ID.isChecked() is True
else tsm.ts.images['date'])
# Match column
match_col = self.table_metadata.selectedItems()[0].column()
md_match_var = self.md[:, match_col]
# Try to match
if len(ts_match_var) != len(md_match_var):
msg = 'Wrong number of elements to match ({t} vs. {m})'.format(
t=len(ts_match_var), m=len(md_match_var))
qgis_log(msg, logging.WARNING, 5)
return
if not np.all(np.sort(ts_match_var) == np.sort(md_match_var)):
msg = 'Not all elements match'
qgis_log(msg, logging.WARNING, 5)
return
# Perform match
match_ind = []
for i in xrange(len(ts_match_var)):
ind = np.where(md_match_var == ts_match_var[i])[0]
if len(ind) > 1:
msg = 'Multiple index matches for {m}'.format(
m=ts_match_var[i])
qgis_log(msg, logging.WARNING, 5)
return
match_ind.append(ind[0])
match_ind = np.array(match_ind)
# Sort
self.md_sorted = self.md[match_ind, :]
# Add to timeseries
for i, md in enumerate(self.colnames):
# Ignore match column
if i == match_col:
continue
if md not in tsm.ts.metadata and not hasattr(tsm.ts, md):
tsm.ts.metadata.append(md)
tsm.ts.metadata_str.append(md)
setattr(tsm.ts, md, self.md_sorted[:, i])
else:
msg = 'TS already has metadata item {m}'.format(m=md)
qgis_log(msg, logging.WARNING, 5)
# Emit
self.metadata_attached.emit()
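# Editor's note: a standalone sketch, not part of the original file, of the
# row matching performed in add_metadata() above: for each timeseries key,
# locate the single metadata row holding the same value, and collect those
# indices so the metadata array can be reordered into timeseries order.
def _example_match_rows(ts_keys, md_keys):
    import numpy as np
    order = []
    for key in ts_keys:
        ind = np.where(md_keys == key)[0]
        assert len(ind) == 1, 'each key must match exactly one metadata row'
        order.append(ind[0])
    return np.array(order)
# hypothetical usage: md_sorted = md[_example_match_rows(ids, md[:, 0]), :]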
| ceholden/TSTools | tstools/src/controls/attach_md.py | Python | gpl-2.0 | 6,679 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
"""
Usage:
python app.py <user_id>
"""
import json
import os
import requests
import sys
from bs4 import BeautifulSoup
import concurrent.futures
def crawl(user_id, items=None, max_id=None):
    # A mutable default argument ([]) is shared across calls, so repeated
    # top-level invocations would accumulate results; create the list per call.
    if items is None:
        items = []
url = 'https://twitter.com/i/profiles/show/' + user_id + '/media_timeline' + ('?&max_id=' + max_id if max_id is not None else '')
    media = json.loads(requests.get(url).text)
    # name the parser explicitly so bs4 does not warn and behaviour is stable
    soup = BeautifulSoup(media['items_html'], 'html.parser')
    tags = soup.find_all(attrs={'data-resolved-url-large': True})
    items.extend([tag['data-resolved-url-large'] for tag in tags])
if 'has_more_items' not in media or media['has_more_items'] is False:
return items
else:
if 'max_id' not in media or media['max_id'] < 1:
max_id = soup.find_all(attrs={'data-tweet-id': True})[-1]['data-tweet-id']
else:
max_id = media['max_id']
return crawl(user_id, items, max_id)
def download(url, save_dir='./'):
if not os.path.exists(save_dir):
os.makedirs(save_dir)
base_name = url.split('/')[-1].split(':')[0]
file_path = os.path.join(save_dir, base_name)
    with open(file_path, 'wb') as f:
        print 'Downloading ' + base_name
        f.write(requests.get(url).content)
if __name__ == '__main__':
user_id = sys.argv[1]
with concurrent.futures.ThreadPoolExecutor(max_workers=10) as executor:
future_to_url = dict( (executor.submit(download, url, './' + user_id), url) for url in crawl(user_id) )
for future in concurrent.futures.as_completed(future_to_url):
url = future_to_url[future]
if future.exception() is not None:
print '%r generated an exception: %s' % (url, future.exception())
| rarcega/twitter-scraper | app.py | Python | unlicense | 1,778 |
from django.conf.urls import patterns, include, url
from django.contrib import admin
from .app.views import FooView
urlpatterns = patterns('',
url(r'^$', FooView.as_view(template_name='home.html'), name='home'),
url(r'^admin/', include(admin.site.urls)),
url(r'^ratings/', include('star_ratings.urls', namespace='ratings', app_name='ratings')),
)
| webu/django-star-ratings | demo/demo/urls.py | Python | bsd-3-clause | 361 |
#!/usr/bin/env python3
"""
Copyright (c) 2014 by nurupo <nurupo.contributions@gmail.com>
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
import socket
import sys
if sys.version_info[0] == 2:
print("This script requires Python 3+ in order to run.")
sys.exit(1)
def print_help() -> None:
"""Print program usage to stdout."""
print("Usage: " + sys.argv[0] + " <ipv4|ipv6> <ip/hostname> <port>")
print(" Example: " + sys.argv[0] + " ipv4 192.210.149.121 33445")
print(" Example: " + sys.argv[0] + " ipv4 23.226.230.47 33445")
print(" Example: " + sys.argv[0] + " ipv4 node.tox.biribiri.org 33445")
print(" Example: " + sys.argv[0] + " ipv4 cerberus.zodiaclabs.org 33445")
print(" Example: " + sys.argv[0] + " ipv6 2604:180:1::3ded:b280 33445")
print("")
print("Return values:")
print(" 0 - received info reply from a node")
print(" 1 - incorrect command line arguments")
print(" 2 - didn't receive any reply from a node")
print(" 3 - received a malformed/unexpected reply")
# https://github.com/irungentoo/toxcore/blob/4940c4c62b6014d1f0586aa6aca7bf6e4ecfcf29/toxcore/network.h#L128
INFO_PACKET_ID = b"\xF0"
# https://github.com/irungentoo/toxcore/blob/881b2d900d1998981fb6b9938ec66012d049635f/other/bootstrap_node_packets.c#L28
INFO_REQUEST_PACKET_LENGTH = 78
# first byte is INFO_PACKET_ID, other bytes don't matter as long as the
# request's length matches INFO_REQUEST_PACKET_LENGTH
INFO_REQUEST_PACKET = INFO_PACKET_ID + (
b"0" * (INFO_REQUEST_PACKET_LENGTH - len(INFO_PACKET_ID)))
PACKET_ID_LENGTH = len(INFO_PACKET_ID)
# https://github.com/irungentoo/toxcore/blob/881b2d900d1998981fb6b9938ec66012d049635f/other/bootstrap_node_packets.c#L44
VERSION_LENGTH = 4
# https://github.com/irungentoo/toxcore/blob/881b2d900d1998981fb6b9938ec66012d049635f/other/bootstrap_node_packets.c#L26
MAX_MOTD_LENGTH = 256
MAX_INFO_RESPONSE_PACKET_LENGTH = PACKET_ID_LENGTH + VERSION_LENGTH + MAX_MOTD_LENGTH
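# Expected info response layout, per the constants above:
#   [1-byte packet id][4-byte big-endian version][up to 256 bytes UTF-8 MOTD]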
SOCK_TIMEOUT_SECONDS = 1.0
def main(protocol: str, host: str, port: int) -> None:
"""Call the bootstrap node info RPC and output the response."""
if protocol == "ipv4":
sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
elif protocol == "ipv6":
sock = socket.socket(socket.AF_INET6, socket.SOCK_DGRAM)
else:
print("Invalid first argument")
print_help()
sys.exit(1)
sock.sendto(INFO_REQUEST_PACKET, (host, port))
sock.settimeout(SOCK_TIMEOUT_SECONDS)
try:
data, _ = sock.recvfrom(MAX_INFO_RESPONSE_PACKET_LENGTH)
except socket.timeout:
print("The DHT bootstrap node didn't reply in " +
str(SOCK_TIMEOUT_SECONDS) + " sec.")
print("The likely reason for that is that the DHT bootstrap node "
"is either offline or has no info set.")
sys.exit(2)
packet_id = data[:PACKET_ID_LENGTH]
if packet_id != INFO_PACKET_ID:
print("Bad response, first byte should be {info_packet_id!r}"
" but got {packet_id!r}({data!r})".format(
info_packet_id=INFO_PACKET_ID,
packet_id=packet_id,
data=data,
))
print("Are you sure that you are pointing the script at a Tox "
"DHT bootstrap node and that the script is up to date?")
sys.exit(3)
version = int.from_bytes(data[PACKET_ID_LENGTH:PACKET_ID_LENGTH +
VERSION_LENGTH],
byteorder="big")
motd = data[PACKET_ID_LENGTH + VERSION_LENGTH:].decode("utf-8")
print("Version: " + str(version))
print("MOTD: " + motd)
sys.exit(0)
if __name__ == "__main__":
if len(sys.argv) != 4:
print_help()
sys.exit(1)
main(
protocol=sys.argv[1],
host=sys.argv[2],
port=int(sys.argv[3]),
)
| TokTok/toxcore | other/fun/bootstrap_node_info.py | Python | gpl-3.0 | 4,847 |
coord = (47.606165, -122.332233)  # Assignment
print "coord =", coord
## Tuple item access
# - Indexing
print "Latitude =", coord[0], \
", Longitude =", coord[1]
(latitude, longitude) = coord  # - Unpacking
print "Latitude =", latitude, \
", Longitude =", longitude
| safl/chplforpyp-docs | docs/source/examples/tuples.py | Python | apache-2.0 | 357 |
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import collections
from heat.common.i18n import _
from heat.common import exception
from heat.engine import function
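# Sentinel marking a condition that is currently being resolved; encountering
# it again during resolution indicates a circular definition.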
_in_progress = object()
class Conditions(object):
def __init__(self, conditions_dict):
assert isinstance(conditions_dict, collections.abc.Mapping)
self._conditions = conditions_dict
self._resolved = {}
def validate(self):
for name, cond in self._conditions.items():
self._check_condition_type(name, cond)
function.validate(cond)
def _resolve(self, condition_name):
resolved = function.resolve(self._conditions[condition_name])
self._check_condition_type(condition_name, resolved)
return resolved
def _check_condition_type(self, condition_name, condition_defn):
if not isinstance(condition_defn, (bool, function.Function)):
msg_data = {'cd': condition_name, 'definition': condition_defn}
message = _('The definition of condition "%(cd)s" is invalid: '
'%(definition)s') % msg_data
raise exception.StackValidationFailed(
error='Condition validation error',
message=message)
def is_enabled(self, condition_name):
if condition_name is None:
return True
if isinstance(condition_name, bool):
return condition_name
if not (isinstance(condition_name, str) and
condition_name in self._conditions):
raise ValueError(_('Invalid condition "%s"') % condition_name)
if condition_name not in self._resolved:
self._resolved[condition_name] = _in_progress
self._resolved[condition_name] = self._resolve(condition_name)
result = self._resolved[condition_name]
if result is _in_progress:
message = _('Circular definition for condition '
'"%s"') % condition_name
raise exception.StackValidationFailed(
error='Condition validation error',
message=message)
return result
def __repr__(self):
return 'Conditions(%r)' % self._conditions
| openstack/heat | heat/engine/conditions.py | Python | apache-2.0 | 2,745 |
# Copyright 2015 Rackspace.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import uuid
from neutron.common import exceptions as n_exc
from neutron.extensions import portbindings
from neutron.i18n import _LW
from neutron.plugins.common import constants
from oslo_log import log as logging
import oslo_messaging as messaging
from neutron_lbaas.db.loadbalancer import loadbalancer_dbv2
from neutron_lbaas.db.loadbalancer import models as db_models
from neutron_lbaas.services.loadbalancer import data_models
LOG = logging.getLogger(__name__)
class LoadBalancerCallbacks(object):
# history
# 1.0 Initial version
target = messaging.Target(version='1.0')
def __init__(self, plugin):
super(LoadBalancerCallbacks, self).__init__()
self.plugin = plugin
def get_ready_devices(self, context, host=None):
with context.session.begin(subtransactions=True):
agents = self.plugin.db.get_lbaas_agents(
context, filters={'host': [host]})
if not agents:
return []
elif len(agents) > 1:
LOG.warning(_LW('Multiple lbaas agents found on host %s'),
host)
loadbalancers = self.plugin.db.list_loadbalancers_on_lbaas_agent(
context, agents[0].id)
loadbalancer_ids = [
l.id for l in loadbalancers]
qry = context.session.query(
loadbalancer_dbv2.models.LoadBalancer.id)
qry = qry.filter(
loadbalancer_dbv2.models.LoadBalancer.id.in_(
loadbalancer_ids))
qry = qry.filter(
loadbalancer_dbv2.models.LoadBalancer.provisioning_status.in_(
constants.ACTIVE_PENDING_STATUSES))
up = True # makes pep8 and sqlalchemy happy
qry = qry.filter(
loadbalancer_dbv2.models.LoadBalancer.admin_state_up == up)
return [id for id, in qry]
def get_loadbalancer(self, context, loadbalancer_id=None):
lb_model = self.plugin.db.get_loadbalancer(context, loadbalancer_id)
if lb_model.vip_port and lb_model.vip_port.fixed_ips:
for fixed_ip in lb_model.vip_port.fixed_ips:
subnet_dict = self.plugin.db._core_plugin.get_subnet(
context, fixed_ip.subnet_id
)
setattr(fixed_ip, 'subnet', data_models.Subnet.from_dict(
subnet_dict))
if lb_model.provider:
device_driver = self.plugin.drivers[
lb_model.provider.provider_name].device_driver
setattr(lb_model.provider, 'device_driver', device_driver)
lb_dict = lb_model.to_dict(stats=False)
return lb_dict
def loadbalancer_deployed(self, context, loadbalancer_id):
with context.session.begin(subtransactions=True):
qry = context.session.query(db_models.LoadBalancer)
qry = qry.filter_by(id=loadbalancer_id)
loadbalancer = qry.one()
# set all resources to active
if (loadbalancer.provisioning_status in
constants.ACTIVE_PENDING_STATUSES):
loadbalancer.provisioning_status = constants.ACTIVE
if loadbalancer.listeners:
for l in loadbalancer.listeners:
if (l.provisioning_status in
constants.ACTIVE_PENDING_STATUSES):
l.provisioning_status = constants.ACTIVE
if (l.default_pool
and l.default_pool.provisioning_status in
constants.ACTIVE_PENDING_STATUSES):
l.default_pool.provisioning_status = constants.ACTIVE
if l.default_pool.members:
for m in l.default_pool.members:
if (m.provisioning_status in
constants.ACTIVE_PENDING_STATUSES):
m.provisioning_status = constants.ACTIVE
if l.default_pool.healthmonitor:
hm = l.default_pool.healthmonitor
ps = hm.provisioning_status
if ps in constants.ACTIVE_PENDING_STATUSES:
(l.default_pool.healthmonitor
.provisioning_status) = constants.ACTIVE
def update_status(self, context, obj_type, obj_id,
provisioning_status=None, operating_status=None):
if not provisioning_status and not operating_status:
            LOG.warning(_LW('update_status for %(obj_type)s %(obj_id)s called '
                            'without specifying provisioning_status or '
                            'operating_status'),
                        {'obj_type': obj_type, 'obj_id': obj_id})
return
model_mapping = {
'loadbalancer': db_models.LoadBalancer,
'pool': db_models.PoolV2,
'listener': db_models.Listener,
'member': db_models.MemberV2,
'healthmonitor': db_models.HealthMonitorV2
}
if obj_type not in model_mapping:
raise n_exc.Invalid(_('Unknown object type: %s') % obj_type)
try:
self.plugin.db.update_status(
context, model_mapping[obj_type], obj_id,
provisioning_status=provisioning_status,
operating_status=operating_status)
except n_exc.NotFound:
# update_status may come from agent on an object which was
# already deleted from db with other request
LOG.warning(_LW('Cannot update status: %(obj_type)s %(obj_id)s '
'not found in the DB, it was probably deleted '
'concurrently'),
{'obj_type': obj_type, 'obj_id': obj_id})
def loadbalancer_destroyed(self, context, loadbalancer_id=None):
"""Agent confirmation hook that a load balancer has been destroyed.
This method exists for subclasses to change the deletion
behavior.
"""
pass
def plug_vip_port(self, context, port_id=None, host=None):
if not port_id:
return
try:
port = self.plugin.db._core_plugin.get_port(
context,
port_id
)
except n_exc.PortNotFound:
LOG.debug('Unable to find port %s to plug.', port_id)
return
port['admin_state_up'] = True
port['device_owner'] = 'neutron:' + constants.LOADBALANCERV2
port['device_id'] = str(uuid.uuid5(uuid.NAMESPACE_DNS, str(host)))
port[portbindings.HOST_ID] = host
self.plugin.db._core_plugin.update_port(
context,
port_id,
{'port': port}
)
def unplug_vip_port(self, context, port_id=None, host=None):
if not port_id:
return
try:
port = self.plugin.db._core_plugin.get_port(
context,
port_id
)
except n_exc.PortNotFound:
LOG.debug('Unable to find port %s to unplug. This can occur when '
'the Vip has been deleted first.',
port_id)
return
port['admin_state_up'] = False
port['device_owner'] = ''
port['device_id'] = ''
try:
self.plugin.db._core_plugin.update_port(
context,
port_id,
{'port': port}
)
except n_exc.PortNotFound:
LOG.debug('Unable to find port %s to unplug. This can occur when '
'the Vip has been deleted first.',
port_id)
def update_loadbalancer_stats(self, context,
loadbalancer_id=None,
stats=None):
self.plugin.db.update_loadbalancer_stats(context, loadbalancer_id,
stats)
| gotostack/neutron-lbaas | neutron_lbaas/drivers/common/agent_callbacks.py | Python | apache-2.0 | 8,743 |
# Part of Patient Flow.
# See LICENSE file for full copyright and licensing details.
from openerp.tests import common
from datetime import datetime as dt
from openerp.tools import DEFAULT_SERVER_DATETIME_FORMAT as dtf
import logging
_logger = logging.getLogger(__name__)
from faker import Faker
fake = Faker()
seed = fake.random_int(min=0, max=9999999)
def next_seed():
global seed
seed += 1
return seed
class TestOpsPatientFlow(common.SingleTransactionCase):
@classmethod
def setUpClass(cls):
super(TestOpsPatientFlow, cls).setUpClass()
cr, uid = cls.cr, cls.uid
cls.users_pool = cls.registry('res.users')
cls.groups_pool = cls.registry('res.groups')
cls.partner_pool = cls.registry('res.partner')
cls.activity_pool = cls.registry('nh.activity')
cls.patient_pool = cls.registry('nh.clinical.patient')
cls.location_pool = cls.registry('nh.clinical.location')
cls.pos_pool = cls.registry('nh.clinical.pos')
cls.spell_pool = cls.registry('nh.clinical.spell')
# OPERATIONS DATA MODELS
cls.referral_pool = cls.registry('nh.clinical.patient.referral')
cls.form_pool = cls.registry('nh.clinical.patient.referral.form')
cls.tci_pool = cls.registry('nh.clinical.patient.tci')
cls.apidemo = cls.registry('nh.clinical.api.demo')
cls.patient_ids = cls.apidemo.build_unit_test_env2(cr, uid)
cls.wu_id = cls.location_pool.search(cr, uid, [('code', '=', 'U')])[0]
cls.wt_id = cls.location_pool.search(cr, uid, [('code', '=', 'T')])[0]
cls.pos_id = cls.location_pool.read(cr, uid, cls.wu_id, ['pos_id'])['pos_id'][0]
cls.pos_location_id = cls.pos_pool.read(cr, uid, cls.pos_id, ['location_id'])['location_id'][0]
cls.ru_id = cls.users_pool.search(cr, uid, [('login', '=', 'RU')])[0] #Receptionist on ward U
cls.rt_id = cls.users_pool.search(cr, uid, [('login', '=', 'RT')])[0] #Receptionist on ward T
cls.jdu_id = cls.users_pool.search(cr, uid, [('login', '=', 'JDU')])[0] #Junior Doctor on ward U
cls.jdt_id = cls.users_pool.search(cr, uid, [('login', '=', 'JDT')])[0] #Junior Doctor on ward T
cls.cu_id = cls.users_pool.search(cr, uid, [('login', '=', 'CU')])[0] #Consultant on ward U
cls.ct_id = cls.users_pool.search(cr, uid, [('login', '=', 'CT')])[0] #Consultant on ward T
cls.regu_id = cls.users_pool.search(cr, uid, [('login', '=', 'REGU')])[0] #Registrar on ward U
cls.regt_id = cls.users_pool.search(cr, uid, [('login', '=', 'REGT')])[0] #Registrar on ward T
cls.refteam_id = cls.users_pool.search(cr, uid, [('login', '=', 'RT1')])[0] #Referral Team User
cls.adt_id = cls.users_pool.search(cr, uid, [('groups_id.name', 'in', ['NH Clinical ADT Group']), ('pos_id', '=', cls.pos_id)])[0]
def test_referral_form(self):
cr, uid = self.cr, self.uid
# Submit an empty form
form_id = self.form_pool.create(cr, self.refteam_id, {})
self.assertTrue(form_id, msg="Referral form not created")
form = self.form_pool.browse(cr, uid, form_id)
self.assertTrue(form.source == 'gp', msg="Referral form created: incorrect default source")
self.assertTrue(form.gender == 'NSP', msg="Referral form created: incorrect default gender")
self.assertTrue(form.ethnicity == 'Z', msg="Referral form created: incorrect default ethnicity")
self.assertTrue(form.patient_id, msg="Referral form created: patient not created automatically")
self.assertTrue(form.patient_id.gender == 'NSP', msg="Referral form created: incorrect default patient gender")
self.assertTrue(form.patient_id.ethnicity == 'Z', msg="Referral form created: incorrect default patient ethnicity")
self.assertTrue(form.source == 'gp', msg="Referral form created: incorrect default source")
referral_id = self.referral_pool.search(cr, uid, [['patient_id', '=', form.patient_id.id]])
self.assertTrue(referral_id, msg="Referral form created: referral activity not triggered")
referral = self.referral_pool.browse(cr, uid, referral_id[0])
self.assertTrue(referral.form_id.id == form_id, msg="Referral triggered: referral form not linked correctly")
# Submit a form for an existing patient
patient_ids = self.patient_ids
patient_id = fake.random_element(patient_ids)
form_data = {
'patient_id': patient_id,
'gender': 'U',
'middle_names': 'John'
}
try:
form_id = self.form_pool.create(cr, self.refteam_id, form_data)
except Exception as e:
self.assertTrue(e.args[1].startswith("Cannot submit form. The values in the form do not match the selected patient data"), msg="Unexpected reaction to attempt to create a form with an existing patient (not matching data)!")
else:
assert False, "Form successfully created with an existing patient (not matching data)!"
patient = self.patient_pool.browse(cr, uid, patient_id)
form_data = {
'nhs_number': patient.patient_identifier,
'hospital_number': '0000000001'
}
try:
form_id = self.form_pool.create(cr, self.refteam_id, form_data)
except Exception as e:
self.assertTrue(e.args[1].startswith("Cannot submit form. There is already a patient in the system with that NHS number"), msg="Unexpected reaction to attempt to create a form with an existing patient (not matching data)!")
else:
assert False, "Form successfully created with an existing patient (not matching data)!"
form_data = {
'nhs_number': '0000000001',
'hospital_number': patient.other_identifier
}
try:
form_id = self.form_pool.create(cr, self.refteam_id, form_data)
except Exception as e:
self.assertTrue(e.args[1].startswith("Cannot submit form. There is already a patient in the system with that hospital number"), msg="Unexpected reaction to attempt to create a form with an existing patient (not matching data)!")
else:
assert False, "Form successfully created with an existing patient (not matching data)!"
def test_referral(self):
cr, uid = self.cr, self.uid
patient_ids = self.patient_ids
patient_id = fake.random_element(patient_ids)
code = str(fake.random_int(min=1000001, max=9999999))
spell_data = {
'patient_id': patient_id,
'pos_id': self.pos_id,
'code': code,
'start_date': dt.now().strftime(dtf)}
spell_activity_id = self.spell_pool.create_activity(cr, uid, {}, spell_data)
self.activity_pool.start(cr, uid, spell_activity_id)
# Patient To Come In
tci_data = {
'location_id': self.wu_id,
'patient_id': patient_id
}
tci_activity_id = self.tci_pool.create_activity(cr, uid, {'pos_id': self.pos_id}, {})
self.activity_pool.submit(cr, self.ru_id, tci_activity_id, tci_data)
check_tci = self.activity_pool.browse(cr, uid, tci_activity_id)
# test tci activity submitted data
self.assertTrue(check_tci.data_ref.patient_id.id == patient_id, msg="Patient To Come In: Patient id was not submitted correctly")
self.assertTrue(check_tci.data_ref.location_id.id == self.wu_id, msg="Patient To Come In: location id was not submitted correctly")
# Complete Patient To Come In
self.activity_pool.complete(cr, self.ru_id, tci_activity_id)
check_tci = self.activity_pool.browse(cr, uid, tci_activity_id)
self.assertTrue(check_tci.state == 'completed', msg="Patient To Come In not completed successfully")
self.assertTrue(check_tci.date_terminated, msg="Patient To Come In Completed: Date terminated not registered")
# test spell data
check_spell = self.activity_pool.browse(cr, uid, spell_activity_id)
self.assertTrue(check_spell.data_ref.location_id.id == self.wu_id, msg= "Patient To Come In Completed: Spell location not registered correctly")
# test patient data
check_patient = self.patient_pool.browse(cr, uid, patient_id)
        self.assertTrue(check_patient.current_location_id.id == self.wu_id, msg="Patient To Come In Completed: Patient current location not registered correctly")
| NeovaHealth/patientflow | nh_patient_flow/tests/test_operations.py | Python | agpl-3.0 | 8,482 |
class GrammaticalError(Exception):
    def __init__(self, expression, message):
        super(GrammaticalError, self).__init__(message)
        self.expression = expression
        self.message = message
class QuotationError(Exception):
    def __init__(self, expression, message):
        super(QuotationError, self).__init__(message)
        self.expression = expression
        self.message = message
#class NounificationError(Exception):
# def __init__(self, expression, message):
# self.expression = expression
# self.message = message
| ProjetPP/PPP-QuestionParsing-Grammatical | ppp_questionparsing_grammatical/data/exceptions.py | Python | agpl-3.0 | 450 |
""" simple non-constant constant. Ie constant which does not get annotated as constant
"""
from rpython.rtyper.extregistry import ExtRegistryEntry
from rpython.flowspace.model import Constant
from rpython.annotator.model import not_const
class NonConstant(object):
def __init__(self, _constant):
self.__dict__['constant'] = _constant
def __getattr__(self, attr):
return getattr(self.__dict__['constant'], attr)
def __setattr__(self, attr, value):
setattr(self.__dict__['constant'], attr, value)
def __nonzero__(self):
return bool(self.__dict__['constant'])
def __eq__(self, other):
return self.__dict__['constant'] == other
def __add__(self, other):
return self.__dict__['constant'] + other
def __radd__(self, other):
return other + self.__dict__['constant']
def __mul__(self, other):
return self.__dict__['constant'] * other
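# Illustrative usage (sketch): wrap a value so the annotator treats it as a
# variable rather than a compile-time constant, e.g. ``x = NonConstant(5)``.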
class EntryNonConstant(ExtRegistryEntry):
_about_ = NonConstant
def compute_result_annotation(self, s_arg):
return not_const(s_arg)
def specialize_call(self, hop):
hop.exception_cannot_occur()
return hop.inputarg(hop.r_result, arg=0)
| oblique-labs/pyVM | rpython/rlib/nonconst.py | Python | mit | 1,205 |
# -*- coding: utf-8 -*-
"""
Copyright [2009-2020] EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from .rna_type import *
from .extent import *
from .location import *
from .cluster import *
from .state import *
from .context import *
from .methods import *
| RNAcentral/rnacentral-import-pipeline | rnacentral_pipeline/rnacentral/genes/data/__init__.py | Python | apache-2.0 | 773 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import sys, logging, glob
from PyQt4 import QtCore, QtGui
#from pytestqt.qt_compat import qWarning
import gui
import metadata
import xmeml
from gui import main
logging.basicConfig(level=logging.WARNING)
def test_basic_window(qtbot, tmpdir):
app = QtGui.QApplication([])
odo = main.Odometer(app)
odo.show()
qtbot.addWidget(odo)
#qtbot.mouseClick(odo.ui.loadFileButton, QtCore.Qt.LeftButton)
    for sample_path in glob.glob('xmemlsamples/*.xml'):
        with qtbot.waitSignal(odo.loaded, timeout=10000):
            odo.loadxml(sample_path)  # load xmeml in thread
# xmeml fully loaded here
# run some tests to check the health of what we loaded
#assert len(odo.audioclips) > 0
| havardgulldahl/pling-plong-odometer | test/test_gui.py | Python | gpl-3.0 | 765 |
from libdotfiles.packages import try_install
from libdotfiles.util import run
try_install("bluez")
try_install("bluez-utils")
try_install("blueman")
run(["sudo", "systemctl", "enable", "bluetooth"])
run(["sudo", "systemctl", "start", "bluetooth"])
run(
[
"sudo",
"sh",
"-c",
'sed -i "s/#AutoEnable=false/AutoEnable=true/" /etc/bluetooth/main.conf',
]
)
| rr-/dotfiles | cfg/bluetooth/__main__.py | Python | mit | 396 |
"""
examples of reportlab document using
BaseDocTemplate with
2 PageTemplate (one and two columns)
"""
import os
from reportlab.platypus import BaseDocTemplate, Frame, Paragraph, NextPageTemplate, PageBreak, PageTemplate
from reportlab.lib.units import inch
from reportlab.lib.styles import getSampleStyleSheet
styles=getSampleStyleSheet()
Elements=[]
doc = BaseDocTemplate('basedoc.pdf',showBoundary=1)
def foot1(canvas,doc):
canvas.saveState()
canvas.setFont('Times-Roman',19)
canvas.drawString(inch, 0.75 * inch, "Page %d" % doc.page)
canvas.restoreState()
def foot2(canvas,doc):
canvas.saveState()
canvas.setFont('Times-Roman',9)
canvas.drawString(inch, 0.75 * inch, "Page %d" % doc.page)
canvas.restoreState()
#normal frame as for SimpleFlowDocument
frameT = Frame(doc.leftMargin, doc.bottomMargin, doc.width, doc.height, id='normal')
#Two Columns
frame1 = Frame(doc.leftMargin, doc.bottomMargin, doc.width/2-6, doc.height, id='col1')
frame2 = Frame(doc.leftMargin+doc.width/2+6, doc.bottomMargin, doc.width/2-6,
doc.height, id='col2')
Elements.append(Paragraph("Frame one column, "*500,styles['Normal']))
Elements.append(NextPageTemplate('TwoCol'))
Elements.append(PageBreak())
Elements.append(Paragraph("Frame two columns, "*500,styles['Normal']))
Elements.append(NextPageTemplate('OneCol'))
Elements.append(PageBreak())
Elements.append(Paragraph("Une colonne",styles['Normal']))
doc.addPageTemplates([PageTemplate(id='OneCol',frames=frameT,onPage=foot1),
PageTemplate(id='TwoCol',frames=[frame1,frame2],onPage=foot2),
])
#start the construction of the pdf
doc.build(Elements)
# use external program xpdf to view the generated pdf
os.system("xpdf basedoc.pdf")
| ActiveState/code | recipes/Python/123612_BaseDocTemplate_2/recipe-123612.py | Python | mit | 1,791 |
import os
import time
from unittest.mock import patch
import zmq
import pytest
from zerolog.receiver import Receiver
BASE_DIR = os.path.dirname(os.path.realpath(__file__))
def test_main_receiver(context):
"""Receiver should correctly run"""
sender = context.socket(zmq.PUB)
sender.bind("tcp://127.0.0.1:6700")
worker = context.socket(zmq.PULL)
worker.connect("tcp://127.0.0.1:6200")
time.sleep(1)
receiver = Receiver("127.0.0.1", 6700, output_port=6200)
sender.send_multipart([b"topic", b"test"])
data = receiver.recv_data()
assert data is not None
receiver.ventilator.send(data)
data = worker.recv()
assert data is not None
sender.send_multipart([b"topic", b"test"])
receiver.ventilator = None # remove socket to force exception to be raised
with pytest.raises(AttributeError):
receiver.run()
@patch('zerolog.receiver.zmq.Context.socket')
def test_receiver_error(socket):
"""Receiver should correctly raise errors"""
with pytest.raises(TypeError):
Receiver("127.0.0.1", 6700, output_port=0, output_socket="bad.sock")
@patch('zerolog.receiver.zmq.Context.socket')
def test_receiver_ipc(socket):
"""Receiver should be able to use ipc socket"""
Receiver("127.0.0.1", 6700, output_socket="/tmp/test.sock")
@patch('zerolog.receiver.zmq.Context.socket')
def test_receiver_no_args(socket):
"""Receiver should be able to instanciate without output arguments"""
Receiver("127.0.0.1", 6700)
@patch('zerolog.receiver.zmq.Socket.bind')
def test_receiver_log_config(bind):
"""Receiver should be able to use logging configuration file"""
cf = os.path.join(BASE_DIR, "fixtures/log.cfg")
Receiver("127.0.0.1", 6700, output_socket="/tmp/test.sock", logging_config=cf)
| TheGhouls/zerolog | tests/test_receiver.py | Python | mit | 1,791 |
import urllib.request, urllib.parse, urllib.error
from pyparsing import makeHTMLTags, SkipTo
# read HTML from a web page
serverListPage = urllib.request.urlopen( "http://www.yahoo.com" )
htmlText = serverListPage.read()
serverListPage.close()
# using makeHTMLTags to define opening and closing tags
anchorStart,anchorEnd = makeHTMLTags("a")
# compose an expression for an anchored reference
anchor = anchorStart + SkipTo(anchorEnd)("body") + anchorEnd
# use scanString to scan through the HTML source, extracting
# just the anchor tags and their associated body text
# (note the href attribute of the opening A tag is available
# as an attribute in the returned parse results)
for tokens,start,end in anchor.scanString(htmlText):
print(tokens.body,'->',tokens.href)
| miguelalexanderdiaz/lenguajes_project | pyparsing-2.0.2/examples/makeHTMLTagExample.py | Python | gpl-2.0 | 796 |
import os
import sys
import mock
from nose.tools import with_setup, raises, ok_, eq_
from atve.application import AtveTestRunner
from atve.workspace import Workspace
from atve.exception import *
class TestAtveTestRunner(object):
@classmethod
def setup(cls):
cls.runner = AtveTestRunner()
cls.root = os.path.normpath(os.path.join(os.path.dirname(__file__)))
cls.script_path = os.path.join(cls.root, "data")
cls.workspace = Workspace(os.path.join(cls.root, "workspace"))
cls.report_path = cls.workspace.mkdir("report")
@classmethod
def teardown(cls):
cls.workspace.rmdir("")
@with_setup(setup, teardown)
def test_atvetestrunner_execute_success_01(self):
with mock.patch('sys.argv', ['atvetestrunner.py', 'notdefine.py']):
self.runner.execute("success.py", self.script_path, v=0)
@with_setup(setup, teardown)
def test_atvetestrunner_execute_success_02(self):
with mock.patch('sys.argv', ['atvetestrunner.py', 'notdefine.py']):
self.runner.execute("failed.py", self.script_path, v=0)
@with_setup(setup, teardown)
def test_atvetestrunner_execute_success_03(self):
with mock.patch('sys.argv', ['atvetestrunner.py', 'notdefine.py']):
self.runner.execute("notdefine.py", self.script_path, v=0)
@with_setup(setup, teardown)
def test_atvetestrunner_execute_success_04(self):
self.runner.execute("notdefine", self.script_path)
@with_setup(setup, teardown)
@raises(TestRunnerError)
def test_atvetestrunner_execute_failed_01(self):
self.runner.execute("notexists.py", self.script_path)
@with_setup(setup, teardown)
@raises(TestRunnerError)
def test_atvetestrunner_execute_failed_02(self):
self.runner.execute("success.py", self.workspace.mkdir("script"))
@with_setup(setup, teardown)
@raises(TestRunnerError)
def test_atvetestrunner_execute_failed_03(self):
with mock.patch('sys.argv', ['atvetestrunner.py', 'notdefine.py']):
self.runner.execute("not.pydefine", self.script_path, v=0)
@with_setup(setup, teardown)
def test_atvetestrunner_execute_with_report_success_01(self):
with mock.patch('sys.argv', ['atvetestrunner.py', 'notdefine.py']):
self.runner.execute_with_report(
"success.py", self.script_path, self.report_path)
ok_(len(os.listdir(self.report_path)) > 0)
@with_setup(setup, teardown)
def test_atvetestrunner_execute_with_report_success_02(self):
with mock.patch('sys.argv', ['atvetestrunner.py', 'notdefine.py']):
self.runner.execute_with_report(
"failed.py", self.script_path, self.report_path)
ok_(len(os.listdir(self.report_path)) > 0)
@with_setup(setup, teardown)
def test_atvetestrunner_execute_with_report_success_03(self):
with mock.patch('sys.argv', ['atvetestrunner.py', 'notdefine.py']):
self.runner.execute_with_report(
"notdefine.py", self.script_path, self.report_path)
ok_(len(os.listdir(self.report_path)) == 0)
@with_setup(setup, teardown)
def test_atvetestrunner_execute_with_report_success_04(self):
with mock.patch('sys.argv', ['atvetestrunner.py', 'notdefine.py']):
self.runner.execute_with_report(
"notdefine", self.script_path, self.report_path)
ok_(len(os.listdir(self.report_path)) == 0)
@with_setup(setup, teardown)
@raises(TestRunnerError)
def test_atvetestrunner_execute_with_report_failed_01(self):
with mock.patch('sys.argv', ['atvetestrunner.py', 'notdefine.py']):
self.runner.execute_with_report(
"notexists.py", self.script_path, self.report_path)
@with_setup(setup, teardown)
@raises(TestRunnerError)
def test_atvetestrunner_execute_with_report_failed_02(self):
with mock.patch('sys.argv', ['atvetestrunner.py', 'notdefine.py']):
self.runner.execute_with_report(
"success.py", self.workspace.mkdir("script"), self.report_path)
@with_setup(setup, teardown)
@raises(TestRunnerError)
def test_atvetestrunner_execute_with_report_failed_03(self):
with mock.patch('sys.argv', ['atvetestrunner.py', 'notdefine.py']):
self.runner.execute_with_report(
"success.py", self.script_path, os.path.join(self.workspace.root(), "hoge"))
@with_setup(setup, teardown)
@raises(TestRunnerError)
def test_atvetestrunner_execute_with_report_failed_04(self):
with mock.patch('sys.argv', ['atvetestrunner.py', 'notdefine.py']):
self.runner.execute_with_report(
"not.pydefine", self.script_path, self.report_path)
| TE-ToshiakiTanaka/atve | test/test_atvetestrunner.py | Python | mit | 4,794 |
#!/usr/bin/env python
# coding: utf-8
import os
import sys
import json
import shutil
max_partition = 3
def check_remove_create(loc):
if os.path.exists(loc):
shutil.rmtree(loc)
os.mkdir(loc)
return loc
def check_create_dir(location_dir, sub_dir):
loc = os.path.join(location_dir, sub_dir)
return check_remove_create(loc)
def mk_sample_log(location):
return check_create_dir(location, "log")
def mk_sample_instance(location):
return check_create_dir(location, "phx")
def mk_sample_data(location):
return check_create_dir(location, "data")
def mk_sample(sample_location):
return check_create_dir(sample_location, "sample")
def mk_config(location):
return check_create_dir(location, "config")
def mk_pf_config(config_loc, instance_loc, bloom_loc, index):
config_file = os.path.join(config_loc, "pfconfig%d.json" % int(index))
config = {
"instance_path": check_create_dir(instance_loc, "phx%d" % int(index)),
"bloom_path": check_create_dir(bloom_loc, "bloom%d" % int(index)),
"bloom_max_items": 1000000,
"bloom_error_rate": 0.00001,
"bloom_hash_seed": 0,
"bloom_max_partitions": max_partition,
"bloom_hash_func": "murmurhash128",
}
with open(config_file, "w") as conf:
json.dump(config, conf)
return config_file
def mk_phxpf_config(config_loc, phxlog_loc, syslog_loc, pfconf_file, index):
config_file = os.path.join(config_loc, "phxpf%d.json" % int(index))
config = {
"io_thread_count": 1,
"batch_size": 1000,
"batch_timeout_ms": 10,
"use_master": 1,
"log_level": 3,
"max_log_size": 128, # m
"rpc_addr": "0.0.0.0:%d" % (11261 + int(index) - 1),
"nodeaddr": "0.0.0.0:%d" % (11111 + int(index) - 1),
"nodelist": "0.0.0.0:11111,0.0.0.0:11112,0.0.0.0:11113",
"paxos_log_path": check_create_dir(phxlog_loc, "phx%d" % int(index)),
"sys_log_path": check_create_dir(syslog_loc, "log%d" % int(index)),
"pf_config_file": pfconf_file,
}
with open(config_file, "w") as conf:
json.dump(config, conf)
return config_file
def usage(prog):
    print "Usage: %s <sample_dir> <nodenum> [max_partition]" % prog
    exit(-1)
def create_config_with_dir(work_dir, nodenum):
sample_work_dir = mk_sample(work_dir)
sample_config_dir = mk_config(sample_work_dir)
sample_log_dir = mk_sample_log(sample_work_dir)
sample_instance_dir = mk_sample_instance(sample_work_dir)
sample_data_dir = mk_sample_data(sample_work_dir)
for i in range(1, int(nodenum) + 1):
pf_config = mk_pf_config(
sample_config_dir, sample_instance_dir, sample_data_dir, i)
        phxpf_config = mk_phxpf_config(
            sample_config_dir, sample_log_dir, sample_log_dir, pf_config, i)
        print "partition filter config: %s done" % pf_config
        print "phxpaxos partition filter config: %s done" % phxpf_config
if __name__ == "__main__":
print sys.argv
if len(sys.argv) < 3:
usage(sys.argv[0])
work_dir = sys.argv[1]
nodenum = int(sys.argv[2])
if len(sys.argv) > 3:
max_partition = int(sys.argv[3])
check_remove_create(work_dir)
create_config_with_dir(work_dir, nodenum)
| ns-xlz/nginx_by_xlz | config/sample.py | Python | gpl-3.0 | 3,285 |
""" Tests for utils. """
import collections
import copy
import mock
from datetime import datetime, timedelta
from pytz import UTC
from django.test import TestCase
from django.test.utils import override_settings
from contentstore import utils
from contentstore.tests.utils import CourseTestCase
from xmodule.modulestore import ModuleStoreEnum
from xmodule.modulestore.tests.factories import CourseFactory, ItemFactory
from opaque_keys.edx.locations import SlashSeparatedCourseKey, Location
from xmodule.modulestore.django import modulestore
class LMSLinksTestCase(TestCase):
""" Tests for LMS links. """
def about_page_test(self):
""" Get URL for about page, no marketing site """
# default for ENABLE_MKTG_SITE is False.
self.assertEquals(self.get_about_page_link(), "//localhost:8000/courses/mitX/101/test/about")
@override_settings(MKTG_URLS={'ROOT': 'dummy-root'})
def about_page_marketing_site_test(self):
""" Get URL for about page, marketing root present. """
with mock.patch.dict('django.conf.settings.FEATURES', {'ENABLE_MKTG_SITE': True}):
self.assertEquals(self.get_about_page_link(), "//dummy-root/courses/mitX/101/test/about")
with mock.patch.dict('django.conf.settings.FEATURES', {'ENABLE_MKTG_SITE': False}):
self.assertEquals(self.get_about_page_link(), "//localhost:8000/courses/mitX/101/test/about")
@override_settings(MKTG_URLS={'ROOT': 'http://www.dummy'})
def about_page_marketing_site_remove_http_test(self):
""" Get URL for about page, marketing root present, remove http://. """
with mock.patch.dict('django.conf.settings.FEATURES', {'ENABLE_MKTG_SITE': True}):
self.assertEquals(self.get_about_page_link(), "//www.dummy/courses/mitX/101/test/about")
@override_settings(MKTG_URLS={'ROOT': 'https://www.dummy'})
def about_page_marketing_site_remove_https_test(self):
""" Get URL for about page, marketing root present, remove https://. """
with mock.patch.dict('django.conf.settings.FEATURES', {'ENABLE_MKTG_SITE': True}):
self.assertEquals(self.get_about_page_link(), "//www.dummy/courses/mitX/101/test/about")
@override_settings(MKTG_URLS={'ROOT': 'www.dummyhttps://x'})
def about_page_marketing_site_https__edge_test(self):
""" Get URL for about page, only remove https:// at the beginning of the string. """
with mock.patch.dict('django.conf.settings.FEATURES', {'ENABLE_MKTG_SITE': True}):
self.assertEquals(self.get_about_page_link(), "//www.dummyhttps://x/courses/mitX/101/test/about")
@override_settings(MKTG_URLS={})
def about_page_marketing_urls_not_set_test(self):
""" Error case. ENABLE_MKTG_SITE is True, but there is either no MKTG_URLS, or no MKTG_URLS Root property. """
with mock.patch.dict('django.conf.settings.FEATURES', {'ENABLE_MKTG_SITE': True}):
self.assertEquals(self.get_about_page_link(), None)
@override_settings(LMS_BASE=None)
def about_page_no_lms_base_test(self):
""" No LMS_BASE, nor is ENABLE_MKTG_SITE True """
self.assertEquals(self.get_about_page_link(), None)
def get_about_page_link(self):
""" create mock course and return the about page link """
course_key = SlashSeparatedCourseKey('mitX', '101', 'test')
return utils.get_lms_link_for_about_page(course_key)
def lms_link_test(self):
""" Tests get_lms_link_for_item. """
course_key = SlashSeparatedCourseKey('mitX', '101', 'test')
location = course_key.make_usage_key('vertical', 'contacting_us')
link = utils.get_lms_link_for_item(location, False)
self.assertEquals(link, "//localhost:8000/courses/mitX/101/test/jump_to/i4x://mitX/101/vertical/contacting_us")
# test preview
link = utils.get_lms_link_for_item(location, True)
self.assertEquals(
link,
"//preview/courses/mitX/101/test/jump_to/i4x://mitX/101/vertical/contacting_us"
)
# now test with the course' location
location = course_key.make_usage_key('course', 'test')
link = utils.get_lms_link_for_item(location)
self.assertEquals(link, "//localhost:8000/courses/mitX/101/test/jump_to/i4x://mitX/101/course/test")
class ExtraPanelTabTestCase(TestCase):
""" Tests adding and removing extra course tabs. """
def get_tab_type_dicts(self, tab_types):
""" Returns an array of tab dictionaries. """
if tab_types:
return [{'tab_type': tab_type} for tab_type in tab_types.split(',')]
else:
return []
def get_course_with_tabs(self, tabs=None):
""" Returns a mock course object with a tabs attribute. """
if tabs is None:
tabs = []
course = collections.namedtuple('MockCourse', ['tabs'])
if isinstance(tabs, basestring):
course.tabs = self.get_tab_type_dicts(tabs)
else:
course.tabs = tabs
return course
def test_add_extra_panel_tab(self):
""" Tests if a tab can be added to a course tab list. """
for tab_type in utils.EXTRA_TAB_PANELS.keys():
tab = utils.EXTRA_TAB_PANELS.get(tab_type)
# test adding with changed = True
for tab_setup in ['', 'x', 'x,y,z']:
course = self.get_course_with_tabs(tab_setup)
expected_tabs = copy.copy(course.tabs)
expected_tabs.append(tab)
changed, actual_tabs = utils.add_extra_panel_tab(tab_type, course)
self.assertTrue(changed)
self.assertEqual(actual_tabs, expected_tabs)
# test adding with changed = False
tab_test_setup = [
[tab],
[tab, self.get_tab_type_dicts('x,y,z')],
[self.get_tab_type_dicts('x,y'), tab, self.get_tab_type_dicts('z')],
[self.get_tab_type_dicts('x,y,z'), tab]]
for tab_setup in tab_test_setup:
course = self.get_course_with_tabs(tab_setup)
expected_tabs = copy.copy(course.tabs)
changed, actual_tabs = utils.add_extra_panel_tab(tab_type, course)
self.assertFalse(changed)
self.assertEqual(actual_tabs, expected_tabs)
def test_remove_extra_panel_tab(self):
""" Tests if a tab can be removed from a course tab list. """
for tab_type in utils.EXTRA_TAB_PANELS.keys():
tab = utils.EXTRA_TAB_PANELS.get(tab_type)
# test removing with changed = True
tab_test_setup = [
[tab],
[tab, self.get_tab_type_dicts('x,y,z')],
[self.get_tab_type_dicts('x,y'), tab, self.get_tab_type_dicts('z')],
[self.get_tab_type_dicts('x,y,z'), tab]]
for tab_setup in tab_test_setup:
course = self.get_course_with_tabs(tab_setup)
expected_tabs = [t for t in course.tabs if t != utils.EXTRA_TAB_PANELS.get(tab_type)]
changed, actual_tabs = utils.remove_extra_panel_tab(tab_type, course)
self.assertTrue(changed)
self.assertEqual(actual_tabs, expected_tabs)
# test removing with changed = False
for tab_setup in ['', 'x', 'x,y,z']:
course = self.get_course_with_tabs(tab_setup)
expected_tabs = copy.copy(course.tabs)
changed, actual_tabs = utils.remove_extra_panel_tab(tab_type, course)
self.assertFalse(changed)
self.assertEqual(actual_tabs, expected_tabs)
class CourseImageTestCase(TestCase):
"""Tests for course image URLs."""
def test_get_image_url(self):
"""Test image URL formatting."""
course = CourseFactory.create(org='edX', course='999')
url = utils.course_image_url(course)
self.assertEquals(url, '/c4x/edX/999/asset/{0}'.format(course.course_image))
def test_non_ascii_image_name(self):
# Verify that non-ascii image names are cleaned
course = CourseFactory.create(course_image=u'before_\N{SNOWMAN}_after.jpg')
self.assertEquals(
utils.course_image_url(course),
'/c4x/{org}/{course}/asset/before___after.jpg'.format(org=course.location.org, course=course.location.course)
)
def test_spaces_in_image_name(self):
# Verify that image names with spaces in them are cleaned
course = CourseFactory.create(course_image=u'before after.jpg')
self.assertEquals(
utils.course_image_url(course),
'/c4x/{org}/{course}/asset/before_after.jpg'.format(
org=course.location.org,
course=course.location.course
)
)
class XBlockVisibilityTestCase(TestCase):
"""Tests for xblock visibility for students."""
def setUp(self):
self.dummy_user = ModuleStoreEnum.UserID.test
self.past = datetime(1970, 1, 1)
self.future = datetime.now(UTC) + timedelta(days=1)
def test_private_unreleased_xblock(self):
"""Verifies that a private unreleased xblock is not visible"""
self._test_visible_to_students(False, 'private_unreleased', self.future)
def test_private_released_xblock(self):
"""Verifies that a private released xblock is not visible"""
self._test_visible_to_students(False, 'private_released', self.past)
def test_public_unreleased_xblock(self):
"""Verifies that a public (published) unreleased xblock is not visible"""
self._test_visible_to_students(False, 'public_unreleased', self.future, publish=True)
def test_public_released_xblock(self):
"""Verifies that public (published) released xblock is visible if staff lock is not enabled."""
self._test_visible_to_students(True, 'public_released', self.past, publish=True)
def test_private_no_start_xblock(self):
"""Verifies that a private xblock with no start date is not visible"""
self._test_visible_to_students(False, 'private_no_start', None)
def test_public_no_start_xblock(self):
"""Verifies that a public (published) xblock with no start date is visible unless staff lock is enabled"""
self._test_visible_to_students(True, 'public_no_start', None, publish=True)
def test_draft_released_xblock(self):
"""Verifies that a xblock with an unreleased draft and a released published version is visible"""
vertical = self._create_xblock_with_start_date('draft_released', self.past, publish=True)
# Create an unreleased draft version of the xblock
vertical.start = self.future
modulestore().update_item(vertical, self.dummy_user)
self.assertTrue(utils.is_currently_visible_to_students(vertical))
def _test_visible_to_students(self, expected_visible_without_lock, name, start_date, publish=False):
"""
Helper method that checks that is_xblock_visible_to_students returns the correct value both
with and without visible_to_staff_only set.
"""
no_staff_lock = self._create_xblock_with_start_date(name, start_date, publish, visible_to_staff_only=False)
self.assertEqual(expected_visible_without_lock, utils.is_currently_visible_to_students(no_staff_lock))
# any xblock with visible_to_staff_only set to True should not be visible to students.
staff_lock = self._create_xblock_with_start_date(
name + "_locked", start_date, publish, visible_to_staff_only=True
)
self.assertFalse(utils.is_currently_visible_to_students(staff_lock))
def _create_xblock_with_start_date(self, name, start_date, publish=False, visible_to_staff_only=False):
"""Helper to create an xblock with a start date, optionally publishing it"""
location = Location('edX', 'visibility', '2012_Fall', 'vertical', name)
vertical = modulestore().create_xmodule(location)
vertical.start = start_date
if visible_to_staff_only:
vertical.visible_to_staff_only = visible_to_staff_only
modulestore().update_item(vertical, self.dummy_user, allow_not_found=True)
if publish:
modulestore().publish(location, self.dummy_user)
return vertical
class ReleaseDateSourceTest(CourseTestCase):
"""Tests for finding the source of an xblock's release date."""
def setUp(self):
super(ReleaseDateSourceTest, self).setUp()
self.chapter = ItemFactory.create(category='chapter', parent_location=self.course.location)
self.sequential = ItemFactory.create(category='sequential', parent_location=self.chapter.location)
self.vertical = ItemFactory.create(category='vertical', parent_location=self.sequential.location)
# Read again so that children lists are accurate
self.chapter = self.store.get_item(self.chapter.location)
self.sequential = self.store.get_item(self.sequential.location)
self.vertical = self.store.get_item(self.vertical.location)
self.date_one = datetime(1980, 1, 1, tzinfo=UTC)
self.date_two = datetime(2020, 1, 1, tzinfo=UTC)
def _update_release_dates(self, chapter_start, sequential_start, vertical_start):
"""Sets the release dates of the chapter, sequential, and vertical"""
self.chapter.start = chapter_start
self.chapter = self.store.update_item(self.chapter, ModuleStoreEnum.UserID.test)
self.sequential.start = sequential_start
self.sequential = self.store.update_item(self.sequential, ModuleStoreEnum.UserID.test)
self.vertical.start = vertical_start
self.vertical = self.store.update_item(self.vertical, ModuleStoreEnum.UserID.test)
def _verify_release_date_source(self, item, expected_source):
"""Helper to verify that the release date source of a given item matches the expected source"""
source = utils.find_release_date_source(item)
self.assertEqual(source.location, expected_source.location)
self.assertEqual(source.start, expected_source.start)
def test_chapter_source_for_vertical(self):
"""Tests a vertical's release date being set by its chapter"""
self._update_release_dates(self.date_one, self.date_one, self.date_one)
self._verify_release_date_source(self.vertical, self.chapter)
def test_sequential_source_for_vertical(self):
"""Tests a vertical's release date being set by its sequential"""
self._update_release_dates(self.date_one, self.date_two, self.date_two)
self._verify_release_date_source(self.vertical, self.sequential)
def test_chapter_source_for_sequential(self):
"""Tests a sequential's release date being set by its chapter"""
self._update_release_dates(self.date_one, self.date_one, self.date_one)
self._verify_release_date_source(self.sequential, self.chapter)
def test_sequential_source_for_sequential(self):
"""Tests a sequential's release date being set by itself"""
self._update_release_dates(self.date_one, self.date_two, self.date_two)
self._verify_release_date_source(self.sequential, self.sequential)
| nicky-ji/edx-nicky | cms/djangoapps/contentstore/tests/test_utils.py | Python | agpl-3.0 | 15,281 |
# Generated by Django 2.2.2 on 2019-06-27 23:51
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('part', '0012_auto_20190627_2144'),
]
operations = [
migrations.AlterField(
model_name='bomitem',
name='part',
field=models.ForeignKey(help_text='Select parent part', limit_choices_to={'assembly': True}, on_delete=django.db.models.deletion.CASCADE, related_name='bom_items', to='part.Part'),
),
migrations.AlterField(
model_name='bomitem',
name='sub_part',
field=models.ForeignKey(help_text='Select part to be used in BOM', limit_choices_to={'component': True}, on_delete=django.db.models.deletion.CASCADE, related_name='used_in', to='part.Part'),
),
]
| inventree/InvenTree | InvenTree/part/migrations/0013_auto_20190628_0951.py | Python | mit | 865 |
# -*- encoding: utf-8 -*-
##############################################################################
#
# Croc Bauges - Print Product module for Odoo
# Copyright (C) 2015-Today GRAP (http://www.grap.coop)
# @author Sylvain LE GAL (https://twitter.com/legalsylvain)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv.orm import TransientModel
from openerp.osv import fields
class print_product_wizard(TransientModel):
_name = 'print.product.wizard'
# Fields Function Section
def _get_print_type_id(self, cr, uid, context=None):
ppt_obj = self.pool['print.product.type']
ppt_ids = ppt_obj.search(
cr, uid, [], limit=1, order='sequence desc, id', context=context)
if ppt_ids:
return ppt_ids[0]
else:
return False
    def _get_product_id(self, cr, uid, context=None):
        context = context or {}
        return context.get('active_id', False)
_columns = {
'print_type_id': fields.many2one(
'print.product.type', required=True, string='Print Configuration'),
'product_id': fields.many2one(
'product.product', readonly=True, required=True, string='Product'),
}
# Default values Section
_defaults = {
'print_type_id': _get_print_type_id,
'product_id': _get_product_id,
}
| grap/odoo-addons-crb | crb_print_product/models/print_product_wizard.py | Python | agpl-3.0 | 2,045 |
from TimeSeries import SessionFactory
class TestSessionFactory:
def setup(self):
self.connection_string = "sqlite:///:memory:"
self.session_factory = SessionFactory(self.connection_string, echo=True)
def test_create_session_factory(self):
assert repr(self.session_factory) == "<SessionFactory('Engine(%s)')>" % self.connection_string
        assert self.session_factory.Session is not None
def test_get_session(self):
session = self.session_factory.getSession()
        assert 'sqlalchemy.orm.session.SessionMaker' in repr(session)
| Castronova/EMIT | api_old/ODM1_1_1/tests/data_tests/test_session_factory.py | Python | gpl-2.0 | 530 |
""" Store packages in S3 """
import logging
from binascii import hexlify
from contextlib import contextmanager
from hashlib import md5
from io import BytesIO
from urllib.request import urlopen
from pyramid.httpexceptions import HTTPFound
from pyramid.settings import asbool
from pypicloud.models import Package
from .base import IStorage
LOG = logging.getLogger(__name__)
class ObjectStoreStorage(IStorage):
"""Storage backend base class containing code that is common between
supported object stores (S3 / GCS)
"""
test = False
def __init__(
self,
request=None,
expire_after=None,
bucket_prefix=None,
prepend_hash=None,
redirect_urls=None,
sse=None,
object_acl=None,
storage_class=None,
region_name=None,
public_url=False,
**kwargs
):
super(ObjectStoreStorage, self).__init__(request, **kwargs)
self.expire_after = expire_after
self.bucket_prefix = bucket_prefix
self.prepend_hash = prepend_hash
self.redirect_urls = redirect_urls
self.sse = sse
self.object_acl = object_acl
self.storage_class = storage_class
self.region_name = region_name
self.public_url = public_url
def _generate_url(self, package: Package) -> str:
"""Subclasses must implement a method for generating signed URLs to
the package in the object store
"""
raise NotImplementedError
@classmethod
def package_from_object(cls, obj, factory):
"""Subclasses must implement a method for constructing a Package
instance from the backend's storage object format
"""
raise NotImplementedError
@classmethod
def _subclass_specific_config(cls, settings, common_config):
"""Method to allow subclasses to extract configuration parameters
specific to them and not covered in the common configuration
in this class.
"""
return {}
@classmethod
def configure(cls, settings):
kwargs = super(ObjectStoreStorage, cls).configure(settings)
kwargs["expire_after"] = int(settings.get("storage.expire_after", 60 * 60 * 24))
kwargs["bucket_prefix"] = settings.get("storage.prefix", "")
kwargs["prepend_hash"] = asbool(settings.get("storage.prepend_hash", True))
kwargs["object_acl"] = settings.get("storage.object_acl", None)
kwargs["storage_class"] = storage_class = settings.get("storage.storage_class")
kwargs["redirect_urls"] = asbool(settings.get("storage.redirect_urls", True))
kwargs["region_name"] = settings.get("storage.region_name")
kwargs["public_url"] = asbool(settings.get("storage.public_url"))
kwargs.update(cls._subclass_specific_config(settings, kwargs))
return kwargs
def calculate_path(self, package):
"""Calculates the path of a package"""
path = package.name + "/" + package.filename
if self.prepend_hash:
m = md5()
m.update(package.filename.encode("utf-8"))
prefix = hexlify(m.digest()).decode("utf-8")[:4]
path = prefix + "/" + path
return path
def get_path(self, package):
"""Get the fully-qualified bucket path for a package"""
if "path" not in package.data:
filename = self.calculate_path(package)
package.data["path"] = self.bucket_prefix + filename
return package.data["path"]
def get_url(self, package):
if self.redirect_urls:
return super(ObjectStoreStorage, self).get_url(package)
else:
return self._generate_url(package)
def download_response(self, package):
return HTTPFound(location=self._generate_url(package))
@contextmanager
def open(self, package):
url = self._generate_url(package)
handle = urlopen(url)
try:
yield BytesIO(handle.read())
finally:
handle.close()
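# A minimal sketch (not part of pypicloud) of what a concrete backend adds on top
# of this base class; the class name and URL scheme below are illustrative
# assumptions, not a real storage backend.
class _ExampleStorage(ObjectStoreStorage):
    """Illustrative subclass that serves every package from an unsigned URL."""

    def _generate_url(self, package: Package) -> str:
        # A real backend would return a signed, expiring URL here (see
        # self.expire_after); this sketch just builds a static one.
        return "https://storage.example.invalid/" + self.get_path(package)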
| stevearc/pypicloud | pypicloud/storage/object_store.py | Python | mit | 4,039 |
# Copyright 2015 Kevin B Jacobs
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Simple class for working with records from BED files."""
from collections import defaultdict
from operator import attrgetter
from itertools import groupby
from .smartfile import smartfile
class BedRecord:
"""Simple class for working with records from BED files."""
__slots__ = ('contig', 'start', 'stop', 'name', 'score', 'strand', 'thick_start', 'thick_end', 'item_rgb')
field_names = __slots__
def __init__(self, contig, start, stop, name=None, score=None, strand=None, thick_start=None,
thick_end=None, item_rgb=None):
"""Build a new BedRecord."""
self.contig = contig
self.start = start
self.stop = stop
self.name = name
self.score = score
self.strand = strand
self.thick_start = thick_start
self.thick_end = thick_end
self.item_rgb = item_rgb
@staticmethod
def from_line(line):
"""Build a BedRecord from a string."""
line = line.rstrip()
fields = line.split('\t')
contig, start, stop = fields[0], int(fields[1]), int(fields[2])
n = len(fields)
name = fields[3] or None if n >= 4 else None
score = fields[4] or None if n >= 5 else None
strand = fields[5] or None if n >= 6 else None
thick_start = fields[6] or None if n >= 7 else None
thick_end = fields[7] or None if n >= 8 else None
item_rgb = fields[8] or None if n >= 9 else None
return BedRecord(contig, start, stop, name, score, strand, thick_start, thick_end, item_rgb)
def to_tuple(self):
"""Convert BedRecord to a tuple."""
return (self.contig, self.start, self.stop, self.name, self.score,
self.strand, self.thick_start, self.thick_end, self.item_rgb)
def to_line(self):
"""Convert BedRecord to a BED line."""
line = '\t'.join(str(f if f is not None else '') for f in self.to_tuple())
return line.rstrip()
def __repr__(self):
"""Return the string representation of this BedRecord."""
fields = (f'{k}={v!r}' for k, v in zip(self.field_names, self.to_tuple()) if v not in (None, ''))
return 'BedRecord(%s)' % ', '.join(fields)
class BedFile:
"""Simple class for iterating through the records of a BED file."""
def __init__(self, filename):
"""Open a BedFile."""
self.filename = filename
self._tabix = None
def __iter__(self):
"""Iterate over rows of this BedFile."""
return BedFile.parse_bed_lines(smartfile(self.filename))
@property
def tabix(self):
"""Return a tabix index for this BedFile."""
if self._tabix:
return self._tabix
import pysam
self._tabix = pysam.Tabixfile(self.filename)
return self._tabix
def query(self, contig=None, start=None, stop=None):
"""Query the tabix index for this BedFile."""
records = self.tabix.fetch(contig, start, stop)
return BedFile.parse_bed_lines(records)
@property
def contigs(self):
"""Return the contigs stored in this BedFile."""
return self.tabix.contigs
@staticmethod
def parse_bed_lines(lines):
"""Parse lines into BedRecords."""
for line in lines:
line = line.rstrip()
if not line or line.startswith(';') or line.startswith('track '):
continue
yield BedRecord.from_line(line)
def load_bedmap(filename):
"""Load BED file as a dictionary mapping contig to list of BedRecords.
Args:
filename (str): input filename
Returns:
dict: dictionary mapping contig to list of BedRecords
"""
bed = sorted(BedFile(filename), key=attrgetter('contig', 'start', 'stop'))
bedmap = defaultdict(list)
for contig, contig_records in groupby(bed, attrgetter('contig')):
bedmap[contig] = list(contig_records)
return dict(bedmap)
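# Usage sketch (assumes a tab-separated BED file at the illustrative path
# "regions.bed"; not part of the original module):
#
#     for record in BedFile('regions.bed'):
#         print(record.contig, record.start, record.stop)
#
#     bedmap = load_bedmap('regions.bed')  # {'chr1': [BedRecord(...), ...], ...}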
| bioinformed/vgraph | vgraph/bed.py | Python | apache-2.0 | 4,566 |
from django.conf.urls.defaults import patterns, include, url
from django.contrib import admin
admin.autodiscover()
"""
Basic, reusable patterns
"""
faculty = r'(?P<faculty>\w+)'
department = r'(?P<department>\w{4})'
number = '(?P<number>\d{3}[DNJ]?[123]?)'
course = department + '_' + number
page_type = '(?P<page_type>[^/]+)'
semester = '(?P<term>\w{4,6})-(?P<year>\d{4})'
slug = '(?P<slug>[^/]+)'
page = course + '/' + page_type + '/' + semester + '/' + slug
sha = '(?P<hash>[a-z0-9]{40})'
professor = '(?P<professor>[a-z-]*)'
"""
Begin mappings. URLs are matched in order, so define higher-priority patterns first.
"""
direct_to_view = (
('main', (
('login', 'login_logout'),
('recent', 'recent'),
('recent/(?P<num_days>\d+)', 'recent'),
('recent/all', 'all_recent'),
('recent/all/(?P<num_days>\d+)', 'all_recent'),
('ucp', 'ucp'),
('ucp/(?P<mode>\w*)', 'ucp'),
('users/(?P<username>\w+)', 'profile'),
('users/(?P<username>\w+)/contributions', 'contributions'),
('search', 'search'),
('markdown', 'markdown'),
('register', 'register')
)),
('messages', (
('messages', 'inbox'),
('messages/inbox', 'inbox'),
('messages/outbox', 'outbox'),
('messages/compose', 'compose'),
('messages/view/(?P<message_id>\d+)', 'view'),
)),
('news', (
('news', 'main'),
('news/' + slug, 'view'),
)),
('pages', (
('pages/random', 'random'),
(course + '/create/' + page_type, 'create'),
(page, 'show'),
(page + '/edit', 'edit'),
(page + '/history', 'history'),
(page + '/print', 'printview'),
#(page + '/raw', 'raw'),
(page + '/commit/' + sha, 'commit'),
#(page + '/inline', 'inline'),
)),
('courses', (
('courses', 'index'),
('courses/create', 'create'),
('courses/all', 'all_browse'),
('courses/faculty', 'faculty_browse'),
('courses/department', 'department_browse'),
('courses/professor', 'professor_browse'),
('courses/popular', 'popular_browse'),
('courses/random', 'random'),
('courses/active', 'active_browse'),
('courses/get_all', 'get_all'),
# Redirect department/number to department_number
(department + '/' + number + '.*', 'remove_slash'),
(course, 'overview'),
(course + '/recent', 'recent'),
(course + '/watch', 'watch'),
(course + '/pin', 'pin'),
(course + '/unpin', 'unpin'),
(course + '/' + semester, 'semester_overview'),
(course + '/' + page_type, 'category_overview'),
(department, 'department_overview'),
('faculty/' + faculty, 'faculty_overview'),
('professor/' + professor, 'professor_overview'),
)),
)
# Maps straight from about/history to the static view in main.py
static_urls = {
'about': ['history', 'licensing', 'platform'], # the index one is implicit
'contributing': ['moderating', 'development', 'content', 'guidelines'],
'help': ['copyright', 'formatting', 'lexers'],
}
urlpatterns = patterns('',
url(r'^admin/', include(admin.site.urls)),
)
"""
Begin code for mapping the mappings
"""
# The index view has to be done separately
urlpatterns += patterns('',
url(r'^$', 'views.main.index', name='home'),
)
for prefix, filenames in static_urls.iteritems():
index_url = url(r'^' + prefix + '(?:/overview)?/$', 'views.main.static',
{'mode': prefix, 'page': 'overview'}, name=prefix)
urls = [url(r'^' + prefix + '/' + filename + '/$', 'views.main.static',
{'mode': prefix, 'page': filename},
name=prefix + '_' + filename) for filename in filenames]
urlpatterns += patterns('', index_url, *urls)
for prefix, mapping in direct_to_view:
urls = [url('^' + regex + '/$', view, name='%s_%s' % (prefix, view)) for regex, view in mapping]
urlpatterns += patterns('views.' + prefix, *urls)
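# Illustrative example (an assumption derived from the patterns above, not part
# of the original file): a request to /MATH_133/lecture-notes/fall-2011/week-1/
# resolves to views.pages.show with department='MATH', number='133',
# page_type='lecture-notes', term='fall', year='2011' and slug='week-1'.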
| dellsystem/wikinotes | urls.py | Python | gpl-3.0 | 4,019 |
#!/usr/bin/env python
# coding=utf-8
from __future__ import print_function, unicode_literals
import os.path
import nose.tools as nose
import yvs.filter_refs as yvs
from tests import set_up, tear_down
from tests.decorators import use_user_prefs
@nose.with_setup(set_up, tear_down)
@use_user_prefs({'language': 'eng', 'version': 59, 'copybydefault': False})
def test_version_persistence():
"""should remember version preferences"""
results = yvs.get_result_list('mat 4')
nose.assert_equal(results[0]['title'], 'Matthew 4 (ESV)')
nose.assert_equal(len(results), 1)
@nose.with_setup(set_up, tear_down)
@use_user_prefs({'language': 'spa', 'version': 128, 'copybydefault': False})
def test_language_persistence():
"""should remember language preferences"""
results = yvs.get_result_list('gá 4')
nose.assert_equal(results[0]['title'], 'Gálatas 4 (NVI)')
nose.assert_equal(len(results), 1)
@nose.with_setup(set_up, tear_down)
def test_missing_prefs():
"""should supply missing preferences with defaults"""
yvs.core.set_user_prefs({})
results = yvs.get_result_list('mat 5.3')
nose.assert_equal(len(results), 1)
@nose.with_setup(set_up, tear_down)
@use_user_prefs({'language': 'eng', 'version': 999, 'copybydefault': False})
def test_invalid_user_version():
"""should raise exception when invalid version is set"""
with nose.assert_raises(Exception):
yvs.get_result_list('ph 4')
@nose.with_setup(set_up, tear_down)
@use_user_prefs({'language': 'eng', 'version': 111, 'copybydefault': False})
def test_copy_by_default_false():
"""should export correct data when "Copy By Default?" setting is false"""
results = yvs.get_result_list('mat 5.3')
nose.assert_equal(results[0]['variables']['copybydefault'], 'False')
nose.assert_equal(results[0]['subtitle'], 'View on YouVersion')
nose.assert_equal(
results[0]['mods']['cmd']['subtitle'], 'Copy content to clipboard')
@nose.with_setup(set_up, tear_down)
@use_user_prefs({'language': 'eng', 'version': 111, 'copybydefault': True})
def test_copy_by_default_true():
"""should export correct data when "Copy By Default?" setting is true"""
results = yvs.get_result_list('mat 5.3')
nose.assert_equal(results[0]['variables']['copybydefault'], 'True')
nose.assert_equal(results[0]['subtitle'], 'Copy content to clipboard')
nose.assert_equal(
results[0]['mods']['cmd']['subtitle'], 'View on YouVersion')
@nose.with_setup(set_up, tear_down)
def test_create_local_data_dir_silent_fail():
"""should silently fail if local data directory already exists"""
yvs.core.create_local_data_dir()
yvs.core.create_local_data_dir()
nose.assert_true(
os.path.exists(yvs.core.LOCAL_DATA_DIR_PATH),
'local data directory does not exist')
@nose.with_setup(set_up, tear_down)
def test_prettified_prefs_json():
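    """should write user prefs to disk as prettified (indented) JSON"""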
yvs.core.set_user_prefs({
'language': 'spa',
'version': 128,
'refformat': '{name}\n{content}'
})
with open(yvs.core.get_user_prefs_path(), 'r') as user_prefs_file:
user_prefs_json = user_prefs_file.read()
nose.assert_in('\n ', user_prefs_json,
'User prefs JSON is not prettified')
| caleb531/youversion-suggest | tests/test_filter_refs/test_prefs.py | Python | mit | 3,247 |
import requests
import math
import json
from os.path import expanduser
from datetime import datetime
from bs4 import BeautifulSoup
GOOGLE_API_KEY = "AIzaSyC79GdoRDJXfeWDQnx5bBr14I3HJgEBIH0"
def get_current_coordinates():
"""
Returns the current latitude and longitude
defined by IP address
"""
try:
response = requests.get("http://ip-api.com/json")
data = response.json()
coordinates = (data['lat'], data['lon'])
except requests.ConnectionError:
print("Проблем с интернет връзката.")
coordinates = (0, 0)
return coordinates
def transform_html_directions(data):
arr_instr = []
# transform to text or put into json
for leg in data['routes'][0]['legs']:
for step in leg['steps']:
arr_instr.append(step['html_instructions'])
arr_instr.append(step['duration']['text'])
arr_instr.append(step['distance']['text'])
instructions = "\n".join(arr_instr)
without_html = BeautifulSoup(instructions, 'html.parser')
return without_html.get_text()
def get_duration(destination_lat, destination_lon):
"""
    Requires destination latitude and longitude.
    Returns the travel duration (in seconds) from the current position.
    If the API response status is not 'OK', returns math.inf.
"""
current_position = get_current_coordinates()
url = ("https://maps.googleapis.com/maps/api/directions/"
"json?origin={},{}&destination={},{}&key={}&"
"language=bg&traffic_model").format(
current_position[0], current_position[1],
destination_lat, destination_lon, GOOGLE_API_KEY)
try:
response = requests.get(url)
data = response.json()
except requests.ConnectionError:
print("Проблем с интернет връзката.")
return math.inf
if data['status'] == 'OK':
return data['routes'][0]['legs'][0]['duration']['value']
return math.inf
def get_duration_from_address(address, destination_lat, destination_lon):
"""
    Requires a start address (str) from which to calculate the travel duration,
    and the latitude and longitude of the destination. Returns the duration in
    seconds, or math.inf on failure.
"""
print(address)
url = ("https://maps.googleapis.com/maps/api/directions/"
"json?origin={}&destination={},{}&key={}&"
"language=bg&traffic_model").format(
address, destination_lat, destination_lon, GOOGLE_API_KEY)
try:
response = requests.get(url)
except requests.ConnectionError:
print("Проблем с интернет връзката.")
return math.inf
data = response.json()
if data['status'] == 'OK':
return data['routes'][0]['legs'][0]['duration']['value']
return math.inf
def directions(destination_lat, destination_lon, address):
if address is None:
current_position = get_current_coordinates()
url = ("https://maps.googleapis.com/maps/api/directions/"
"json?units=metric&origin={},{}&destination={},{}&key={}&"
"language=bg".format(current_position[0],
current_position[1], destination_lat,
destination_lon, GOOGLE_API_KEY))
try:
response = requests.get(url)
except requests.ConnectionError:
print("Проблем с интернет връзката")
return
else:
url = ("https://maps.googleapis.com/maps/api/directions/"
"json?units=metric&origin={}&destination={},{}&key={}&"
"language=bg".format(address, destination_lat,
destination_lon, GOOGLE_API_KEY))
try:
response = requests.get(url)
except requests.ConnectionError:
print("Проблем с интернет връзката.")
return
data = response.json()
if data['status'] == 'OK':
instructions = transform_html_directions(data)
text_filename = "{}/{}_text.txt".format(
expanduser("~"), str(datetime.now()).replace(":", "-"))
with open(text_filename, 'w') as text_file:
text_file.write(instructions)
filename = "{}/{}.json".format(
expanduser("~"), str(datetime.now()).replace(":", "-"))
with open(filename, 'w') as json_file:
json.dump(data, json_file)
print(("В {} има записан json файл с инструкциите "
"и текстов в {}").format(filename, text_filename))
return
else:
print("Има проблем с инструкциите. Моля, опитайте отново.")
return
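# Usage sketch (illustrative coordinates for central Sofia; assumes a valid API
# key and network access):
#
#     seconds = get_duration(42.6977, 23.3219)
#     directions(42.6977, 23.3219, address=None)  # writes JSON + text instructions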
| georgi-gi/where2Go | where2Go/directions_and_durations.py | Python | gpl-2.0 | 4,747 |
from __future__ import unicode_literals
import base64
from hashlib import sha1
import hmac
import time
import uuid
from django.conf import settings
from django.contrib.auth import authenticate
from django.core.exceptions import ImproperlyConfigured
from django.middleware.csrf import _sanitize_token, constant_time_compare
from django.utils.six.moves.urllib.parse import urlparse
from django.utils.translation import ugettext as _
from tastypie.compat import get_user_model, get_username_field
from tastypie.http import HttpUnauthorized
try:
import python_digest
except ImportError:
python_digest = None
try:
import oauth2
except ImportError:
oauth2 = None
try:
import oauth_provider
except ImportError:
oauth_provider = None
def same_origin(url1, url2):
"""
Checks if two URLs are 'same-origin'
"""
PROTOCOL_TO_PORT = {
'http': 80,
'https': 443,
}
p1, p2 = urlparse(url1), urlparse(url2)
try:
o1 = (p1.scheme, p1.hostname, p1.port or PROTOCOL_TO_PORT[p1.scheme])
o2 = (p2.scheme, p2.hostname, p2.port or PROTOCOL_TO_PORT[p2.scheme])
return o1 == o2
except (ValueError, KeyError):
return False
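# For example, same_origin('https://example.com/a', 'https://example.com:443/b')
# is True because missing default ports are normalized, while any difference in
# scheme, host or port makes it False.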
class Authentication(object):
"""
A simple base class to establish the protocol for auth.
By default, this indicates the user is always authenticated.
"""
def __init__(self, require_active=True):
self.require_active = require_active
def is_authenticated(self, request, **kwargs):
"""
Identifies if the user is authenticated to continue or not.
Should return either ``True`` if allowed, ``False`` if not or an
``HttpResponse`` if you need something custom.
"""
return True
def get_identifier(self, request):
"""
Provides a unique string identifier for the requestor.
This implementation returns a combination of IP address and hostname.
"""
return "%s_%s" % (request.META.get('REMOTE_ADDR', 'noaddr'), request.META.get('REMOTE_HOST', 'nohost'))
def check_active(self, user):
"""
Ensures the user has an active account.
Optimized for the ``django.contrib.auth.models.User`` case.
"""
if not self.require_active:
# Ignore & move on.
return True
return user.is_active
class BasicAuthentication(Authentication):
"""
Handles HTTP Basic auth against a specific auth backend if provided,
or against all configured authentication backends using the
``authenticate`` method from ``django.contrib.auth``.
Optional keyword arguments:
``backend``
If specified, use a specific ``django.contrib.auth`` backend instead
of checking all backends specified in the ``AUTHENTICATION_BACKENDS``
setting.
``realm``
The realm to use in the ``HttpUnauthorized`` response. Default:
``django-tastypie``.
"""
def __init__(self, backend=None, realm='django-tastypie', **kwargs):
super(BasicAuthentication, self).__init__(**kwargs)
self.backend = backend
self.realm = realm
def _unauthorized(self):
response = HttpUnauthorized()
# FIXME: Sanitize realm.
response['WWW-Authenticate'] = 'Basic Realm="%s"' % self.realm
return response
def is_authenticated(self, request, **kwargs):
"""
Checks a user's basic auth credentials against the current
Django auth backend.
Should return either ``True`` if allowed, ``False`` if not or an
``HttpResponse`` if you need something custom.
"""
if not request.META.get('HTTP_AUTHORIZATION'):
return self._unauthorized()
try:
(auth_type, data) = request.META['HTTP_AUTHORIZATION'].split()
if auth_type.lower() != 'basic':
return self._unauthorized()
user_pass = base64.b64decode(data).decode('utf-8')
except:
return self._unauthorized()
bits = user_pass.split(':', 1)
if len(bits) != 2:
return self._unauthorized()
if self.backend:
user = self.backend.authenticate(username=bits[0], password=bits[1])
else:
user = authenticate(username=bits[0], password=bits[1])
if user is None:
return self._unauthorized()
if not self.check_active(user):
return False
request.user = user
return True
def get_identifier(self, request):
"""
Provides a unique string identifier for the requestor.
This implementation returns the user's basic auth username.
"""
return request.META.get('REMOTE_USER', 'nouser')
class ApiKeyAuthentication(Authentication):
"""
Handles API key auth, in which a user provides a username & API key.
Uses the ``ApiKey`` model that ships with tastypie. If you wish to use
a different model, override the ``get_key`` method to perform the key check
as suits your needs.
"""
def _unauthorized(self):
return HttpUnauthorized()
def extract_credentials(self, request):
authorization = request.META.get('HTTP_AUTHORIZATION', '')
if authorization and authorization.lower().startswith('apikey '):
auth_type, data = authorization.split()
username, api_key = data.split(':', 1)
else:
username = request.GET.get('username') or request.POST.get('username')
api_key = request.GET.get('api_key') or request.POST.get('api_key')
return username, api_key
def is_authenticated(self, request, **kwargs):
"""
Finds the user and checks their API key.
Should return either ``True`` if allowed, ``False`` if not or an
``HttpResponse`` if you need something custom.
"""
try:
username, api_key = self.extract_credentials(request)
except ValueError:
return self._unauthorized()
if not username or not api_key:
return self._unauthorized()
username_field = get_username_field()
User = get_user_model()
lookup_kwargs = {username_field: username}
try:
user = User.objects.select_related('api_key').get(**lookup_kwargs)
except (User.DoesNotExist, User.MultipleObjectsReturned):
return self._unauthorized()
if not self.check_active(user):
return False
key_auth_check = self.get_key(user, api_key)
if key_auth_check and not isinstance(key_auth_check, HttpUnauthorized):
request.user = user
return key_auth_check
def get_key(self, user, api_key):
"""
Attempts to find the API key for the user. Uses ``ApiKey`` by default
but can be overridden.
"""
from tastypie.models import ApiKey
try:
if user.api_key.key != api_key:
return self._unauthorized()
except ApiKey.DoesNotExist:
return self._unauthorized()
return True
def get_identifier(self, request):
"""
Provides a unique string identifier for the requestor.
This implementation returns the user's username.
"""
username, api_key = self.extract_credentials(request)
return username or 'nouser'
class SessionAuthentication(Authentication):
"""
An authentication mechanism that piggy-backs on Django sessions.
This is useful when the API is talking to Javascript on the same site.
Relies on the user being logged in through the standard Django login
setup.
Requires a valid CSRF token.
"""
def is_authenticated(self, request, **kwargs):
"""
Checks to make sure the user is logged in & has a Django session.
"""
# Cargo-culted from Django 1.3/1.4's ``django/middleware/csrf.py``.
# We can't just use what's there, since the return values will be
# wrong.
# We also can't risk accessing ``request.POST``, which will break with
# the serialized bodies.
if request.method in ('GET', 'HEAD', 'OPTIONS', 'TRACE'):
return request.user.is_authenticated()
if getattr(request, '_dont_enforce_csrf_checks', False):
return request.user.is_authenticated()
csrf_token = _sanitize_token(request.COOKIES.get(settings.CSRF_COOKIE_NAME, ''))
if request.is_secure():
referer = request.META.get('HTTP_REFERER')
if referer is None:
return False
good_referer = 'https://%s/' % request.get_host()
if not same_origin(referer, good_referer):
return False
request_csrf_token = request.META.get('HTTP_X_CSRFTOKEN', '')
if not constant_time_compare(request_csrf_token, csrf_token):
return False
return request.user.is_authenticated()
def get_identifier(self, request):
"""
Provides a unique string identifier for the requestor.
This implementation returns the user's username.
"""
return getattr(request.user, get_username_field())
class DigestAuthentication(Authentication):
"""
Handles HTTP Digest auth against a specific auth backend if provided,
or against all configured authentication backends using the
``authenticate`` method from ``django.contrib.auth``. However, instead of
the user's password, their API key should be used.
Optional keyword arguments:
``backend``
If specified, use a specific ``django.contrib.auth`` backend instead
of checking all backends specified in the ``AUTHENTICATION_BACKENDS``
setting.
``realm``
The realm to use in the ``HttpUnauthorized`` response. Default:
``django-tastypie``.
"""
def __init__(self, backend=None, realm='django-tastypie', **kwargs):
super(DigestAuthentication, self).__init__(**kwargs)
self.backend = backend
self.realm = realm
if python_digest is None:
raise ImproperlyConfigured("The 'python_digest' package could not be imported. It is required for use with the 'DigestAuthentication' class.")
def _unauthorized(self):
response = HttpUnauthorized()
new_uuid = uuid.uuid4()
opaque = hmac.new(str(new_uuid).encode('utf-8'), digestmod=sha1).hexdigest()
response['WWW-Authenticate'] = python_digest.build_digest_challenge(
timestamp=time.time(),
secret=settings.SECRET_KEY,
realm=self.realm,
opaque=opaque,
stale=False
)
return response
def is_authenticated(self, request, **kwargs):
"""
Finds the user and checks their API key.
Should return either ``True`` if allowed, ``False`` if not or an
``HttpResponse`` if you need something custom.
"""
if not request.META.get('HTTP_AUTHORIZATION'):
return self._unauthorized()
try:
auth_type, data = request.META['HTTP_AUTHORIZATION'].split(' ', 1)
if auth_type.lower() != 'digest':
return self._unauthorized()
except:
return self._unauthorized()
digest_response = python_digest.parse_digest_credentials(request.META['HTTP_AUTHORIZATION'])
# FIXME: Should the nonce be per-user?
if not python_digest.validate_nonce(digest_response.nonce, settings.SECRET_KEY):
return self._unauthorized()
user = self.get_user(digest_response.username)
api_key = self.get_key(user)
if user is False or api_key is False:
return self._unauthorized()
expected = python_digest.calculate_request_digest(
request.method,
python_digest.calculate_partial_digest(digest_response.username, self.realm, api_key),
digest_response)
if not digest_response.response == expected:
return self._unauthorized()
if not self.check_active(user):
return False
request.user = user
return True
def get_user(self, username):
username_field = get_username_field()
User = get_user_model()
try:
lookup_kwargs = {username_field: username}
user = User.objects.get(**lookup_kwargs)
except (User.DoesNotExist, User.MultipleObjectsReturned):
return False
return user
def get_key(self, user):
"""
Attempts to find the API key for the user. Uses ``ApiKey`` by default
but can be overridden.
Note that this behaves differently than the ``ApiKeyAuthentication``
method of the same name.
"""
from tastypie.models import ApiKey
try:
key = ApiKey.objects.get(user=user)
except ApiKey.DoesNotExist:
return False
return key.key
def get_identifier(self, request):
"""
Provides a unique string identifier for the requestor.
This implementation returns the user's username.
"""
if hasattr(request, 'user'):
if hasattr(request.user, 'username'):
return request.user.username
return 'nouser'
class OAuthAuthentication(Authentication):
"""
Handles OAuth, which checks a user's credentials against a separate service.
Currently verifies against OAuth 1.0a services.
This does *NOT* provide OAuth authentication in your API, strictly
consumption.
"""
def __init__(self, **kwargs):
super(OAuthAuthentication, self).__init__(**kwargs)
if oauth2 is None:
raise ImproperlyConfigured("The 'python-oauth2' package could not be imported. It is required for use with the 'OAuthAuthentication' class.")
if oauth_provider is None:
raise ImproperlyConfigured("The 'django-oauth-plus' package could not be imported. It is required for use with the 'OAuthAuthentication' class.")
def is_authenticated(self, request, **kwargs):
from oauth_provider.store import store
if self.is_valid_request(request):
oauth_request = oauth_provider.utils.get_oauth_request(request)
consumer = store.get_consumer(request, oauth_request, oauth_request.get_parameter('oauth_consumer_key'))
try:
token = store.get_access_token(request, oauth_request, consumer, oauth_request.get_parameter('oauth_token'))
except oauth_provider.store.InvalidTokenError:
return oauth_provider.utils.send_oauth_error(oauth2.Error(_('Invalid access token: %s') % oauth_request.get_parameter('oauth_token')))
try:
self.validate_token(request, consumer, token)
except oauth2.Error as e:
return oauth_provider.utils.send_oauth_error(e)
if consumer and token:
if not self.check_active(token.user):
return False
request.user = token.user
return True
return oauth_provider.utils.send_oauth_error(oauth2.Error(_('You are not allowed to access this resource.')))
return oauth_provider.utils.send_oauth_error(oauth2.Error(_('Invalid request parameters.')))
def is_in(self, params):
"""
Checks to ensure that all the OAuth parameter names are in the
provided ``params``.
"""
from oauth_provider.consts import OAUTH_PARAMETERS_NAMES
for param_name in OAUTH_PARAMETERS_NAMES:
if param_name not in params:
return False
return True
def is_valid_request(self, request):
"""
Checks whether the required parameters are either in the HTTP
``Authorization`` header sent by some clients (the preferred method
according to OAuth spec) or fall back to ``GET/POST``.
"""
auth_params = request.META.get("HTTP_AUTHORIZATION", [])
return self.is_in(auth_params) or self.is_in(request.REQUEST)
def validate_token(self, request, consumer, token):
oauth_server, oauth_request = oauth_provider.utils.initialize_server_request(request)
return oauth_server.verify_request(oauth_request, consumer, token)
class MultiAuthentication(object):
"""
An authentication backend that tries a number of backends in order.
"""
def __init__(self, *backends, **kwargs):
super(MultiAuthentication, self).__init__(**kwargs)
self.backends = backends
def is_authenticated(self, request, **kwargs):
"""
Identifies if the user is authenticated to continue or not.
Should return either ``True`` if allowed, ``False`` if not or an
``HttpResponse`` if you need something custom.
"""
unauthorized = False
for backend in self.backends:
check = backend.is_authenticated(request, **kwargs)
if check:
if isinstance(check, HttpUnauthorized):
unauthorized = unauthorized or check
else:
request._authentication_backend = backend
return check
return unauthorized
def get_identifier(self, request):
"""
Provides a unique string identifier for the requestor.
This implementation returns a combination of IP address and hostname.
"""
try:
return request._authentication_backend.get_identifier(request)
except AttributeError:
return 'nouser'
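# Usage sketch (``NoteResource`` is an illustrative name; ``Meta.authentication``
# is the standard tastypie hook for these classes):
#
#     from tastypie.resources import ModelResource
#
#     class NoteResource(ModelResource):
#         class Meta:
#             authentication = MultiAuthentication(
#                 SessionAuthentication(), ApiKeyAuthentication())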
| ocadotechnology/django-tastypie | tastypie/authentication.py | Python | bsd-3-clause | 17,744 |
# -*- coding: utf-8 -*-
__author__ = """Chris Tabor (dxdstudio@gmail.com)"""
if __name__ == '__main__':
from os import getcwd
from os import sys
sys.path.append(getcwd())
from MOAL.helpers.display import Section
import mlpy
import matplotlib.pyplot as plot
import matplotlib.cm as cm
from random import randrange as rr
DEBUG = True if __name__ == '__main__' else False
def random_timesequence(start, end, steps=3):
seq = []
for n in range(start, end):
# Randomize the number of sub-steps,
# but maintain the bounds and monotonicity
# (e.g. 0, 0, 1, 1, 1, 2, 3, 3, 3)
for i in range(rr(0, steps)):
seq.append(n)
return seq
if DEBUG:
with Section('Dynamic Time Warping algorithm - MLPY'):
# Using MLPY:
# First, make sure deps are setup.
# `brew install gsl`
# Download from SF: http://mlpy.sourceforge.net/
# Then install using setup.py:
# `cd MLPY_PATH/setup.py install`
# Now this makes it fun.
x, y = random_timesequence(0, 10), random_timesequence(0, 10)
# Taken from examples: http://mlpy.sourceforge.net/docs/3.5/dtw.html#id3
distance, cost, path = mlpy.dtw_std(x, y, dist_only=False)
fig = plot.figure(1)
axes = fig.add_subplot(111)
plot1 = plot.imshow(
cost.T, origin='lower', cmap=cm.gray, interpolation='nearest')
plot2 = plot.plot(path[0], path[1], 'w')
bound = 0.5
xlim = axes.set_xlim((-bound, cost.shape[0] - bound))
ylim = axes.set_ylim((-bound, cost.shape[1] - bound))
plot.show()
| christabor/MoAL | MOAL/algorithms/time_series/dynamic_timewarping.py | Python | apache-2.0 | 1,635 |
from osvr.ClientKitRaw import *
class Interface:
"""Interface object"""
def __init__(self, iface, ctx):
"""Initializes an interface object."""
self.interface = iface
self.context = ctx
self.freed = False
def registerCallback(self, cb, userdata):
"""Registers a callback to the interface."""
if isinstance(cb, OSVR_PoseCallback):
osvrRegisterPoseCallback(self.interface, cb, userdata)
if isinstance(cb, OSVR_PositionCallback):
osvrRegisterPositionCallback(self.interface, cb, userdata)
if isinstance(cb, OSVR_ButtonCallback):
osvrRegisterButtonCallback(self.interface, cb, userdata)
if isinstance(cb, OSVR_AnalogCallback):
osvrRegisterAnalogCallback(self.interface, cb, userdata)
if isinstance(cb, OSVR_Location2DCallback):
osvrRegisterLocation2DCallback(self.interface, cb, userdata)
if isinstance(cb, OSVR_DirectionCallback):
osvrRegisterDirectionCallback(self.interface, cb, userdata)
if isinstance(cb, OSVR_EyeTracker2DCallback):
osvrRegisterEyetracker2DCallback(self.interface, cb, userdata)
if isinstance(cb, OSVR_EyeTracker3DCallback):
osvrRegisterEyeTracker3DCallback(self.interface, cb, userdata)
if isinstance(cb, OSVR_EyeTrackerBlinkCallback):
osvrRegisterEyeTrackerBlinkCallback(self.interface, cb, userdata)
if isinstance(cb, OSVR_NaviVelocityCallback):
osvrRegisterNaviVelocityCallback(self.interface, cb, userdata)
if isinstance(cb, OSVR_NaviPositionCallback):
osvrRegisterNaviPositionCallback(self.interface, cb, userdata)
def getPoseState(self):
"""Returns the pose state of the interface."""
return osvrGetPoseState(self.interface)
def getPositionState(self):
"""Returns the position state of the interface."""
return osvrGetPositionState(self.interface)
def getOrientationState(self):
"""Returns the orientation state of the interface."""
return osvrGetOrientationState(self.interface)
def getButtonState(self):
"""Returns the button state of the interface."""
return osvrGetButtonState(self.interface)
def getAnalogState(self):
"""Returns the analog state of the interface."""
return osvrGetAnalogState(self.interface)
def getLocation2DState(self):
"""Returns the location 2D state of the interface."""
return osvrGetLocation2DState(self.interface)
def getDirectionState(self):
"""Returns the direction state of the interface."""
return osvrGetDirectionState(self.interface)
def getEyeTracker2DState(self):
"""Returns the eye tracker 2D state of the interface."""
return osvrGetEyeTracker2DState(self.interface)
def getEyeTracker3DState(self):
"""Returns the eye tracker 3D state of the interface."""
return osvrGetEyeTracker3DState(self.interface)
def getEyeTrackerBlinkState(self):
"""Returns the eye tracker blink state of the interface."""
return osvrGetEyeTrackerBlinkState(self.interface)
def getNaviVelocityState(self):
"""Returns the navi velocity state of the interface."""
return osvrGetNaviVelocityState(self.interface)
def getNaviPositionState(self):
"""Returns the navi position state of the interface."""
return osvrGetNaviPositionState(self.interface)
def dispose(self):
"""Frees the interface object."""
        if not self.freed:
self.freed = True
return osvrClientFreeInterface(self.context, self.interface)
def __del__(self):
self.dispose() | BlendOSVR/OSVR-Python | osvr/Interface.py | Python | apache-2.0 | 3,745 |
#!/usr/bin/env python
from generator.actions import Actions, encode
import random
import struct
class VM(Actions):
def start(self):
self.state['init_memory'] = random.randint(0, 0xFFFF)
self.state['init_registers'] = self.chance(0.5)
self.state['registers'] = []
while len(self.state['registers']) < 16:
self.state['registers'].append(random.randint(0,0xFFFFFFFF) if self.state['init_registers'] else 0)
self.state['memory'] = []
for x in xrange(0, 0x10000):
self.state['memory'].append(random.randint(0,0xFF) if x < self.state['init_memory'] else 0)
flags = self.state['init_memory']
if self.state['init_registers']:
flags |= 1 << 31
self.write('\x00MVi')
self.write(struct.pack('<I', flags))
if self.state['init_registers']:
for x in self.state['registers']:
self.write(struct.pack('<I', x))
mem = ''
for x in xrange(0, self.state['init_memory']):
mem += chr(self.state['memory'][x])
if len(mem):
self.write(mem)
def opcode(self):
pass
def rreg(self):
return random.randint(0x0, 0xf)
def rlval(self):
return random.randint(0,0xffff)
def signed(self, lval):
lval = lval & 0xffff
if lval >= 0x8000:
return -(0x10000 - lval)
else:
return lval
def add(self, a, b):
return (a + b) & 0xffffffff
def inst(self, inst, dst, src, lval):
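        # Instruction word layout (packed little-endian below): opcode in bits
        # 24-31, destination register in bits 20-23, source register in bits
        # 16-19, and the 16-bit literal (or, for slt/slte, a third register
        # index) in bits 0-15.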
self.write(struct.pack('<I', (inst << 24) | (dst << 20) | (src << 16) | lval))
def opc_load(self):
r = self.state['registers']
rdst = self.rreg()
rsrc = self.rreg()
lval = self.rlval()
addr = lval
if rsrc != 0:
addr += r[rsrc]
addr &= 0xffff
r[rdst] = self.state['memory'][addr]
self.inst(1, rdst, rsrc, lval)
self.register_dump()
def opc_store(self):
r = self.state['registers']
rdst = self.rreg()
rsrc = self.rreg()
lval = self.rlval()
addr = lval
if rdst != 0:
addr += r[rdst]
addr &= 0xffff
self.state['memory'][addr] = 0 if rsrc == 0 else (r[rsrc] & 0xff)
self.inst(2, rdst, rsrc, lval)
self.register_dump()
def opc_add(self):
r = self.state['registers']
rdst = self.rreg()
rsrc = self.rreg()
lval = self.rlval()
if rsrc != 0:
r[rdst] += self.add(r[rsrc], self.signed(lval))
r[rdst] &= 0xffffffff
else:
r[rdst] += self.signed(lval)
r[rdst] &= 0xffffffff
self.inst(4, rdst, rsrc, lval)
self.register_dump()
def opc_sub(self):
r = self.state['registers']
rdst = self.rreg()
rsrc = self.rreg()
lval = self.rlval()
if rsrc != 0:
r[rdst] -= self.add(r[rsrc], self.signed(lval))
r[rdst] &= 0xffffffff
else:
r[rdst] -= self.signed(lval)
r[rdst] &= 0xffffffff
self.inst(5, rdst, rsrc, lval)
self.register_dump()
def opc_mul(self):
r = self.state['registers']
rdst = self.rreg()
rsrc = self.rreg()
lval = self.rlval()
if rsrc != 0:
r[rdst] *= self.add(r[rsrc], self.signed(lval))
r[rdst] &= 0xffffffff
else:
r[rdst] *= self.signed(lval)
r[rdst] &= 0xffffffff
self.inst(6, rdst, rsrc, lval)
self.register_dump()
def opc_div(self):
r = self.state['registers']
rdst = self.rreg()
rsrc = self.rreg()
lval = self.rlval()
if rsrc != 0:
if self.add(r[rsrc], self.signed(lval)) == 0:
lval += 1
r[rdst] /= self.add(r[rsrc], self.signed(lval))
r[rdst] &= 0xffffffff
else:
if lval == 0:
lval += 1
r[rdst] /= self.signed(lval) & 0xffffffff
r[rdst] &= 0xffffffff
self.inst(7, rdst, rsrc, lval)
self.register_dump()
def opc_and(self):
r = self.state['registers']
rdst = self.rreg()
rsrc = self.rreg()
lval = self.rlval()
if rsrc != 0:
r[rdst] &= r[rsrc] | self.signed(lval)
r[rdst] &= 0xffffffff
else:
r[rdst] &= self.signed(lval)
r[rdst] &= 0xffffffff
self.inst(9, rdst, rsrc, lval)
self.register_dump()
def opc_xor(self):
r = self.state['registers']
rdst = self.rreg()
rsrc = self.rreg()
lval = self.rlval()
if rsrc != 0:
r[rdst] ^= r[rsrc] | self.signed(lval)
r[rdst] &= 0xffffffff
else:
r[rdst] ^= self.signed(lval)
r[rdst] &= 0xffffffff
self.inst(10, rdst, rsrc, lval)
self.register_dump()
def opc_or(self):
r = self.state['registers']
rdst = self.rreg()
rsrc = self.rreg()
lval = self.rlval()
if rsrc != 0:
r[rdst] |= r[rsrc] | self.signed(lval)
r[rdst] &= 0xffffffff
else:
r[rdst] |= self.signed(lval)
r[rdst] &= 0xffffffff
self.inst(8, rdst, rsrc, lval)
self.register_dump()
def opc_slt(self):
r = self.state['registers']
rdst = self.rreg()
rsrca = self.rreg()
rsrcb = self.rreg()
srca = 0 if rsrca == 0 else r[rsrca]
srcb = 0 if rsrcb == 0 else r[rsrcb]
if srca < srcb:
r[rdst] = 1
else:
r[rdst] = 0
self.inst(11, rdst, rsrca, rsrcb)
self.register_dump()
def opc_slte(self):
r = self.state['registers']
rdst = self.rreg()
rsrca = self.rreg()
rsrcb = self.rreg()
srca = 0 if rsrca == 0 else r[rsrca]
srcb = 0 if rsrcb == 0 else r[rsrcb]
if srca <= srcb:
r[rdst] = 1
else:
r[rdst] = 0
self.inst(12, rdst, rsrca, rsrcb)
self.register_dump()
def opc_syscall(self):
return None
if self.chance(0.5): #transmit
self.read()
else: #receive
self.write()
def register_dump(self):
regs = ''
for x in self.state['registers']:
regs += struct.pack('<I', x)
self.read(length=16*4, expect=regs)
def end(self):
self.inst(4, 0, 0, 0) #nop (add r0, r0, 0)
self.register_dump()
if self.chance(0.2):
self.inst(0x00, 0, 0, 0) #invalid opc
elif self.chance(0.3):
self.inst(0x07, 0, 0, 0) #div by zero
else:
self.inst(0x40, 0, 0, 0) #past end of list
self.read(length=4, expect="DONE")
| f0rki/cb-multios | original-challenges/stream_vm/poller/for-release/machine.py | Python | mit | 6,904 |
from datetime import date
from app.models import TimeExercisesHistory, TimeExercisesTaxonomy, Users
from app.service import TimeExercisesHistoryService, TimeExercisesTaxonomyService, UsersService
from app.service_tests.service_test_case import ServiceTestCase
class TimeExercisesHistoryTests(ServiceTestCase):
def setUp(self):
super(TimeExercisesHistoryTests, self).setUp()
# sample users for tests
user_1 = Users('p@p.p', 'Patrick', 'pass')
user_2 = Users('j@j.j', 'Jaytrick', 'pass')
UsersService.add_user_to_database(user_1)
UsersService.add_user_to_database(user_2)
# sample exercises for tests
TimeExercisesTaxonomyService.add_entry_to_db(
TimeExercisesTaxonomy(name='rowing')
)
TimeExercisesTaxonomyService.add_entry_to_db(
TimeExercisesTaxonomy(name='running')
)
TimeExercisesTaxonomyService.add_entry_to_db(
TimeExercisesTaxonomy(name='elliptical')
)
# add_entry_to_db tests #
def test_add_entry_to_db(self):
expected_entry = TimeExercisesHistory(
user_id=2,
exercise_id=1,
distance=2.3,
duration=34.12,
exercise_date=date(year=2016, month=12, day=31)
)
TimeExercisesHistoryService.add_entry_to_db(expected_entry)
actual_entry = list(TimeExercisesHistory.query.all())[0]
self.assertEqual(actual_entry, expected_entry)
# get_list_of_all_history tests #
def test_get_list_of_all_history_no_entries(self):
expected_list = []
actual_list = TimeExercisesHistoryService.get_list_of_all_history()
self.assertListEqual(actual_list, expected_list)
def test_get_list_of_all_history_one_entry(self):
entry_1 = TimeExercisesHistory(
user_id=2,
exercise_id=1,
distance=2.3,
duration=34.12,
exercise_date=date(year=2016, month=12, day=31)
)
TimeExercisesHistoryService.add_entry_to_db(entry_1)
expected_list = [entry_1]
actual_list = TimeExercisesHistoryService.get_list_of_all_history()
self.assertListEqual(actual_list, expected_list)
def test_get_list_of_all_history_multiple_entries(self):
expected_list = [
TimeExercisesHistory(
user_id=2,
exercise_id=1,
distance=2.3,
duration=34.12,
exercise_date=date(year=2016, month=12, day=31)
),
TimeExercisesHistory(
user_id=2,
exercise_id=2,
distance=4.0,
duration=38,
exercise_date=date(year=2016, month=12, day=30)
),
TimeExercisesHistory(
user_id=2,
exercise_id=3,
distance=2.0,
duration=24.12,
exercise_date=date(year=2016, month=12, day=15)
)
]
TimeExercisesHistoryService.add_entry_to_db(expected_list[0])
TimeExercisesHistoryService.add_entry_to_db(expected_list[1])
TimeExercisesHistoryService.add_entry_to_db(expected_list[2])
actual_list = TimeExercisesHistoryService.get_list_of_all_history()
# no guarantee about ordering is made
self.assertListEqual(sorted(actual_list, key=self._sort_key_date),
sorted(expected_list, key=self._sort_key_date))
# get_list_of_users_exercises tests #
def test_get_list_of_users_exercises_no_exercises(self):
entry_1 = TimeExercisesHistory(
user_id=2,
exercise_id=1,
distance=2.3,
duration=34.12,
exercise_date=date(year=2016, month=12, day=31)
)
entry_2 = TimeExercisesHistory(
user_id=2,
exercise_id=2,
distance=4.0,
duration=38,
exercise_date=date(year=2016, month=12, day=30)
)
TimeExercisesHistoryService.add_entry_to_db(entry_1)
TimeExercisesHistoryService.add_entry_to_db(entry_2)
expected_list = []
actual_list = TimeExercisesHistoryService.get_list_of_users_exercises(1)
self.assertListEqual(actual_list, expected_list)
def test_get_list_of_users_exercises_pulls_correct_exercises(self):
entry_1 = TimeExercisesHistory(
user_id=2,
exercise_id=1,
distance=2.3,
duration=34.12,
exercise_date=date(year=2016, month=12, day=31)
)
entry_2 = TimeExercisesHistory(
user_id=1,
exercise_id=2,
distance=4.0,
duration=38,
exercise_date=date(year=2016, month=12, day=30)
)
entry_3 = TimeExercisesHistory(
user_id=2,
exercise_id=3,
distance=2.0,
duration=24.12,
exercise_date=date(year=2016, month=12, day=15)
)
TimeExercisesHistoryService.add_entry_to_db(entry_1)
TimeExercisesHistoryService.add_entry_to_db(entry_2)
TimeExercisesHistoryService.add_entry_to_db(entry_3)
expected_results = [entry_1, entry_3] # entry_2 was done by a different user
actual_results = TimeExercisesHistoryService.get_list_of_users_exercises(2)
# no guarantee about ordering is made
self.assertListEqual(sorted(actual_results, key=self._sort_key_date),
sorted(expected_results, key=self._sort_key_date))
# get_user_history_by_exercise tests #
def test_get_user_history_by_exercise_empty_db(self):
expected_results = []
actual_results = TimeExercisesHistoryService.get_user_history_by_exercise(user_id=1, exercise_id=2)
self.assertListEqual(actual_results, expected_results)
def test_get_user_history_by_exercise_user_not_done_that_exercise(self):
entry_1 = TimeExercisesHistory(
user_id=2,
exercise_id=1,
distance=2.3,
duration=34.12,
exercise_date=date(year=2016, month=12, day=31)
)
entry_2 = TimeExercisesHistory(
user_id=2,
exercise_id=2,
distance=4.0,
duration=38,
exercise_date=date(year=2016, month=12, day=30)
)
entry_3 = TimeExercisesHistory(
user_id=1,
exercise_id=3,
distance=2.0,
duration=24.12,
exercise_date=date(year=2016, month=12, day=15)
)
TimeExercisesHistoryService.add_entry_to_db(entry_1)
TimeExercisesHistoryService.add_entry_to_db(entry_2)
TimeExercisesHistoryService.add_entry_to_db(entry_3)
expected_results = []
actual_results = TimeExercisesHistoryService.get_user_history_by_exercise(user_id=2, exercise_id=3)
self.assertListEqual(actual_results, expected_results)
def test_get_user_history_by_exercise_user_did_that_exercise(self):
entry_1 = TimeExercisesHistory(
user_id=2,
exercise_id=1,
distance=2.3,
duration=34.12,
exercise_date=date(year=2016, month=12, day=31)
)
entry_2 = TimeExercisesHistory(
user_id=2,
exercise_id=2,
distance=4.0,
duration=38,
exercise_date=date(year=2016, month=12, day=30)
)
entry_3 = TimeExercisesHistory(
user_id=1,
exercise_id=3,
distance=2.0,
duration=24.12,
exercise_date=date(year=2016, month=12, day=15)
)
TimeExercisesHistoryService.add_entry_to_db(entry_1)
TimeExercisesHistoryService.add_entry_to_db(entry_2)
TimeExercisesHistoryService.add_entry_to_db(entry_3)
expected_results = [entry_1]
actual_results = TimeExercisesHistoryService.get_user_history_by_exercise(user_id=2, exercise_id=1)
self.assertListEqual(actual_results, expected_results)
# get_user_history_by_date tests #
def test_get_user_history_by_date_empty_db(self):
expected_results = []
actual_results = TimeExercisesHistoryService.get_user_history_by_date(user_id=1, exercise_date='2017-05-02')
self.assertListEqual(actual_results, expected_results)
def test_get_user_history_by_date_no_match(self):
entry_1 = TimeExercisesHistory(
user_id=2,
exercise_id=1,
distance=2.3,
duration=34.12,
exercise_date=date(year=2016, month=12, day=31)
)
entry_2 = TimeExercisesHistory(
user_id=2,
exercise_id=2,
distance=4.0,
duration=38,
exercise_date=date(year=2016, month=12, day=30)
)
entry_3 = TimeExercisesHistory(
user_id=1,
exercise_id=3,
distance=2.0,
duration=24.12,
exercise_date=date(year=2016, month=12, day=15)
)
TimeExercisesHistoryService.add_entry_to_db(entry_1)
TimeExercisesHistoryService.add_entry_to_db(entry_2)
TimeExercisesHistoryService.add_entry_to_db(entry_3)
expected_results = []
actual_results = TimeExercisesHistoryService.get_user_history_by_date(user_id=2, exercise_date='2016-12-15')
self.assertListEqual(actual_results, expected_results)
def test_get_user_history_by_date_match(self):
entry_1 = TimeExercisesHistory(
user_id=2,
exercise_id=1,
distance=2.3,
duration=34.12,
exercise_date=date(year=2016, month=12, day=31)
)
entry_2 = TimeExercisesHistory(
user_id=2,
exercise_id=2,
distance=4.0,
duration=38,
exercise_date=date(year=2016, month=12, day=31)
)
entry_3 = TimeExercisesHistory(
user_id=1,
exercise_id=3,
distance=2.0,
duration=24.12,
exercise_date=date(year=2016, month=12, day=15)
)
TimeExercisesHistoryService.add_entry_to_db(entry_1)
TimeExercisesHistoryService.add_entry_to_db(entry_2)
TimeExercisesHistoryService.add_entry_to_db(entry_3)
expected_results = [entry_1, entry_2]
actual_results = TimeExercisesHistoryService.get_user_history_by_date(user_id=2, exercise_date='2016-12-31')
self.assertListEqual(sorted(actual_results, key=self._sort_key_exercise_id),
sorted(expected_results, key=self._sort_key_exercise_id))
@staticmethod
def _sort_key_date(x):
return x.exercise_date
@staticmethod
def _sort_key_exercise_id(x):
return x.exercise_id
| pbraunstein/trackercise | app/service_tests/time_exercises_history_service_tests.py | Python | mit | 10,888 |
"""empty message
Revision ID: 30dcafee101
Revises: 2474c24c055
Create Date: 2015-05-02 10:11:46.095370
"""
# revision identifiers, used by Alembic.
revision = '30dcafee101'
down_revision = '2474c24c055'
from alembic import op
import sqlalchemy as sa
def upgrade():
### commands auto generated by Alembic - please adjust! ###
op.create_table('response',
sa.Column('response_id', sa.Integer(), nullable=False),
sa.Column('session_id', sa.String(length=300), nullable=True),
sa.Column('user_id', sa.String(length=300), nullable=True),
sa.Column('response_form', sa.Integer(), nullable=True),
sa.Column('response_content', sa.Integer(), nullable=True),
sa.Column('art_id', sa.Integer(), nullable=True),
sa.ForeignKeyConstraint(['art_id'], ['art.id'], ),
sa.PrimaryKeyConstraint('response_id')
)
### end Alembic commands ###
def downgrade():
### commands auto generated by Alembic - please adjust! ###
op.drop_table('response')
### end Alembic commands ###
| anniejw6/art_flask | migrations/versions/30dcafee101_.py | Python | mit | 1,020 |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from enum import Enum
from six import with_metaclass
from azure.core import CaseInsensitiveEnumMeta
class StorageType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
SERVICE_PROFILER = "ServiceProfiler"
| Azure/azure-sdk-for-python | sdk/applicationinsights/azure-mgmt-applicationinsights/azure/mgmt/applicationinsights/v2020_03_01_preview/models/_application_insights_management_client_enums.py | Python | mit | 683 |
'''
Created on 16 Sep 2016
@author: rizarse
'''
import jks, textwrap, base64
from os.path import expanduser
import os.path
import atexit
import shutil
from os import makedirs
class JksHandler(object):
def __init__(self, params):
pass
@staticmethod
def writePkAndCerts(ks, token):
uid = None
home = expanduser("~")
        def deleteCerts(path):
            shutil.rmtree(path)
atexit.register(deleteCerts, home + '/magistral/' + token)
for alias, pk in ks.private_keys.items():
uid = alias
if pk.algorithm_oid == jks.util.RSA_ENCRYPTION_OID:
if os.path.exists(home + '/magistral/' + token) == False:
makedirs(home + '/magistral/' + token)
key = home + '/magistral/' + token + '/key.pem'
if os.path.exists(key): os.remove(key)
with open(key, 'wb') as f:
f.seek(0)
f.write(bytearray(b"-----BEGIN RSA PRIVATE KEY-----\r\n"))
f.write(bytes("\r\n".join(textwrap.wrap(base64.b64encode(pk.pkey).decode('ascii'), 64)), 'utf-8'))
f.write(bytearray(b"\r\n-----END RSA PRIVATE KEY-----"))
f.close()
                counter = 0
cert = home + '/magistral/' + token + '/certificate.pem'
if os.path.exists(cert): os.remove(cert)
with open(cert, 'wb') as f:
f.seek(0)
for c in pk.cert_chain:
f.write(bytearray(b"-----BEGIN CERTIFICATE-----\r\n"))
f.write(bytes("\r\n".join(textwrap.wrap(base64.b64encode(c[1]).decode('ascii'), 64)), 'utf-8'))
f.write(bytearray(b"\r\n-----END CERTIFICATE-----\r\n"))
counter = counter + 1
if (counter == 2): break
f.close()
ca = home + '/magistral/' + token + '/ca.pem'
if os.path.exists(ca): os.remove(ca)
with open(ca, 'wb') as f:
for alias, c in ks.certs.items():
f.write(bytearray(b"-----BEGIN CERTIFICATE-----\r\n"))
f.write(bytes("\r\n".join(textwrap.wrap(base64.b64encode(c.cert).decode('ascii'), 64)), 'utf-8'))
f.write(bytearray(b"\r\n-----END CERTIFICATE-----\r\n"))
f.close()
return uid
@staticmethod
def printJks(ks):
def print_pem(der_bytes, _type_):
print("-----BEGIN %s-----" % _type_)
print("\r\n".join(textwrap.wrap(base64.b64encode(der_bytes).decode('ascii'), 64)))
print("-----END %s-----" % _type_)
for _, pk in ks.private_keys.items():
print("Private key: %s" % pk.alias)
if pk.algorithm_oid == jks.util.RSA_ENCRYPTION_OID:
print_pem(pk.pkey, "RSA PRIVATE KEY")
else:
print_pem(pk.pkey_pkcs8, "PRIVATE KEY")
for c in pk.cert_chain:
print_pem(c[1], "CERTIFICATE")
print()
for _, c in ks.certs.items():
print("Certificate: %s" % c.alias)
print_pem(c.cert, "CERTIFICATE")
print()
for _, sk in ks.secret_keys.items():
print("Secret key: %s" % sk.alias)
print(" Algorithm: %s" % sk.algorithm)
print(" Key size: %d bits" % sk.key_size)
print(" Key: %s" % "".join("{:02x}".format(b) for b in bytearray(sk.key)))
print()
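# Usage sketch (illustrative keystore path, password and token; pyjks'
# KeyStore.load is assumed to be available via the jks import above):
#
#     ks = jks.KeyStore.load('magistral.jks', 'changeit')
#     uid = JksHandler.writePkAndCerts(ks, 'abc123')
#     # -> writes ~/magistral/abc123/{key.pem,certificate.pem,ca.pem}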
| magistral-io/MagistralPython | src/magistral/client/util/JksHandler.py | Python | mit | 3,975 |
import json, codecs, re
from abc import ABCMeta, abstractmethod
from PIL import Image, ExifTags
from witica.util import throw, sstr, suni
#regular expressions regarding item ids
RE_METAFILE = r'^meta\/[^\n]+$'
RE_FIRST_ITEMID = r'(?!meta\/)[^\n?@.]+'
RE_ITEMFILE_EXTENSION = r'[^\n?@\/]+'
RE_ITEMID = r'^' + RE_FIRST_ITEMID + '$'
RE_ITEMFILE = r'^' + RE_FIRST_ITEMID + '\.' + RE_ITEMFILE_EXTENSION + '$'
RE_ITEM_SPLIT_ITEMID_EXTENSION = r'^(' + RE_FIRST_ITEMID + ')\.(' + RE_ITEMFILE_EXTENSION + ')$'
RE_ITEM_REFERENCE = r'^!(?:.\/)?' + RE_FIRST_ITEMID + '$'
#regular expressions to be used for md files parsing
RE_MD_SPLIT_JSON_MD = "^\s*({[\s\S]*?})?[\s]*([^}\s][\s\S]*)$" #splits md file into the json metadata and markdown sections as caputre groups
RE_MD_SPLIT_TITLE_BODY = "^(?:#(?!#)[\t ]*([\S][^\n\r]*)(?:\n|\r\n?|$))?([\s\S]*)$" #splits markdown section into title and body sections as capture groups
RE_MD_NOBRACKET = r'[^\]\[]*'
RE_MD_BRK = ( r'\[('
+ (RE_MD_NOBRACKET + r'(\[')*6
+ (RE_MD_NOBRACKET+ r'\])*')*6
+ RE_MD_NOBRACKET + r')\]' )
RE_MD_IMAGE_LINK = r'\!' + RE_MD_BRK + r'\s*\((?!\!)(<.*?>|([^")]+"[^"]*"|[^\)]*))\)'
#  or 
#RE_MD_ITEM_LINK = r'\!' + RE_MD_BRK + r'\s*\(\!(<.*?>|([^")]+"[^"]*"|[^\)]*))\)'
#  or 
RE_MD_ITEM_LINK = r'!({[\s\S]*?})?\((![\s\S]+?)\)'
# !{renderparametersjson}(!itemid)
registered_extractors = []
def register(extension, extractor):
"""Register new metadata extractor for file extension"""
for (ext,extr) in registered_extractors:
if extension == ext:
raise ValueError("A metadata extractor for extension '" + extension + "' is already registered.")
#TODO: check type of extractor
registered_extractors.append((extension,extractor))
#print("registered: " + extension + " " + sstr(extractor))
def register_default_extractors():
register("item", JSONExtractor)
register("json", JSONExtractor)
register("md", MDExtractor)
register("txt", MDExtractor)
register("jpg", ImageExtractor)
register("jpeg", ImageExtractor)
def is_supported(extension):
for (ext,extractor) in registered_extractors:
if extension == ext:
return True
return False
def extract_metadata(filename):
extension = filename.rpartition(".")[2]
for (ext,extractor) in registered_extractors:
if extension == ext:
return extractor().extract_metadata(filename)
raise ValueError("Could not extract metadata, because a metadata extractor for extension '" + extension + "' is not registered.")
class MetadataExtractor(object):
__metaclass__ = ABCMeta
"""Abstract class representing a metadata extractor"""
	supported_extensions = []
def __init__(self):
pass
@abstractmethod
def extract_metadata(self, filename):
"""Extract metadata from filename and return metadata as json"""
pass
class JSONExtractor(MetadataExtractor):
__metaclass__ = ABCMeta
"""Extracts metadata from item or json file"""
supported_extensions = ["item", "json"];
def __init__(self):
pass
def extract_metadata(self, filename):
"""Extract metadata from filename and return metadata as json"""
f = codecs.open(filename, mode="r", encoding="utf-8")
return json.loads(f.read())
class MDExtractor(MetadataExtractor):
__metaclass__ = ABCMeta
"""Extracts metadata from markdown file"""
supported_extensions = ["md", "txt"];
def __init__(self):
pass
def extract_metadata(self, filename):
try:
meta = {}
#split into json and markdown part
f = codecs.open(filename, mode="r", encoding="utf-8")
match = re.match(RE_MD_SPLIT_JSON_MD,f.read())
f.close()
if not match:
raise IOError("Extracting metadata from file '" + sstr(filename) + "' failed. Could not split JSON and markdown parts.")
jsonstr, mdstr = match.groups()
#get title string (first heading in markdown string) if available
title = re.match(RE_MD_SPLIT_TITLE_BODY,mdstr).group(1)
if not title == None:
meta["title"] = title
#update with explicit json
if not jsonstr == None:
meta.update(json.loads(jsonstr))
return meta
except Exception, e:
throw(IOError, "Extracting metadata from file '" + sstr(filename) + "' failed.", e)
class ImageExtractor(MetadataExtractor):
__metaclass__ = ABCMeta
"""Extracts metadata from markdown file"""
supported_extensions = ["jpg", "jpeg"];
def __init__(self):
pass
def extract_metadata(self, filename):
try:
meta = {"type": "image"}
img = Image.open(filename)
exif = {
ExifTags.TAGS[k]: v
for k, v in img._getexif().items()
if k in ExifTags.TAGS
}
if ("ImageDescription" in exif or "UserComment" in exif):
if "UserComment" in exif:
meta["title"] = exif["UserComment"]
if "ImageDescription" in exif:
meta["title"] = exif["ImageDescription"]
if ("Make" in exif or "Model" in exif):
meta["camera"] = (exif["Make"] if "Make" in exif else "") + " " + (exif["Model"] if "Model" in exif else "")
if ("Orientation" in exif):
meta["orientation"] = exif["Orientation"]
if ("Artist" in exif):
meta["author"] = exif["Artist"]
if ("DateTimeOriginal" in exif):
meta["created"] = exif["DateTimeOriginal"] #TODO: convert to unix time
if ("Flash" in exif):
meta["flash"] = exif["Flash"]
if ("GPSInfo" in exif):
lat, lon = self.get_lat_lon(exif["GPSInfo"])
if lat and lon:
meta["lat"] = lat
meta["lon"] = lon
return meta
except Exception, e:
throw(IOError, "Extracting metadata from file '" + sstr(filename) + "' failed.", e)
    # The remaining functions in the ImageExtractor class are originally by Eran Sandler (MIT license), see https://gist.github.com/erans/983821
def _get_if_exist(self, data, key):
if key in data:
return data[key]
return None
def _convert_to_degress(self, value):
"""Helper function to convert the GPS coordinates stored in the EXIF to degress in float format"""
d0 = value[0][0]
d1 = value[0][1]
d = float(d0) / float(d1)
m0 = value[1][0]
m1 = value[1][1]
m = float(m0) / float(m1)
s0 = value[2][0]
s1 = value[2][1]
s = float(s0) / float(s1)
return d + (m / 60.0) + (s / 3600.0)
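        # Worked example (values hypothetical): EXIF stores each coordinate
        # as three rationals, e.g. ((48, 1), (25, 1), (30, 1)) for
        # 48 deg 25' 30", which converts to 48 + 25/60.0 + 30/3600.0 = 48.425.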
def get_lat_lon(self, gps_info_exif):
"""Returns the latitude and longitude, if available, from the provided exif_data (obtained through get_exif_data above)"""
lat = None
lon = None
gps_info = {
ExifTags.GPSTAGS[k]: v
for k, v in gps_info_exif.items()
if k in ExifTags.GPSTAGS
}
gps_latitude = self._get_if_exist(gps_info, "GPSLatitude")
gps_latitude_ref = self._get_if_exist(gps_info, 'GPSLatitudeRef')
gps_longitude = self._get_if_exist(gps_info, 'GPSLongitude')
gps_longitude_ref = self._get_if_exist(gps_info, 'GPSLongitudeRef')
if gps_latitude and gps_latitude_ref and gps_longitude and gps_longitude_ref:
lat = self._convert_to_degress(gps_latitude)
if gps_latitude_ref != "N":
lat = 0 - lat
lon = self._convert_to_degress(gps_longitude)
if gps_longitude_ref != "E":
lon = 0 - lon
return lat, lon | bitsteller/witica | witica/metadata/extractor.py | Python | mit | 7,072 |
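# Usage sketch for the extractor registry above (standalone; assumes the
# module is importable as witica.metadata.extractor, per the repo layout):
if __name__ == "__main__":
    from witica.metadata import extractor
    extractor.register_default_extractors()
    print(extractor.is_supported("md"))   # True
    print(extractor.is_supported("png"))  # False: no extractor registered
    # extractor.extract_metadata("notes.md") would dispatch to MDExtractor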
from __future__ import (absolute_import, division,
print_function, unicode_literals)
from builtins import *
from flask import Flask
from commitsan.hooks_app import app as hooks
frontend = Flask(__name__)
frontend.config['DEBUG'] = True
class CombiningMiddleware(object):
"""Allows one to mount middlewares or applications in a WSGI application.
Unlike DispatcherMiddleware, this one doesn't alter the environment of the
called application. That is, applications still receive the absolute path.
"""
def __init__(self, app, mounts=None):
self.app = app
self.mounts = mounts or {}
def __call__(self, environ, start_response):
script = environ.get('PATH_INFO', '')
while '/' in script:
if script in self.mounts:
app = self.mounts[script]
break
script = script.rsplit('/', 1)[0]
else:
app = self.mounts.get(script, self.app)
return app(environ, start_response)
app = CombiningMiddleware(frontend, {
'/hooks': hooks,
})
@frontend.route('/')
def hello():
return 'Hello World!'
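# Resolution sketch: exercise the mount lookup only, without issuing a real
# WSGI request (paths are hypothetical).
if __name__ == '__main__':
    for path in ('/', '/hooks', '/hooks/github'):
        script = path
        while '/' in script and script not in app.mounts:
            script = script.rsplit('/', 1)[0]
        handler = app.mounts.get(script, app.app)
        print(path, '->', handler)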
| abusalimov/commitsan | commitsan/web_app.py | Python | mit | 1,160 |
# Copyright (c) Charl P. Botha, TU Delft
# All rights reserved.
# See COPYRIGHT for details.
class MedicalMetaData:
def __init__(self):
self.medical_image_properties = None
self.direction_cosines = None
def close(self):
del self.medical_image_properties
del self.direction_cosines
def deep_copy(self, source_mmd):
"""Given another MedicalMetaData instance source_mmd, copy its
contents to this instance.
"""
        if source_mmd is not None:
self.medical_image_properties.DeepCopy(
source_mmd.medical_image_properties)
self.direction_cosines.DeepCopy(
source_mmd.direction_cosines)
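# Usage sketch (assumes VTK): both attributes are normally assigned VTK
# objects that provide DeepCopy(), e.g. a vtkMedicalImageProperties and a
# vtkMatrix4x4 for the direction cosines:
#
#     src = MedicalMetaData()
#     src.medical_image_properties = vtk.vtkMedicalImageProperties()
#     src.direction_cosines = vtk.vtkMatrix4x4()
#     dst = MedicalMetaData()
#     dst.medical_image_properties = vtk.vtkMedicalImageProperties()
#     dst.direction_cosines = vtk.vtkMatrix4x4()
#     dst.deep_copy(src)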
| chrisidefix/devide | module_kits/misc_kit/devide_types.py | Python | bsd-3-clause | 720 |
"""Configuration(yaml_files=[...]) tests."""
from dependency_injector import providers
from pytest import fixture, mark, raises
@fixture
def config(config_type, yaml_config_file_1, yaml_config_file_2):
if config_type == "strict":
return providers.Configuration(strict=True)
elif config_type == "default":
return providers.Configuration(yaml_files=[yaml_config_file_1, yaml_config_file_2])
else:
raise ValueError("Undefined config type \"{0}\"".format(config_type))
def test_load(config):
config.load()
assert config() == {
"section1": {
"value1": 11,
"value11": 11,
},
"section2": {
"value2": 2,
},
"section3": {
"value3": 3,
},
}
assert config.section1() == {"value1": 11, "value11": 11}
assert config.section1.value1() == 11
assert config.section1.value11() == 11
assert config.section2() == {"value2": 2}
assert config.section2.value2() == 2
assert config.section3() == {"value3": 3}
assert config.section3.value3() == 3
def test_get_files(config, yaml_config_file_1, yaml_config_file_2):
assert config.get_yaml_files() == [yaml_config_file_1, yaml_config_file_2]
def test_set_files(config):
config.set_yaml_files(["file1.yml", "file2.yml"])
assert config.get_yaml_files() == ["file1.yml", "file2.yml"]
def test_file_does_not_exist(config):
config.set_yaml_files(["./does_not_exist.yml"])
config.load()
assert config() == {}
@mark.parametrize("config_type", ["strict"])
def test_file_does_not_exist_strict_mode(config):
config.set_yaml_files(["./does_not_exist.yml"])
with raises(IOError):
config.load()
assert config() == {}
def test_required_file_does_not_exist(config):
config.set_yaml_files(["./does_not_exist.yml"])
with raises(IOError):
config.load(required=True)
@mark.parametrize("config_type", ["strict"])
def test_not_required_file_does_not_exist_strict_mode(config):
config.set_yaml_files(["./does_not_exist.yml"])
config.load(required=False)
assert config() == {}
def test_missing_envs_required(config, yaml_config_file_3):
with open(yaml_config_file_3, "w") as file:
file.write(
"section:\n"
" undefined: ${UNDEFINED}\n"
)
config.set_yaml_files([yaml_config_file_3])
with raises(ValueError, match="Missing required environment variable \"UNDEFINED\""):
config.load(envs_required=True)
@mark.parametrize("config_type", ["strict"])
def test_missing_envs_not_required_in_strict_mode(config, yaml_config_file_3):
with open(yaml_config_file_3, "w") as file:
file.write(
"section:\n"
" undefined: ${UNDEFINED}\n"
)
config.set_yaml_files([yaml_config_file_3])
config.load(envs_required=False)
assert config.section.undefined() is None
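# Stand-alone sketch of the API exercised above (file names hypothetical;
# later files override values from earlier ones, as test_load shows):
if __name__ == "__main__":
    config = providers.Configuration(yaml_files=["base.yml", "local.yml"])
    config.load(required=False)  # missing files are tolerated unless strict
    print(config.get_yaml_files())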
| ets-labs/python-dependency-injector | tests/unit/providers/configuration/test_yaml_files_in_init_py2_py3.py | Python | bsd-3-clause | 2,928 |
#!/usr/bin/python
# -- Content-Encoding: UTF-8 --
"""
Herald XMPP bot
:author: Thomas Calmant
:copyright: Copyright 2014, isandlaTech
:license: Apache License 2.0
:version: 0.0.5
:status: Alpha
..
Copyright 2014 isandlaTech
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
# Module version
__version_info__ = (0, 0, 5)
__version__ = ".".join(str(x) for x in __version_info__)
# Documentation strings format
__docformat__ = "restructuredtext en"
# ------------------------------------------------------------------------------
# Pelix XMPP utility classes
import pelix.misc.xmpp as pelixmpp
# Standard library
import logging
# ------------------------------------------------------------------------------
_logger = logging.getLogger(__name__)
# ------------------------------------------------------------------------------
class HeraldBot(pelixmpp.BasicBot, pelixmpp.ServiceDiscoveryMixin):
"""
XMPP Messenger for Herald.
"""
def __init__(self, jid=None, password=None, nick=None):
"""
        Sets up the bot
:param jid: Bot JID (None for anonymous connection)
:param password: Authentication password
:param nick: Nick name used in MUC rooms
"""
# Set up the object
pelixmpp.BasicBot.__init__(self, jid, password)
pelixmpp.ServiceDiscoveryMixin.__init__(self)
self._nick = nick
# Message callback
self.__cb_message = None
# Register to events
self.add_event_handler("message", self.__on_message)
def set_message_callback(self, callback):
"""
Sets the method to call when a message is received.
The method takes the message stanza as parameter.
:param callback: Method to call when a message is received
"""
self.__cb_message = callback
def __callback(self, data):
"""
Safely calls back a method
:param data: Associated stanza
"""
method = self.__cb_message
if method is not None:
try:
method(data)
except Exception as ex:
_logger.exception("Error calling method: %s", ex)
def __on_message(self, msg):
"""
XMPP message received
"""
msgtype = msg['type']
msgfrom = msg['from']
if msgtype == 'groupchat':
# MUC Room chat
if self._nick == msgfrom.resource:
# Loopback message
return
elif msgtype not in ('normal', 'chat'):
# Ignore non-chat messages
return
# Callback
self.__callback(msg)
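# Usage sketch (JID/password hypothetical; connecting and processing follow
# the underlying SleekXMPP API that pelix.misc.xmpp builds on):
#
#     bot = HeraldBot("herald@example.com", "secret", nick="herald")
#     bot.set_message_callback(lambda msg: _logger.info("got: %s", msg["body"]))
#     # ... then connect and start processing as usual for the bot class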
| gattazr/cohorte-herald | python/herald/transports/xmpp/bot.py | Python | apache-2.0 | 3,169 |
#!flask/bin/python
import imp
from migrate.versioning import api
from app import db
from config import SQLALCHEMY_DATABASE_URI
from config import SQLALCHEMY_MIGRATE_REPO
v = api.db_version(SQLALCHEMY_DATABASE_URI, SQLALCHEMY_MIGRATE_REPO)
migration = SQLALCHEMY_MIGRATE_REPO + ('/versions/%03d_migration.py' % (v+1))
tmp_module = imp.new_module('old_model')
old_model = api.create_model(SQLALCHEMY_DATABASE_URI, SQLALCHEMY_MIGRATE_REPO)
exec old_model in tmp_module.__dict__
script = api.make_update_script_for_model(SQLALCHEMY_DATABASE_URI,
SQLALCHEMY_MIGRATE_REPO,
tmp_module.meta, db.metadata)
open(migration, "wt").write(script)
api.upgrade(SQLALCHEMY_DATABASE_URI, SQLALCHEMY_MIGRATE_REPO)
v = api.db_version(SQLALCHEMY_DATABASE_URI, SQLALCHEMY_MIGRATE_REPO)
print('New migration saved as ' + migration)
print('Current database version: ' + str(v))
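# Usage sketch: run this script after editing the models, e.g.
#
#     python db_migrate.py
#
# It writes a new versions/NNN_migration.py under SQLALCHEMY_MIGRATE_REPO
# and upgrades the database to that revision.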
| tsinghuariit/DMP | project_src/db_migrate.py | Python | apache-2.0 | 938 |
import os
import codecs
from ni.core.workspace import load_workspace
def load_settings_from_file(filename, klass):
settings = klass(filename)
try:
fle = open(filename)
try:
fields = set(settings.fields)
for line in fle.readlines():
try:
name, value = line.strip().split('=', 1)
if name in fields:
settings[name] = value
except ValueError:
pass
return settings
finally:
fle.close()
except IOError:
pass
return settings
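# Settings files written by BaseSettings.save() below are plain
# "name=value" lines, one per field, e.g.:
#
#   indent_width=4
#   file_encoding=utf8
#   linesep=\n
#
# Lines without '=' (such as the leading comment) and unknown names are
# ignored by load_settings_from_file().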
class BaseSettings(object):
def __init__(self, filepath):
self.settings_dir = os.path.dirname(filepath)
self.filepath = filepath
self.fields = ['indent_width', 'tab_size', 'file_encoding', 'linesep',
'indent_spaces', 'most_recent_file']
self.field_dict = {}
self._defaults()
def _defaults(self):
# default values
self.field_dict.update({
'indent_width': 4,
'tab_size': 8,
'file_encoding': 'utf8',
'linesep': os.linesep,
'indent_spaces': True,
'most_recent_file': None
})
def __getattr__(self, name):
if name in self.fields:
return self.field_dict[name]
else:
err = "'%s' object has no attribute '%s'" % (type(self), name)
raise AttributeError(err)
def __getitem__(self, k):
return self.field_dict[k]
def __setitem__(self, name, value):
        if name not in self.fields:
            raise KeyError("'%s' is not a valid field." % name)
validator_name = '_val_'+name
if hasattr(self, validator_name):
validator = getattr(self, validator_name)
value = validator(value)
self.field_dict[name] = value
def get_recent_files_path(self):
return os.path.join(self.settings_dir, 'recent_files')
def get_workspaces_dir(self):
return os.path.join(self.settings_dir, 'workspaces')
#def get_workspace_path(self, slug):
# return os.path.join(self.get_workspaces_dir(), slug+'.workspace')
def load_workspaces(self):
workspaces = []
workspaces_dir = self.get_workspaces_dir()
filenames = os.listdir(workspaces_dir)
filenames.sort()
        for filename in filenames:
            extension = filename.split('.')[-1]
            if extension == 'workspace':
                slug = filename[:-10]
                workspace_path = os.path.join(workspaces_dir, slug + '.workspace')
                workspaces.append(load_workspace(workspace_path))
return workspaces
def save(self):
def format(name):
formatter_name = '_format_'+name
if hasattr(self, formatter_name):
formatter = getattr(self, formatter_name)
return formatter()
else:
return self[name]
text = "# NOTE: This file is managed automatically, so manual " +\
"changes might be lost.\n"
text += "\n".join([u"%s=%s" % (f, format(f)) for f in self.fields])
text += "\n"
if not os.path.exists(self.settings_dir):
os.mkdir(self.settings_dir)
fle = open(self.filepath, "w")
try:
fle.write(text)
finally:
fle.close()
# --- validators
    def _val_indent_width(self, value):
        value = int(value)
        if value not in (2, 4, 6, 8):
            raise ValueError("indent_width must be one of 2, 4, 6 or 8")
        return value
    def _val_tab_size(self, value):
        value = int(value)
        if value not in (2, 4, 6, 8):
            raise ValueError("tab_size must be one of 2, 4, 6 or 8")
        return value
def _val_indent_spaces(self, value):
return value in (True, 'True')
    def _val_file_encoding(self, value):
        # raises LookupError if the encoding name is unknown
        codecs.getencoder(value)
        return value
    def _val_linesep(self, value):
        value = value.replace('\\n', '\n').replace('\\r', '\r')
        if value not in ('\n', '\r', '\r\n'):
            raise ValueError("linesep must be one of \\n, \\r or \\r\\n")
        return value
def _val_most_recent_file(self, value):
if value and value != 'None':
return value
else:
return None
# --- formatters
def _format_linesep(self):
value = self.linesep.replace('\n', '\\n').replace('\r', '\\r')
return value
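if __name__ == '__main__':
    # Round-trip sketch: save defaults to a temporary directory, then load
    # them back (requires the ni package for the workspace import above).
    import tempfile
    path = os.path.join(tempfile.mkdtemp(), 'settings')
    settings = BaseSettings(path)
    settings['indent_width'] = 8
    settings.save()
    print(load_settings_from_file(path, BaseSettings).indent_width)  # 8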
| lerouxb/ni | editors/base/settings.py | Python | mit | 4,527 |
from django.conf import settings
import subprocess
class Zone(object):
@classmethod
def get_zone_list(cls, zone_file):
file = open(zone_file, 'r')
content = file.read()
zone_list = [line.strip() for line in content.split('\n') if line.strip() != '' and line.strip().find('//') != 0]
zone_list = [line.replace('{', ' { ').replace('}', ' } ').replace(';', ' ; ') for line in zone_list]
print(zone_list)
for zone in zone_list:
print(cls.parse_zone(zone))
return zone_list
@classmethod
def parse_zone(cls, line):
args = [obj.strip() for obj in line.split(' ') if obj.strip() != '']
res = {
'name': args[1].replace('"','').strip(),
}
print(args)
args = args[2:]
depth = 0
print(args)
        for i, arg in enumerate(args):
            if arg not in ['{', ';'] and i + 1 < len(args) and args[i + 1] not in ['{', ';']:
                res[arg] = args[i + 1].replace('"', '')
            if arg == 'allow-query':
                j = args[i + 1:].index('{')
                k = args[i + 1:].index('}')
                res[arg] = [item for item in args[i + j + 1:i + k] if item not in ['{', '}', ',', ';']]
return res
@classmethod
def check_os(cls):
cmd = "lsb_release -a"
completed_process = subprocess.run(cmd, stdout=subprocess.PIPE, shell=True)
res = str(completed_process.stdout, 'utf-8')
res = res.split('\n')
res = [line.split(':') for line in res]
obj = {}
for args in res:
if len(args) == 2:
obj[args[0].strip()] = args[1].strip()
print(settings.SUPPORTED_OSES)
for sup_os in settings.SUPPORTED_OSES:
            if obj['Distributor ID'] == sup_os['Distributor ID'] and (sup_os.get('Release') is None or obj['Release'] == sup_os['Release']):
obj['Supported OS'] = True
break
return obj
class App(object):
apps = {}
apps_version_cmd = {
'git': 'git --version',
'named': 'named -v',
}
def __init__(self, app_name):
self.name = app_name
self._get_vers_cmd = self.__class__.apps_version_cmd.get(self.name)
self._is_installed = None
self._app_version = None
self.__class__.apps[self.name] = self
@classmethod
def get(cls, app_name):
app = cls.apps.get(app_name)
if app is None:
app = cls(app_name)
return app
    @classmethod
    def check_version_of(cls, app_name):
        # Class-level convenience: look up (or create) the App and query
        # its version via the instance method below.
        return cls.get(app_name).check_app_version()
def check_app_version(self):
cmd = self._get_vers_cmd
if cmd:
completed_process = subprocess.run(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
if completed_process.returncode == 0:
return str(completed_process.stdout, 'utf-8')
return ''
@property
def is_installed(self):
self._is_installed = self.app_version != ''
return self._is_installed
@property
def app_version(self):
self._app_version = self.check_app_version()
return self._app_version
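if __name__ == '__main__':
    # parse_zone() expects braces and semicolons padded with spaces, as
    # get_zone_list() prepares them; the zone below is hypothetical.
    line = 'zone "example.com" { type master ; file "/etc/bind/db.example" ; } ;'
    print(Zone.parse_zone(line))
    # -> {'name': 'example.com', 'type': 'master', 'file': '/etc/bind/db.example'}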
| ragibkl/blackhole | website/common/utils.py | Python | gpl-3.0 | 3,481 |
# -*- coding: utf-8 -*-
from __future__ import division, print_function
import numpy as np
from sklearn.metrics import fbeta_score
__all__ = ["BaseClassifier"]
class BaseClassifier(object):
    def __init__(self):
        pass
def train(self, predictors, classifications, **kwargs):
raise NotImplementedError("function should be overloaded by subclass")
def classify(self, predictors, **kwargs):
raise NotImplementedError("function should be overloaded by subclass")
def score(self, predictors, true_classifications, **kwargs):
"""
Score the classifier's performance.
:param predictors:
An :class:`astropy.table.Table` of possible predictors, where the
number of rows is the number of objects in the validation set.
:param true_classifications:
An array of classifications for all objects in the validation set.
This array should have the same length as the number of predictor
rows.
:returns:
A four-length tuple containing the number of true transients found,
the number of transients missed, the number of false positives,
and the calculated score.
"""
N = len(true_classifications)
if len(predictors) != N:
raise ValueError("number of predictor rows does not match "\
"number of classifications")
classifications = self.classify(predictors).astype(int)
is_true_transient = (true_classifications == 1)
N_transients_found = np.sum(classifications[is_true_transient] == 1)
N_transients_missed = np.sum(classifications[is_true_transient] == 0)
N_false_positives = np.sum(classifications[~is_true_transient])
score = fbeta_score(true_classifications, classifications, beta=2)
return (
N_transients_found,
N_transients_missed,
N_false_positives,
score
)
| GOTO-OBS/goto-vegas | classifier/base.py | Python | mit | 2,014 |
import kivy
from kivy.app import App
from kivy.uix.scatter import Scatter
from kivy.uix.label import Label
from kivy.uix.floatlayout import FloatLayout
class HelloApp(App):
def build(self):
f = FloatLayout()
s = Scatter()
l = Label(text="Hello, World!",
font_size=150)
f.add_widget(s)
s.add_widget(l)
return f
if __name__ == "__main__":
HelloApp().run()
| izaharkin/kivy-simple-app | hello_world.py | Python | gpl-3.0 | 432 |
# written 2021-12-26 by mza
# last updated 2022-01-25 by mza
# to install on a circuitpython device:
# rsync -av *.py /media/circuitpython/
# cp -a particle_man.py /media/circuitpython/code.py
# cd ~/build/adafruit-circuitpython/bundle/lib
# rsync -r adafruit_register neopixel.mpy adafruit_pm25 adafruit_io adafruit_esp32spi adafruit_requests.mpy /media/circuitpython/lib/
header_string = "date/time"
dir = "/logs"
should_use_airlift = True
N = 24
use_built_in_wifi = True
delay_between_acquisitions = 2.3
delay_between_posting_and_next_acquisition = 1.0
import sys
import time
import atexit
import supervisor
import board
import busio
import pwmio
import simpleio
#from adafruit_onewire.bus import OneWireBus
#import pct2075_adafruit
#import bh1750_adafruit
#import ltr390_adafruit
#import vcnl4040_adafruit
#import as7341_adafruit
#import pcf8523_adafruit
#import microsd_adafruit
import neopixel_adafruit
import pm25_adafruit
#import ds18b20_adafruit
#import tsl2591_adafruit
#import anemometer
#import sht31d_adafruit
import airlift
#import gps_adafruit
from DebugInfoWarningError24 import debug, info, warning, error, debug2, debug3, set_verbosity, create_new_logfile_with_string_embedded, flush
import generic
def print_header():
info("" + header_string)
def print_compact(string):
try:
date = time.strftime("%Y-%m-%d+%X")
except:
try:
date = pcf8523_adafruit.get_timestring1()
except:
try:
date = gps_adafruit.get_time()
except:
date = ""
info("%s%s" % (date, string))
def main():
global neopixel_is_available
try:
neopixel_is_available = neopixel_adafruit.setup_neopixel()
except:
warning("error setting up neopixel")
if neopixel_is_available:
neopixel_adafruit.set_color(100, 100, 100)
global i2c
try:
i2c = busio.I2C(board.SCL1, board.SDA1)
string = "using I2C1 "
except:
i2c = busio.I2C(board.SCL, board.SDA)
string = "using I2C0 "
global pm25_is_available
global header_string
try:
i2c_address = pm25_adafruit.setup(i2c, N)
pm25_is_available = True
header_string += ", pm1.0s, pm2.5s, pm10.0s, pm1.0e, pm2.5e, pm10.0e, 0.3um, 0.5um, 1.0um, 2.5um, 5.0um, 10.0um"
except:
warning("pm25 not found")
pm25_is_available = False
global airlift_is_available
if should_use_airlift:
if use_built_in_wifi:
airlift_is_available = airlift.setup_wifi("RoamIfYouWantTwo")
else:
airlift_is_available = airlift.setup_airlift("RoamIfYouWantTwo", spi, board.D13, board.D11, board.D12)
if airlift_is_available:
info("airlift is available")
header_string += ", RSSI-dB"
airlift.setup_feed("particle0p3")
airlift.setup_feed("particle0p5")
airlift.setup_feed("particle1p0")
airlift.setup_feed("particle2p5")
airlift.setup_feed("particle5p0")
airlift.setup_feed("particle10p0")
else:
info("airlift is NOT available")
airlift_is_available = False
if 0:
if airlift_is_available:
airlift.update_time_from_server()
#gnuplot> set key autotitle columnheader
#gnuplot> set style data lines
#gnuplot> plot for [i=1:14] "solar_water_heater.log" using 0:i
print_header()
global i
i = 0
while pm25_adafruit.test_if_present():
loop()
info("pm25 not available; cannot continue")
if neopixel_is_available:
neopixel_adafruit.set_color(0, 255, 255)
def loop():
#info("")
#info(str(i))
if neopixel_is_available:
neopixel_adafruit.set_color(255, 0, 0)
string = ""
if pm25_is_available:
string += pm25_adafruit.measure_string()
if airlift_is_available:
string += airlift.measure_string()
print_compact(string)
flush()
if neopixel_is_available:
neopixel_adafruit.set_color(0, 255, 0)
global i
i += 1
if 0==i%N:
if pm25_is_available:
pm25_adafruit.show_average_values()
if airlift_is_available:
try:
airlift.post_data("particle0p3", pm25_adafruit.get_average_values()[6])
except:
warning("couldn't post 0p3 data for pm25")
try:
airlift.post_data("particle0p5", pm25_adafruit.get_average_values()[7])
except:
warning("couldn't post 0p5 data for pm25")
try:
airlift.post_data("particle1p0", pm25_adafruit.get_average_values()[8])
except:
warning("couldn't post 1p0 data for pm25")
try:
airlift.post_data("particle2p5", pm25_adafruit.get_average_values()[9])
except:
warning("couldn't post 2p5 data for pm25")
try:
airlift.post_data("particle5p0", pm25_adafruit.get_average_values()[10])
except:
warning("couldn't post 5p0 data for pm25")
try:
airlift.post_data("particle10p0", pm25_adafruit.get_average_values()[11])
except:
warning("couldn't post 10p0 data for pm25")
info("waiting...")
time.sleep(delay_between_posting_and_next_acquisition)
if neopixel_is_available:
neopixel_adafruit.set_color(0, 0, 255)
if airlift_is_available:
if 0==i%86300:
airlift.update_time_from_server()
time.sleep(delay_between_acquisitions)
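# Possible tightening of the repeated post_data blocks above (sketch, not
# wired into loop(); feed names and value indices mirror the calls there):
PM25_FEEDS = (("particle0p3", 6), ("particle0p5", 7), ("particle1p0", 8),
              ("particle2p5", 9), ("particle5p0", 10), ("particle10p0", 11))
def post_all_pm25_feeds():
    values = pm25_adafruit.get_average_values()
    for feed, index in PM25_FEEDS:
        try:
            airlift.post_data(feed, values[index])
        except:
            warning("couldn't post data for feed " + feed)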
if __name__ == "__main__":
#supervisor.disable_autoreload()
atexit.register(generic.reset)
try:
main()
except KeyboardInterrupt:
info("caught ctrl-c")
flush()
atexit.unregister(generic.reset)
sys.exit(0)
except ReloadException:
info("reload exception")
flush()
atexit.unregister(generic.reset)
time.sleep(1)
supervisor.reload()
info("leaving program...")
flush()
generic.reset()
| mzandrew/bin | embedded/particle_man.py | Python | gpl-3.0 | 5,252 |
#!/usr/bin/env python
import os
import sys
import dotenv
PROJECT_PATH = os.path.dirname(__file__)
dotenv.load_dotenv(os.path.join(PROJECT_PATH, ".env"))
dotenv.load_dotenv(os.path.join(PROJECT_PATH, ".env_defaults"))
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "eth_alarm.settings")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
| pipermerriam/ethereum-alarm-clock-web | manage.py | Python | mit | 429 |
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import re
import time
from openerp import tools
from openerp.osv import fields, osv
from openerp.tools import float_round, float_is_zero, float_compare
from openerp.tools.translate import _
CURRENCY_DISPLAY_PATTERN = re.compile(r'(\w+)\s*(?:\((.*)\))?')
class res_currency(osv.osv):
def _current_rate(self, cr, uid, ids, name, arg, context=None):
return self._get_current_rate(cr, uid, ids, context=context)
def _current_rate_silent(self, cr, uid, ids, name, arg, context=None):
return self._get_current_rate(cr, uid, ids, raise_on_no_rate=False, context=context)
def _get_current_rate(self, cr, uid, ids, raise_on_no_rate=True, context=None):
if context is None:
context = {}
res = {}
date = context.get('date') or time.strftime('%Y-%m-%d')
for id in ids:
cr.execute('SELECT rate FROM res_currency_rate '
'WHERE currency_id = %s '
'AND name <= %s '
'ORDER BY name desc LIMIT 1',
(id, date))
if cr.rowcount:
res[id] = cr.fetchone()[0]
elif not raise_on_no_rate:
res[id] = 0
else:
currency = self.browse(cr, uid, id, context=context)
                raise osv.except_osv(_('Error!'), _("No currency rate associated for currency '%s' for the given period") % currency.name)
return res
_name = "res.currency"
_description = "Currency"
_columns = {
# Note: 'code' column was removed as of v6.0, the 'name' should now hold the ISO code.
'name': fields.char('Currency', size=3, required=True, help="Currency Code (ISO 4217)"),
'symbol': fields.char('Symbol', size=4, help="Currency sign, to be used when printing amounts."),
'rate': fields.function(_current_rate, string='Current Rate', digits=(12,6),
help='The rate of the currency to the currency of rate 1.'),
# Do not use for computation ! Same as rate field with silent failing
'rate_silent': fields.function(_current_rate_silent, string='Current Rate', digits=(12,6),
help='The rate of the currency to the currency of rate 1 (0 if no rate defined).'),
'rate_ids': fields.one2many('res.currency.rate', 'currency_id', 'Rates'),
'accuracy': fields.integer('Computational Accuracy'),
'rounding': fields.float('Rounding Factor', digits=(12,6)),
'active': fields.boolean('Active'),
'company_id':fields.many2one('res.company', 'Company'),
'date': fields.date('Date'),
'base': fields.boolean('Base'),
        'position': fields.selection([('after','After Amount'),('before','Before Amount')], 'Symbol Position', help="Determines whether the currency symbol is placed before or after the amount.")
}
_defaults = {
'active': 1,
'position' : 'after',
'rounding': 0.01,
'accuracy': 4,
'company_id': False,
}
_sql_constraints = [
# this constraint does not cover all cases due to SQL NULL handling for company_id,
# so it is complemented with a unique index (see below). The constraint and index
# share the same prefix so that IntegrityError triggered by the index will be caught
# and reported to the user with the constraint's error message.
('unique_name_company_id', 'unique (name, company_id)', 'The currency code must be unique per company!'),
]
_order = "name"
def init(self, cr):
# CONSTRAINT/UNIQUE INDEX on (name,company_id)
# /!\ The unique constraint 'unique_name_company_id' is not sufficient, because SQL92
# only support field names in constraint definitions, and we need a function here:
# we need to special-case company_id to treat all NULL company_id as equal, otherwise
# we would allow duplicate "global" currencies (all having company_id == NULL)
cr.execute("""SELECT indexname FROM pg_indexes WHERE indexname = 'res_currency_unique_name_company_id_idx'""")
if not cr.fetchone():
cr.execute("""CREATE UNIQUE INDEX res_currency_unique_name_company_id_idx
ON res_currency
(name, (COALESCE(company_id,-1)))""")
def read(self, cr, user, ids, fields=None, context=None, load='_classic_read'):
res = super(res_currency, self).read(cr, user, ids, fields, context, load)
currency_rate_obj = self.pool.get('res.currency.rate')
values = res
if not isinstance(values, list):
values = [values]
for r in values:
            if 'rate_ids' in r:
rates=r['rate_ids']
if rates:
currency_date = currency_rate_obj.read(cr, user, rates[0], ['name'])['name']
r['date'] = currency_date
return res
def name_search(self, cr, user, name='', args=None, operator='ilike', context=None, limit=100):
if not args:
args = []
results = super(res_currency,self)\
.name_search(cr, user, name, args, operator=operator, context=context, limit=limit)
if not results:
name_match = CURRENCY_DISPLAY_PATTERN.match(name)
if name_match:
results = super(res_currency,self)\
.name_search(cr, user, name_match.group(1), args, operator=operator, context=context, limit=limit)
return results
def name_get(self, cr, uid, ids, context=None):
if not ids:
return []
if isinstance(ids, (int, long)):
ids = [ids]
reads = self.read(cr, uid, ids, ['name','symbol'], context=context, load='_classic_write')
return [(x['id'], tools.ustr(x['name'])) for x in reads]
def round(self, cr, uid, currency, amount):
"""Return ``amount`` rounded according to ``currency``'s
rounding rules.
:param browse_record currency: currency for which we are rounding
:param float amount: the amount to round
:return: rounded float
"""
return float_round(amount, precision_rounding=currency.rounding)
def compare_amounts(self, cr, uid, currency, amount1, amount2):
"""Compare ``amount1`` and ``amount2`` after rounding them according to the
        given currency's precision.
An amount is considered lower/greater than another amount if their rounded
value is different. This is not the same as having a non-zero difference!
For example 1.432 and 1.431 are equal at 2 digits precision,
so this method would return 0.
However 0.006 and 0.002 are considered different (returns 1) because
they respectively round to 0.01 and 0.0, even though
0.006-0.002 = 0.004 which would be considered zero at 2 digits precision.
:param browse_record currency: currency for which we are rounding
:param float amount1: first amount to compare
:param float amount2: second amount to compare
:return: (resp.) -1, 0 or 1, if ``amount1`` is (resp.) lower than,
equal to, or greater than ``amount2``, according to
``currency``'s rounding.
"""
return float_compare(amount1, amount2, precision_rounding=currency.rounding)
def is_zero(self, cr, uid, currency, amount):
"""Returns true if ``amount`` is small enough to be treated as
zero according to ``currency``'s rounding rules.
Warning: ``is_zero(amount1-amount2)`` is not always equivalent to
``compare_amounts(amount1,amount2) == 0``, as the former will round after
computing the difference, while the latter will round before, giving
different results for e.g. 0.006 and 0.002 at 2 digits precision.
:param browse_record currency: currency for which we are rounding
:param float amount: amount to compare with currency's zero
"""
return float_is_zero(amount, precision_rounding=currency.rounding)
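    # Worked example of the two notions above with a 2-digit currency
    # (rounding=0.01): compare_amounts(0.006, 0.002) rounds to 0.01 vs 0.0
    # and returns 1, while is_zero(0.006 - 0.002) rounds the difference
    # 0.004 to 0.0 and returns True.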
def _get_conversion_rate(self, cr, uid, from_currency, to_currency, context=None):
if context is None:
context = {}
ctx = context.copy()
from_currency = self.browse(cr, uid, from_currency.id, context=ctx)
to_currency = self.browse(cr, uid, to_currency.id, context=ctx)
if from_currency.rate == 0 or to_currency.rate == 0:
date = context.get('date', time.strftime('%Y-%m-%d'))
if from_currency.rate == 0:
currency_symbol = from_currency.symbol
else:
currency_symbol = to_currency.symbol
raise osv.except_osv(_('Error'), _('No rate found \n' \
'for the currency: %s \n' \
'at the date: %s') % (currency_symbol, date))
return to_currency.rate/from_currency.rate
def _compute(self, cr, uid, from_currency, to_currency, from_amount, round=True, context=None):
if (to_currency.id == from_currency.id):
if round:
return self.round(cr, uid, to_currency, from_amount)
else:
return from_amount
else:
rate = self._get_conversion_rate(cr, uid, from_currency, to_currency, context=context)
if round:
return self.round(cr, uid, to_currency, from_amount * rate)
else:
return from_amount * rate
def compute(self, cr, uid, from_currency_id, to_currency_id, from_amount,
round=True, context=None):
context = context or {}
if not from_currency_id:
from_currency_id = to_currency_id
if not to_currency_id:
to_currency_id = from_currency_id
xc = self.browse(cr, uid, [from_currency_id,to_currency_id], context=context)
from_currency = (xc[0].id == from_currency_id and xc[0]) or xc[1]
to_currency = (xc[0].id == to_currency_id and xc[0]) or xc[1]
return self._compute(cr, uid, from_currency, to_currency, from_amount, round, context)
class res_currency_rate(osv.osv):
_name = "res.currency.rate"
_description = "Currency Rate"
_columns = {
'name': fields.datetime('Date', required=True, select=True),
'rate': fields.float('Rate', digits=(12, 6), help='The rate of the currency to the currency of rate 1'),
'currency_id': fields.many2one('res.currency', 'Currency', readonly=True),
}
_defaults = {
'name': lambda *a: time.strftime('%Y-%m-%d'),
}
_order = "name desc"
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| oscarolar/odoo | openerp/addons/base/res/res_currency.py | Python | agpl-3.0 | 11,762 |
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import logging
from pylib import android_commands
from pylib.device import device_utils
class OmapThrottlingDetector(object):
"""Class to detect and track thermal throttling on an OMAP 4."""
OMAP_TEMP_FILE = ('/sys/devices/platform/omap/omap_temp_sensor.0/'
'temperature')
@staticmethod
def IsSupported(device):
return device.FileExists(OmapThrottlingDetector.OMAP_TEMP_FILE)
def __init__(self, device):
self._device = device
@staticmethod
def BecameThrottled(log_line):
return 'omap_thermal_throttle' in log_line
@staticmethod
def BecameUnthrottled(log_line):
return 'omap_thermal_unthrottle' in log_line
@staticmethod
def GetThrottlingTemperature(log_line):
if 'throttle_delayed_work_fn' in log_line:
return float([s for s in log_line.split() if s.isdigit()][0]) / 1000.0
def GetCurrentTemperature(self):
tempdata = self._device.ReadFile(OmapThrottlingDetector.OMAP_TEMP_FILE)
return float(tempdata) / 1000.0
class ExynosThrottlingDetector(object):
"""Class to detect and track thermal throttling on an Exynos 5."""
@staticmethod
def IsSupported(device):
return device.FileExists('/sys/bus/exynos5-core')
def __init__(self, device):
pass
@staticmethod
def BecameThrottled(log_line):
return 'exynos_tmu: Throttling interrupt' in log_line
@staticmethod
def BecameUnthrottled(log_line):
return 'exynos_thermal_unthrottle: not throttling' in log_line
@staticmethod
def GetThrottlingTemperature(_log_line):
return None
@staticmethod
def GetCurrentTemperature():
return None
class ThermalThrottle(object):
"""Class to detect and track thermal throttling.
Usage:
Wait for IsThrottled() to be False before running test
After running test call HasBeenThrottled() to find out if the
test run was affected by thermal throttling.
"""
def __init__(self, device):
# TODO(jbudorick) Remove once telemetry gets switched over.
if isinstance(device, android_commands.AndroidCommands):
device = device_utils.DeviceUtils(device)
self._device = device
self._throttled = False
self._detector = None
if OmapThrottlingDetector.IsSupported(device):
self._detector = OmapThrottlingDetector(device)
elif ExynosThrottlingDetector.IsSupported(device):
self._detector = ExynosThrottlingDetector(device)
def HasBeenThrottled(self):
"""True if there has been any throttling since the last call to
HasBeenThrottled or IsThrottled.
"""
return self._ReadLog()
def IsThrottled(self):
"""True if currently throttled."""
self._ReadLog()
return self._throttled
def _ReadLog(self):
if not self._detector:
return False
has_been_throttled = False
serial_number = str(self._device)
log = self._device.RunShellCommand('dmesg -c')
degree_symbol = unichr(0x00B0)
for line in log:
if self._detector.BecameThrottled(line):
if not self._throttled:
logging.warning('>>> Device %s thermally throttled', serial_number)
self._throttled = True
has_been_throttled = True
elif self._detector.BecameUnthrottled(line):
if self._throttled:
logging.warning('>>> Device %s thermally unthrottled', serial_number)
self._throttled = False
has_been_throttled = True
temperature = self._detector.GetThrottlingTemperature(line)
if temperature is not None:
logging.info(u'Device %s thermally throttled at %3.1f%sC',
serial_number, temperature, degree_symbol)
if logging.getLogger().isEnabledFor(logging.DEBUG):
# Print current temperature of CPU SoC.
temperature = self._detector.GetCurrentTemperature()
if temperature is not None:
logging.debug(u'Current SoC temperature of %s = %3.1f%sC',
serial_number, temperature, degree_symbol)
# Print temperature of battery, to give a system temperature
dumpsys_log = self._device.RunShellCommand('dumpsys battery')
for line in dumpsys_log:
if 'temperature' in line:
btemp = float([s for s in line.split() if s.isdigit()][0]) / 10.0
logging.debug(u'Current battery temperature of %s = %3.1f%sC',
serial_number, btemp, degree_symbol)
return has_been_throttled
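# Minimal usage sketch per the class docstring (the serial number is a
# placeholder for an attached device):
if __name__ == '__main__':
  device = device_utils.DeviceUtils('0123456789ab')
  throttle = ThermalThrottle(device)
  while throttle.IsThrottled():
    pass  # wait for the device to cool down before starting the test
  # ... run the performance test here ...
  if throttle.HasBeenThrottled():
    logging.warning('Test run may have been affected by thermal throttling.')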
| guorendong/iridium-browser-ubuntu | build/android/pylib/perf/thermal_throttle.py | Python | bsd-3-clause | 4,529 |
from typing import NamedTuple
class _version_info(NamedTuple):
    # similar to sys.version_info
major: int
minor: int
micro: int
version_info = _version_info(0, 10, 1)
__version__ = '.'.join(map(str, version_info))
def config_for_app(config):
import warnings
warnings.warn((
"It is safe to remove this function call now. "
"import biothings.hub will take care of it."
), DeprecationWarning)
| biothings/biothings.api | biothings/__init__.py | Python | apache-2.0 | 438 |
# Copyright (c) 2016-present, Facebook, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##############################################################################
## @package layer_model_instantiator
# Module caffe2.python.layer_model_instantiator
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from caffe2.python import core, schema
from caffe2.python.layers.layers import InstantiationContext
from caffe2.python.layers.tags import Tags
def _filter_layers(layers, include_tags):
if include_tags is None:
return layers
include_tags = set(include_tags)
return [l for l in layers if not include_tags.isdisjoint(l.tags)]
def shrink_output_schema(net, out_schema):
if len(out_schema.field_names()) <= 1:
return out_schema
exists = [net.BlobIsDefined(blob) for blob in out_schema.field_blobs()]
return schema.from_column_list(
[
col_name for ok, col_name in
zip(exists, out_schema.field_names()) if ok
],
[
col_type for ok, col_type in
zip(exists, out_schema.field_types()) if ok
],
[
col_blob for ok, col_blob in
zip(exists, out_schema.field_blobs()) if ok
],
[
col_meta for ok, col_meta in
zip(exists, out_schema.field_metadata()) if ok
]
)
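# Illustration of shrink_output_schema(): if the model's output schema
# declares columns ('loss', 'prediction') but the instantiated net never
# defined the 'loss' blob (e.g. its layer was excluded by tags), the
# returned schema keeps only 'prediction', so set_output_record() does not
# reference blobs missing from the net.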
def generate_predict_net(model, include_tags=None):
predict_net = core.Net('predict_net')
for layer in _filter_layers(model.layers, include_tags):
if Tags.EXCLUDE_FROM_PREDICTION not in layer.tags:
layer.add_operators(
predict_net, context=InstantiationContext.PREDICTION)
predict_net.set_input_record(model.input_feature_schema.clone())
output_schema = shrink_output_schema(
predict_net, model.output_schema.clone()
)
predict_net.set_output_record(output_schema)
return predict_net
def generate_eval_net(model, include_tags=None):
eval_net = core.Net('eval_net')
for layer in _filter_layers(model.layers, include_tags):
if Tags.EXCLUDE_FROM_EVAL not in layer.tags:
layer.add_operators(eval_net, context=InstantiationContext.EVAL)
input_schema = model.input_feature_schema + model.trainer_extra_schema
eval_net.set_input_record(input_schema)
output_schema = shrink_output_schema(
eval_net, model.output_schema + model.metrics_schema
)
eval_net.set_output_record(output_schema)
return eval_net
def _generate_training_net_only(model, include_tags=None):
train_net = core.Net('train_net')
train_init_net = model.create_init_net('train_init_net')
for layer in _filter_layers(model.layers, include_tags):
if Tags.EXCLUDE_FROM_TRAIN not in layer.tags:
layer.add_operators(train_net, train_init_net)
input_schema = model.input_feature_schema + model.trainer_extra_schema
train_net.set_input_record(input_schema)
output_schema = shrink_output_schema(
train_net, model.output_schema + model.metrics_schema
)
train_net.set_output_record(output_schema)
return train_init_net, train_net
def generate_training_nets_forward_only(model, include_tags=None):
train_init_net, train_net = _generate_training_net_only(model, include_tags)
return train_init_net, train_net
def generate_training_nets(model, include_tags=None):
train_init_net, train_net = _generate_training_net_only(model, include_tags)
loss = model.loss
grad_map = train_net.AddGradientOperators(loss.field_blobs())
model.apply_optimizers(train_net, train_init_net, grad_map)
return train_init_net, train_net
| davinwang/caffe2 | caffe2/python/layer_model_instantiator.py | Python | apache-2.0 | 4,254 |
# Generated by Django 2.1.7 on 2019-04-26 11:06
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("mediafiles", "0003_auto_20190119_1425"),
("assignments", "0006_auto_20190119_1425"),
]
operations = [
migrations.AddField(
model_name="assignment",
name="attachments",
field=models.ManyToManyField(blank=True, to="mediafiles.Mediafile"),
)
]
| jwinzer/OpenSlides | server/openslides/assignments/migrations/0007_assignment_attachments.py | Python | mit | 477 |
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Node driver for Aliyun.
"""
try:
import simplejson as json
except ImportError:
import json
import time
from libcloud.common.aliyun import AliyunXmlResponse, SignedAliyunConnection
from libcloud.common.types import LibcloudError
from libcloud.compute.base import Node, NodeDriver, NodeImage, NodeSize, \
StorageVolume, VolumeSnapshot, NodeLocation
from libcloud.compute.types import NodeState, StorageVolumeState, \
VolumeSnapshotState
from libcloud.utils.py3 import _real_unicode as u
from libcloud.utils.xml import findall, findattr, findtext
__all__ = [
'DiskCategory',
'InternetChargeType',
'ECS_API_VERSION',
'ECSDriver',
'ECSSecurityGroup',
'ECSZone'
]
ECS_API_VERSION = '2014-05-26'
ECS_API_ENDPOINT = 'ecs.aliyuncs.com'
DEFAULT_SIGNATURE_VERSION = '1.0'
def _parse_bool(value):
if isinstance(value, bool):
return value
if u(value).lower() == 'true':
return True
return False
"""
Define the extra dictionary for specific resources
"""
RESOURCE_EXTRA_ATTRIBUTES_MAP = {
'node': {
'description': {
'xpath': 'Description',
'transform_func': u
},
'image_id': {
'xpath': 'ImageId',
'transform_func': u
},
'zone_id': {
'xpath': 'ZoneId',
'transform_func': u
},
'instance_type': {
'xpath': 'InstanceType',
'transform_func': u
},
'instance_type_family': {
'xpath': 'InstanceTypeFamily',
'transform_func': u
},
'hostname': {
'xpath': 'HostName',
'transform_func': u
},
'serial_number': {
'xpath': 'SerialNumber',
'transform_func': u
},
'internet_charge_type': {
'xpath': 'InternetChargeType',
'transform_func': u
},
'creation_time': {
'xpath': 'CreationTime',
'transform_func': u
},
'instance_network_type': {
'xpath': 'InstanceNetworkType',
'transform_func': u
},
'instance_charge_type': {
'xpath': 'InstanceChargeType',
'transform_func': u
},
'device_available': {
'xpath': 'DeviceAvailable',
'transform_func': u
},
'io_optimized': {
'xpath': 'IoOptimized',
'transform_func': u
},
'expired_time': {
'xpath': 'ExpiredTime',
'transform_func': u
}
},
'vpc_attributes': {
'vpc_id': {
'xpath': 'VpcId',
'transform_func': u
},
'vswitch_id': {
'xpath': 'VSwitchId',
'transform_func': u
},
'private_ip_address': {
'xpath': 'PrivateIpAddress/IpAddress',
'transform_func': u
},
'nat_ip_address': {
'xpath': 'NatIpAddress',
'transform_func': u
}
},
'eip_address_associate': {
'allocation_id': {
'xpath': 'AllocationId',
'transform_func': u
},
'ip_address': {
'xpath': 'IpAddress',
'transform_func': u
},
'bandwidth': {
'xpath': 'Bandwidth',
'transform_func': int
},
'internet_charge_type': {
'xpath': 'InternetChargeType',
'transform_func': u
}
},
'operation_locks': {
'lock_reason': {
'xpath': 'LockReason',
'transform_func': u
}
},
'volume': {
'region_id': {
'xpath': 'RegionId',
'transform_func': u
},
'zone_id': {
'xpath': 'ZoneId',
'transform_func': u
},
'description': {
'xpath': 'Description',
'transform_func': u
},
'type': {
'xpath': 'Type',
'transform_func': u
},
'category': {
'xpath': 'Category',
'transform_func': u
},
'image_id': {
'xpath': 'ImageId',
'transform_func': u
},
'source_snapshot_id': {
'xpath': 'SourceSnapshotId',
'transform_func': u
},
'product_code': {
'xpath': 'ProductCode',
'transform_func': u
},
'portable': {
'xpath': 'Portable',
'transform_func': _parse_bool
},
'instance_id': {
'xpath': 'InstanceId',
'transform_func': u
},
'device': {
'xpath': 'Device',
'transform_func': u
},
'delete_with_instance': {
'xpath': 'DeleteWithInstance',
'transform_func': _parse_bool
},
'enable_auto_snapshot': {
'xpath': 'EnableAutoSnapshot',
'transform_func': _parse_bool
},
'creation_time': {
'xpath': 'CreationTime',
'transform_func': u
},
'attached_time': {
'xpath': 'AttachedTime',
'transform_func': u
},
'detached_time': {
'xpath': 'DetachedTime',
'transform_func': u
},
'disk_charge_type': {
'xpath': 'DiskChargeType',
'transform_func': u
}
},
'snapshot': {
'snapshot_name': {
'xpath': 'SnapshotName',
'transform_func': u
},
'description': {
'xpath': 'Description',
'transform_func': u
},
'progress': {
'xpath': 'Progress',
'transform_func': u
},
'source_disk_id': {
'xpath': 'SourceDiskId',
'transform_func': u
},
'source_disk_size': {
'xpath': 'SourceDiskSize',
'transform_func': int
},
'source_disk_type': {
'xpath': 'SourceDiskType',
'transform_func': u
},
'product_code': {
'xpath': 'ProductCode',
'transform_func': u
},
'usage': {
'xpath': 'Usage',
'transform_func': u
}
},
'image': {
'image_version': {
'xpath': 'ImageVersion',
'transform_func': u
},
'os_type': {
'xpath': 'OSType',
'transform_func': u
},
'platform': {
'xpath': 'Platform',
'transform_func': u
},
'architecture': {
'xpath': 'Architecture',
'transform_func': u
},
'description': {
'xpath': 'Description',
'transform_func': u
},
'size': {
'xpath': 'Size',
'transform_func': int
},
'image_owner_alias': {
'xpath': 'ImageOwnerAlias',
'transform_func': u
},
'os_name': {
'xpath': 'OSName',
'transform_func': u
},
'product_code': {
'xpath': 'ProductCode',
'transform_func': u
},
'is_subscribed': {
'xpath': 'IsSubscribed',
'transform_func': _parse_bool
},
'progress': {
'xpath': 'Progress',
'transform_func': u
},
'creation_time': {
'xpath': 'CreationTime',
'transform_func': u
},
'usage': {
'xpath': 'Usage',
'transform_func': u
},
'is_copied': {
'xpath': 'IsCopied',
'transform_func': _parse_bool
}
},
'disk_device_mapping': {
'snapshot_id': {
'xpath': 'SnapshotId',
'transform_func': u
},
'size': {
'xpath': 'Size',
'transform_func': int
},
'device': {
'xpath': 'Device',
'transform_func': u
},
'format': {
'xpath': 'Format',
'transform_func': u
},
'import_oss_bucket': {
'xpath': 'ImportOSSBucket',
'transform_func': u
},
'import_oss_object': {
'xpath': 'ImportOSSObject',
'transform_func': u
}
}
}
class ECSConnection(SignedAliyunConnection):
"""
Represents a single connection to the Aliyun ECS Endpoint.
"""
api_version = ECS_API_VERSION
host = ECS_API_ENDPOINT
responseCls = AliyunXmlResponse
service_name = 'ecs'
class ECSSecurityGroup(object):
"""
Security group used to control nodes internet and intranet accessibility.
"""
def __init__(self, id, name, description=None, driver=None, vpc_id=None,
creation_time=None):
self.id = id
self.name = name
self.description = description
self.driver = driver
self.vpc_id = vpc_id
self.creation_time = creation_time
def __repr__(self):
return ('<ECSSecurityGroup: id=%s, name=%s, driver=%s ...>' %
(self.id, self.name, self.driver.name))
class ECSSecurityGroupAttribute(object):
"""
Security group attribute.
"""
def __init__(self, ip_protocol=None, port_range=None,
source_group_id=None, policy=None, nic_type=None):
self.ip_protocol = ip_protocol
self.port_range = port_range
self.source_group_id = source_group_id
self.policy = policy
self.nic_type = nic_type
def __repr__(self):
return ('<ECSSecurityGroupAttribute: ip_protocol=%s ...>' %
(self.ip_protocol))
class ECSZone(object):
"""
ECSZone used to represent an availability zone in a region.
"""
def __init__(self, id, name, driver=None,
available_resource_types=None,
available_instance_types=None,
available_disk_categories=None):
self.id = id
self.name = name
self.driver = driver
self.available_resource_types = available_resource_types
self.available_instance_types = available_instance_types
self.available_disk_categories = available_disk_categories
def __repr__(self):
return ('<ECSZone: id=%s, name=%s, driver=%s>' %
(self.id, self.name, self.driver))
class InternetChargeType(object):
"""
Internet connection billing types for Aliyun Nodes.
"""
BY_BANDWIDTH = 'PayByBandwidth'
BY_TRAFFIC = 'PayByTraffic'
class DiskCategory(object):
"""
Enum defined disk types supported by Aliyun system and data disks.
"""
CLOUD = 'cloud'
CLOUD_EFFICIENCY = 'cloud_efficiency'
CLOUD_SSD = 'cloud_ssd'
EPHEMERAL_SSD = 'ephemeral_ssd'
class Pagination(object):
"""
    Pagination describes results that span multiple pages.
"""
def __init__(self, total, size, current):
"""
Create a pagination.
:param total: the total count of the results
:param size: the page size of each page
:param current: the current page number, 1-based
"""
self.total = total
self.size = size
self.current = current
def next(self):
"""
Switch to the next page.
:return: the new pagination or None when no more page
:rtype: ``Pagination``
"""
if self.total is None or (self.size * self.current >= self.total):
return None
self.current += 1
return self
def to_dict(self):
return {'PageNumber': self.current,
'PageSize': self.size}
def __repr__(self):
return ('<Pagination total=%d, size=%d, current page=%d>' %
(self.total, self.size, self.current))
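# Pagination usage sketch (request/parse details elided); the driver's
# _request_multiple_pages() follows this pattern:
#
#     page = Pagination(total=None, size=50, current=1)
#     while page:
#         params.update(page.to_dict())
#         ... issue the request, parse one page, update page.total ...
#         page = page.next()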
class ECSDriver(NodeDriver):
"""
Aliyun ECS node driver.
Used for Aliyun ECS service.
TODO:
Create public IP address
Get guest OS root password
Adjust internet bandwidth settings
Manage security groups and rules
"""
name = 'Aliyun ECS'
website = 'https://www.aliyun.com/product/ecs'
connectionCls = ECSConnection
features = {'create_node': ['password']}
namespace = None
path = '/'
internet_charge_types = InternetChargeType
disk_categories = DiskCategory
NODE_STATE_MAPPING = {
'Starting': NodeState.PENDING,
'Running': NodeState.RUNNING,
'Stopping': NodeState.PENDING,
'Stopped': NodeState.STOPPED
}
VOLUME_STATE_MAPPING = {
'In_use': StorageVolumeState.INUSE,
'Available': StorageVolumeState.AVAILABLE,
'Attaching': StorageVolumeState.ATTACHING,
'Detaching': StorageVolumeState.INUSE,
'Creating': StorageVolumeState.CREATING,
'ReIniting': StorageVolumeState.CREATING}
SNAPSHOT_STATE_MAPPING = {
'progressing': VolumeSnapshotState.CREATING,
'accomplished': VolumeSnapshotState.AVAILABLE,
'failed': VolumeSnapshotState.ERROR}
def list_nodes(self, ex_node_ids=None, ex_filters=None):
"""
List all nodes.
@inherits: :class:`NodeDriver.create_node`
:keyword ex_node_ids: a list of node's ids used to filter nodes.
                              Only the nodes whose ids are in this list
                              will be returned.
:type ex_node_ids: ``list`` of ``str``
        :keyword ex_filters: node attribute and value pairs to filter nodes.
                             Only the nodes that match all the pairs will
                             be returned.
                             If the filter attribute needs a json array
                             value, use a ``list`` object; the driver will
                             convert it.
:type ex_filters: ``dict``
"""
params = {'Action': 'DescribeInstances',
'RegionId': self.region}
if ex_node_ids:
if isinstance(ex_node_ids, list):
params['InstanceIds'] = self._list_to_json_array(ex_node_ids)
else:
raise AttributeError('ex_node_ids should be a list of '
'node ids.')
if ex_filters:
if isinstance(ex_filters, dict):
params.update(ex_filters)
else:
raise AttributeError('ex_filters should be a dict of '
'node attributes.')
nodes = self._request_multiple_pages(self.path, params,
self._to_nodes)
return nodes
def list_sizes(self, location=None):
params = {'Action': 'DescribeInstanceTypes'}
resp_body = self.connection.request(self.path, params).object
size_elements = findall(resp_body, 'InstanceTypes/InstanceType',
namespace=self.namespace)
sizes = [self._to_size(each) for each in size_elements]
return sizes
def list_locations(self):
params = {'Action': 'DescribeRegions'}
resp_body = self.connection.request(self.path, params).object
location_elements = findall(resp_body, 'Regions/Region',
namespace=self.namespace)
locations = [self._to_location(each) for each in location_elements]
return locations
def create_node(self, name, size, image, auth=None,
ex_security_group_id=None, ex_description=None,
ex_internet_charge_type=None,
ex_internet_max_bandwidth_out=None,
ex_internet_max_bandwidth_in=None,
ex_hostname=None, ex_io_optimized=None,
ex_system_disk=None, ex_data_disks=None,
ex_vswitch_id=None, ex_private_ip_address=None,
ex_client_token=None, **kwargs):
"""
@inherits: :class:`NodeDriver.create_node`
:param name: The name for this new node (required)
:type name: ``str``
:param image: The image to use when creating this node (required)
:type image: `NodeImage`
:param size: The size of the node to create (required)
:type size: `NodeSize`
:keyword auth: Initial authentication information for the node
(optional)
:type auth: :class:`NodeAuthSSHKey` or :class:`NodeAuthPassword`
:keyword ex_security_group_id: The id of the security group the
new created node is attached to.
(required)
:type ex_security_group_id: ``str``
:keyword ex_description: A description string for this node (optional)
:type ex_description: ``str``
:keyword ex_internet_charge_type: The internet charge type (optional)
:type ex_internet_charge_type: a ``str`` of 'PayByTraffic'
or 'PayByBandwidth'
:keyword ex_internet_max_bandwidth_out: The max output bandwidth,
in Mbps (optional)
Required for 'PayByTraffic'
internet charge type
:type ex_internet_max_bandwidth_out: a ``int`` in range [0, 100]
a ``int`` in range [1, 100] for
'PayByTraffic' internet charge
type
:keyword ex_internet_max_bandwidth_in: The max input bandwidth,
in Mbps (optional)
:type ex_internet_max_bandwidth_in: a ``int`` in range [1, 200]
default to 200 in server side
:keyword ex_hostname: The hostname for the node (optional)
:type ex_hostname: ``str``
:keyword ex_io_optimized: Whether the node is IO optimized (optional)
        :type ex_io_optimized: ``bool``
:keyword ex_system_disk: The system disk for the node (optional)
:type ex_system_disk: ``dict``
:keyword ex_data_disks: The data disks for the node (optional)
:type ex_data_disks: a `list` of `dict`
:keyword ex_vswitch_id: The id of vswitch for a VPC type node
(optional)
:type ex_vswitch_id: ``str``
:keyword ex_private_ip_address: The IP address in private network
(optional)
:type ex_private_ip_address: ``str``
:keyword ex_client_token: A token generated by client to keep
requests idempotency (optional)
        :type ex_client_token: ``str``
"""
params = {'Action': 'CreateInstance',
'RegionId': self.region,
'ImageId': image.id,
'InstanceType': size.id,
'InstanceName': name}
if not ex_security_group_id:
raise AttributeError('ex_security_group_id is mandatory')
params['SecurityGroupId'] = ex_security_group_id
if ex_description:
params['Description'] = ex_description
inet_params = self._get_internet_related_params(
ex_internet_charge_type,
ex_internet_max_bandwidth_in,
ex_internet_max_bandwidth_out)
if inet_params:
params.update(inet_params)
if ex_hostname:
params['HostName'] = ex_hostname
if auth:
auth = self._get_and_check_auth(auth)
params['Password'] = auth.password
if ex_io_optimized is not None:
optimized = ex_io_optimized
if not isinstance(optimized, bool):
optimized = str(optimized).lower() == 'true'
params['IoOptimized'] = 'true' if optimized else 'false'
if ex_system_disk:
system_disk = self._get_system_disk(ex_system_disk)
if system_disk:
params.update(system_disk)
if ex_data_disks:
data_disks = self._get_data_disks(ex_data_disks)
if data_disks:
params.update(data_disks)
if ex_vswitch_id:
params['VSwitchId'] = ex_vswitch_id
        if ex_private_ip_address:
            # A private IP is only valid for VPC-type nodes, so the vswitch
            # it belongs to must be provided as well.
            if not ex_vswitch_id:
                raise AttributeError('ex_private_ip_address requires '
                                     'ex_vswitch_id to be provided as well')
            params['PrivateIpAddress'] = ex_private_ip_address
if ex_client_token:
params['ClientToken'] = ex_client_token
resp = self.connection.request(self.path, params=params)
node_id = findtext(resp.object, xpath='InstanceId',
namespace=self.namespace)
nodes = self.list_nodes(ex_node_ids=[node_id])
if len(nodes) != 1:
            raise LibcloudError('could not find the newly created node '
                                'with id %s.' % node_id,
driver=self)
node = nodes[0]
self.ex_start_node(node)
self._wait_until_state(nodes, NodeState.RUNNING)
return node
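    # Minimal usage sketch (illustrative; ``driver``, the size/image ids and
    # the security group id are assumed placeholders, and NodeAuthPassword
    # comes from libcloud.compute.base):
    #
    #     size = [s for s in driver.list_sizes()
    #             if s.id == 'ecs.t1.small'][0]
    #     image = driver.get_image(image_id='ubuntu_16_04_64.vhd')
    #     node = driver.create_node(name='example', size=size, image=image,
    #                               ex_security_group_id='sg-abc123',
    #                               auth=NodeAuthPassword('s3cret-pass'))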
def reboot_node(self, node, ex_force_stop=False):
"""
Reboot the given node
@inherits :class:`NodeDriver.reboot_node`
        :keyword ex_force_stop: if ``True``, force-stop the node (data may
                                be lost); otherwise stop the node normally.
                                Defaults to ``False``
:type ex_force_stop: ``bool``
"""
params = {'Action': 'RebootInstance',
'InstanceId': node.id,
'ForceStop': u(ex_force_stop).lower()}
resp = self.connection.request(self.path, params=params)
return resp.success() and \
self._wait_until_state([node], NodeState.RUNNING)
def destroy_node(self, node):
nodes = self.list_nodes(ex_node_ids=[node.id])
        # ``or`` (not ``and``) so a missing or mismatched node always raises;
        # short-circuiting also avoids an IndexError when the list is empty.
        if len(nodes) != 1 or nodes[0].id != node.id:
raise LibcloudError('could not find the node with id %s.'
% node.id)
current = nodes[0]
if current.state == NodeState.RUNNING:
# stop node first
self.ex_stop_node(node)
self._wait_until_state(nodes, NodeState.STOPPED)
params = {'Action': 'DeleteInstance',
'InstanceId': node.id}
resp = self.connection.request(self.path, params)
return resp.success()
def ex_start_node(self, node):
"""
Start node to running state.
:param node: the ``Node`` object to start
:type node: ``Node``
:return: starting operation result.
:rtype: ``bool``
"""
params = {'Action': 'StartInstance',
'InstanceId': node.id}
resp = self.connection.request(self.path, params)
return resp.success() and \
self._wait_until_state([node], NodeState.RUNNING)
def ex_stop_node(self, node, ex_force_stop=False):
"""
Stop a running node.
:param node: The node to stop
:type node: :class:`Node`
        :keyword ex_force_stop: if ``True``, force-stop the node (data may
                                be lost); otherwise stop the node normally.
                                Defaults to ``False``
:type ex_force_stop: ``bool``
:return: stopping operation result.
:rtype: ``bool``
"""
params = {'Action': 'StopInstance',
'InstanceId': node.id,
'ForceStop': u(ex_force_stop).lower()}
resp = self.connection.request(self.path, params)
return resp.success() and \
self._wait_until_state([node], NodeState.STOPPED)
def ex_create_security_group(self, description=None, client_token=None):
"""
Create a new security group.
:keyword description: security group description
:type description: ``unicode``
:keyword client_token: a token generated by client to identify
each request.
:type client_token: ``str``
"""
params = {'Action': 'CreateSecurityGroup',
'RegionId': self.region}
if description:
params['Description'] = description
if client_token:
params['ClientToken'] = client_token
resp = self.connection.request(self.path, params)
return findtext(resp.object, 'SecurityGroupId',
namespace=self.namespace)
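    # Illustrative pairing with ex_delete_security_group_by_id below (the
    # returned id is a placeholder):
    #
    #     group_id = driver.ex_create_security_group(description='web tier')
    #     ...
    #     driver.ex_delete_security_group_by_id(group_id=group_id)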
def ex_delete_security_group_by_id(self, group_id=None):
"""
        Delete a security group.
:keyword group_id: security group id
:type group_id: ``str``
"""
params = {'Action': 'DeleteSecurityGroup',
'RegionId': self.region,
'SecurityGroupId': group_id}
resp = self.connection.request(self.path, params)
return resp.success()
def ex_list_security_groups(self, ex_filters=None):
"""
List security groups in the current region.
:keyword ex_filters: security group attributes to filter results.
:type ex_filters: ``dict``
:return: a list of defined security groups
:rtype: ``list`` of ``ECSSecurityGroup``
"""
params = {'Action': 'DescribeSecurityGroups',
'RegionId': self.region}
        if ex_filters and isinstance(ex_filters, dict):
            # Merge without mutating the caller's dict; the fixed query
            # parameters take precedence over conflicting filter keys.
            merged = dict(ex_filters)
            merged.update(params)
            params = merged
def _parse_response(resp_object):
sg_elements = findall(resp_object, 'SecurityGroups/SecurityGroup',
namespace=self.namespace)
sgs = [self._to_security_group(el) for el in sg_elements]
return sgs
return self._request_multiple_pages(self.path, params,
_parse_response)
def ex_list_security_group_attributes(self, group_id=None,
nic_type='internet'):
"""
List security group attributes in the current region.
        :keyword group_id: security group id. (required)
        :type group_id: ``str``
        :keyword nic_type: 'internet' or 'intranet'.
        :type nic_type: ``str``
:return: a list of defined security group Attributes
:rtype: ``list`` of ``ECSSecurityGroupAttribute``
"""
params = {'Action': 'DescribeSecurityGroupAttribute',
'RegionId': self.region,
'NicType': nic_type}
if group_id is None:
raise AttributeError('group_id is required')
params['SecurityGroupId'] = group_id
resp_object = self.connection.request(self.path, params).object
sga_elements = findall(resp_object, 'Permissions/Permission',
namespace=self.namespace)
return [self._to_security_group_attribute(el) for el in sga_elements]
def ex_list_zones(self, region_id=None):
"""
List availability zones in the given region or the current region.
:keyword region_id: the id of the region to query zones from
:type region_id: ``str``
:return: list of zones
:rtype: ``list`` of ``ECSZone``
"""
params = {'Action': 'DescribeZones'}
if region_id:
params['RegionId'] = region_id
else:
params['RegionId'] = self.region
resp_body = self.connection.request(self.path, params).object
zone_elements = findall(resp_body, 'Zones/Zone',
namespace=self.namespace)
zones = [self._to_zone(el) for el in zone_elements]
return zones
##
# Volume and snapshot management methods
##
def list_volumes(self, ex_volume_ids=None, ex_filters=None):
"""
List all volumes.
@inherits: :class:`NodeDriver.list_volumes`
        :keyword ex_volume_ids: a list of volume ids used to filter volumes.
                                Only the volumes whose ids are in this list
                                will be returned.
        :type ex_volume_ids: ``list`` of ``str``
        :keyword ex_filters: volume attribute and value pairs to filter
                             volumes. Only the volumes which match all the
                             pairs will be returned.
                             If the filter attribute needs a json array
                             value, use a ``list`` object; the driver will
                             convert it.
        :type ex_filters: ``dict``
"""
params = {'Action': 'DescribeDisks',
'RegionId': self.region}
if ex_volume_ids:
if isinstance(ex_volume_ids, list):
params['DiskIds'] = self._list_to_json_array(ex_volume_ids)
else:
raise AttributeError('ex_volume_ids should be a list of '
'volume ids.')
if ex_filters:
if not isinstance(ex_filters, dict):
raise AttributeError('ex_filters should be a dict of '
'volume attributes.')
else:
for key in ex_filters.keys():
params[key] = ex_filters[key]
def _parse_response(resp_object):
disk_elements = findall(resp_object, 'Disks/Disk',
namespace=self.namespace)
volumes = [self._to_volume(each) for each in disk_elements]
return volumes
return self._request_multiple_pages(self.path, params,
_parse_response)
    def list_volume_snapshots(self, volume, ex_snapshot_ids=None,
                              ex_filters=None):
"""
List snapshots for a storage volume.
        @inherits :class:`NodeDriver.list_volume_snapshots`
        :keyword ex_snapshot_ids: a list of snapshot ids to filter the
                                  snapshots returned.
        :type ex_snapshot_ids: ``list`` of ``str``
        :keyword ex_filters: snapshot attribute and value pairs to filter
                             snapshots. Only the snapshots which match all
                             the pairs will be returned.
                             If the filter attribute needs a json array
                             value, use a ``list`` object; the driver will
                             convert it.
        :type ex_filters: ``dict``
"""
params = {'Action': 'DescribeSnapshots',
'RegionId': self.region}
if volume:
params['DiskId'] = volume.id
if ex_snapshot_ids and isinstance(ex_snapshot_ids, list):
params['SnapshotIds'] = self._list_to_json_array(ex_snapshot_ids)
if ex_filters and isinstance(ex_filters, dict):
for key in ex_filters.keys():
params[key] = ex_filters[key]
def _parse_response(resp_body):
snapshot_elements = findall(resp_body, 'Snapshots/Snapshot',
namespace=self.namespace)
snapshots = [self._to_snapshot(each) for each in snapshot_elements]
return snapshots
return self._request_multiple_pages(self.path, params,
_parse_response)
def create_volume(self, size, name, location=None, snapshot=None,
ex_zone_id=None, ex_description=None,
ex_disk_category=None, ex_client_token=None):
"""
Create a new volume.
        @inherits :class:`NodeDriver.create_volume`
:keyword ex_zone_id: the availability zone id (required)
:type ex_zone_id: ``str``
:keyword ex_description: volume description
:type ex_description: ``unicode``
:keyword ex_disk_category: disk category for data disk
:type ex_disk_category: ``str``
:keyword ex_client_token: a token generated by client to identify
each request.
:type ex_client_token: ``str``
"""
params = {'Action': 'CreateDisk',
'RegionId': self.region,
'DiskName': name,
'Size': size}
if ex_zone_id is None:
raise AttributeError('ex_zone_id is required')
params['ZoneId'] = ex_zone_id
if snapshot is not None and isinstance(snapshot, VolumeSnapshot):
params['SnapshotId'] = snapshot.id
if ex_description:
params['Description'] = ex_description
if ex_disk_category:
params['DiskCategory'] = ex_disk_category
if ex_client_token:
params['ClientToken'] = ex_client_token
resp = self.connection.request(self.path, params).object
volume_id = findtext(resp, 'DiskId', namespace=self.namespace)
volumes = self.list_volumes(ex_volume_ids=[volume_id])
if len(volumes) != 1:
            raise LibcloudError('could not find the newly created volume '
                                'with id %s.' % volume_id,
driver=self)
return volumes[0]
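    # Usage sketch (illustrative; the disk category string is an assumption
    # about the ECS API, check the zone's available categories first):
    #
    #     zone = driver.ex_list_zones()[0]
    #     volume = driver.create_volume(size=5, name='data-disk',
    #                                   ex_zone_id=zone.id,
    #                                   ex_disk_category='cloud')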
def create_volume_snapshot(self, volume, name=None, ex_description=None,
ex_client_token=None):
"""
Creates a snapshot of the storage volume.
@inherits :class:`NodeDriver.create_volume_snapshot`
:keyword ex_description: description of the snapshot.
:type ex_description: ``unicode``
:keyword ex_client_token: a token generated by client to identify
each request.
:type ex_client_token: ``str``
"""
params = {'Action': 'CreateSnapshot',
'DiskId': volume.id}
if name:
params['SnapshotName'] = name
if ex_description:
params['Description'] = ex_description
if ex_client_token:
params['ClientToken'] = ex_client_token
snapshot_elements = self.connection.request(self.path, params).object
snapshot_id = findtext(snapshot_elements, 'SnapshotId',
namespace=self.namespace)
snapshots = self.list_volume_snapshots(volume=None,
ex_snapshot_ids=[snapshot_id])
if len(snapshots) != 1:
            raise LibcloudError('could not find the newly created snapshot '
                                'with id %s.' % snapshot_id, driver=self)
return snapshots[0]
def attach_volume(self, node, volume, device=None,
ex_delete_with_instance=None):
"""
Attaches volume to node.
@inherits :class:`NodeDriver.attach_volume`
        :keyword device: device path allocated for this attached volume
        :type device: ``str`` between /dev/xvdb and /dev/xvdz;
                      if empty, allocated by the system
        :keyword ex_delete_with_instance: whether to delete this volume when
                                          the instance is deleted.
        :type ex_delete_with_instance: ``bool``
"""
params = {'Action': 'AttachDisk',
'InstanceId': node.id,
'DiskId': volume.id}
if device:
params['Device'] = device
        if ex_delete_with_instance is not None:
            # Only send the flag when explicitly set, so that passing False
            # is honoured instead of silently falling back to the server
            # default.
            params['DeleteWithInstance'] = \
                str(bool(ex_delete_with_instance)).lower()
resp = self.connection.request(self.path, params)
return resp.success()
def detach_volume(self, volume, ex_instance_id=None):
"""
Detaches a volume from a node.
@inherits :class:`NodeDriver.detach_volume`
:keyword ex_instance_id: the id of the instance from which the volume
is detached.
:type ex_instance_id: ``str``
"""
params = {'Action': 'DetachDisk',
'DiskId': volume.id}
if ex_instance_id:
params['InstanceId'] = ex_instance_id
else:
volumes = self.list_volumes(ex_volume_ids=[volume.id])
if len(volumes) != 1:
raise AttributeError('could not find the instance id '
'the volume %s attached to, '
'ex_instance_id is required.' %
volume.id)
params['InstanceId'] = volumes[0].extra['instance_id']
resp = self.connection.request(self.path, params)
return resp.success()
def destroy_volume(self, volume):
params = {'Action': 'DeleteDisk',
'DiskId': volume.id}
volumes = self.list_volumes(ex_volume_ids=[volume.id])
if len(volumes) != 1:
raise LibcloudError('could not find the volume with id %s.' %
volume.id,
driver=self)
if volumes[0].state != StorageVolumeState.AVAILABLE:
raise LibcloudError('only volume in AVAILABLE state could be '
'destroyed.', driver=self)
resp = self.connection.request(self.path, params)
return resp.success()
def destroy_volume_snapshot(self, snapshot):
params = {'Action': 'DeleteSnapshot'}
if snapshot and isinstance(snapshot, VolumeSnapshot):
params['SnapshotId'] = snapshot.id
else:
raise AttributeError('snapshot is required and must be a '
'VolumeSnapshot')
resp = self.connection.request(self.path, params)
return resp.success()
##
# Image management methods
##
def list_images(self, location=None, ex_image_ids=None, ex_filters=None):
"""
List images on a provider.
@inherits :class:`NodeDriver.list_images`
:keyword ex_image_ids: a list of image ids to filter the images to
be returned.
:type ex_image_ids: ``list`` of ``str``
        :keyword ex_filters: image attribute and value pairs to filter
                             images. Only the images which match all
                             the pairs will be returned.
                             If the filter attribute needs a json array
                             value, use a ``list`` object; the driver will
                             convert it.
        :type ex_filters: ``dict``
"""
if location and isinstance(location, NodeLocation):
region = location.id
else:
region = self.region
params = {'Action': 'DescribeImages',
'RegionId': region}
if ex_image_ids:
if isinstance(ex_image_ids, list):
params['ImageId'] = ','.join(ex_image_ids)
else:
raise AttributeError('ex_image_ids should be a list of '
'image ids')
if ex_filters and isinstance(ex_filters, dict):
for key in ex_filters.keys():
params[key] = ex_filters[key]
def _parse_response(resp_body):
image_elements = findall(resp_body, 'Images/Image',
namespace=self.namespace)
images = [self._to_image(each) for each in image_elements]
return images
return self._request_multiple_pages(self.path, params,
_parse_response)
def create_image(self, node, name, description=None, ex_snapshot_id=None,
ex_image_version=None, ex_client_token=None):
"""
Creates an image from a system disk snapshot.
@inherits :class:`NodeDriver.create_image`
:keyword ex_snapshot_id: the id of the snapshot to create the image.
(required)
:type ex_snapshot_id: ``str``
:keyword ex_image_version: the version number of the image
:type ex_image_version: ``str``
:keyword ex_client_token: a token generated by client to identify
each request.
:type ex_client_token: ``str``
"""
params = {'Action': 'CreateImage',
'RegionId': self.region}
if name:
params['ImageName'] = name
if description:
params['Description'] = description
if ex_snapshot_id:
params['SnapshotId'] = ex_snapshot_id
else:
raise AttributeError('ex_snapshot_id is required')
if ex_image_version:
params['ImageVersion'] = ex_image_version
if ex_client_token:
params['ClientToken'] = ex_client_token
resp = self.connection.request(self.path, params)
image_id = findtext(resp.object, 'ImageId', namespace=self.namespace)
return self.get_image(image_id=image_id)
def delete_image(self, node_image):
params = {'Action': 'DeleteImage',
'RegionId': self.region,
'ImageId': node_image.id}
resp = self.connection.request(self.path, params)
return resp.success()
def get_image(self, image_id, ex_region_id=None):
if ex_region_id:
region = ex_region_id
else:
region = self.region
location = NodeLocation(id=region, name=None, country=None,
driver=self)
images = self.list_images(location, ex_image_ids=[image_id])
if len(images) != 1:
raise LibcloudError('could not find the image with id %s' %
image_id,
driver=self)
return images[0]
def copy_image(self, source_region, node_image, name, description=None,
ex_destination_region_id=None, ex_client_token=None):
"""
Copies an image from a source region to the destination region.
        If a destination region is not provided, it defaults to the current
        region.
@inherits :class:`NodeDriver.copy_image`
:keyword ex_destination_region_id: id of the destination region
:type ex_destination_region_id: ``str``
:keyword ex_client_token: a token generated by client to identify
each request.
:type ex_client_token: ``str``
"""
params = {'Action': 'CopyImage',
'RegionId': source_region,
'ImageId': node_image.id}
if ex_destination_region_id is not None:
params['DestinationRegionId'] = ex_destination_region_id
else:
params['DestinationRegionId'] = self.region
if name:
params['DestinationImageName'] = name
if description:
params['DestinationDescription'] = description
if ex_client_token:
params['ClientToken'] = ex_client_token
resp = self.connection.request(self.path, params)
image_id = findtext(resp.object, 'ImageId', namespace=self.namespace)
return self.get_image(image_id=image_id)
    def _to_nodes(self, resp_object):
        """
        Convert response to Node object list
        :param resp_object: parsed response object
        :return: a list of ``Node``
        :rtype: ``list``
        """
        node_elements = findall(resp_object, 'Instances/Instance',
                                self.namespace)
        return [self._to_node(el) for el in node_elements]
def _to_node(self, instance):
"""
Convert an InstanceAttributesType object to ``Node`` object
        :param instance: an xml element representing an instance
:return: a ``Node`` object
:rtype: ``Node``
"""
_id = findtext(element=instance, xpath='InstanceId',
namespace=self.namespace)
name = findtext(element=instance, xpath='InstanceName',
namespace=self.namespace)
instance_status = findtext(element=instance, xpath='Status',
namespace=self.namespace)
state = self.NODE_STATE_MAPPING.get(instance_status, NodeState.UNKNOWN)
def _get_ips(ip_address_els):
return [each.text for each in ip_address_els]
public_ip_els = findall(element=instance,
xpath='PublicIpAddress/IpAddress',
namespace=self.namespace)
public_ips = _get_ips(public_ip_els)
private_ip_els = findall(element=instance,
xpath='InnerIpAddress/IpAddress',
namespace=self.namespace)
private_ips = _get_ips(private_ip_els)
# Extra properties
extra = self._get_extra_dict(instance,
RESOURCE_EXTRA_ATTRIBUTES_MAP['node'])
extra['vpc_attributes'] = self._get_vpc_attributes(instance)
extra['eip_address'] = self._get_eip_address(instance)
extra['operation_locks'] = self._get_operation_locks(instance)
node = Node(id=_id, name=name, state=state,
public_ips=public_ips, private_ips=private_ips,
driver=self.connection.driver, extra=extra)
return node
def _get_extra_dict(self, element, mapping):
"""
Extract attributes from the element based on rules provided in the
mapping dictionary.
:param element: Element to parse the values from.
:type element: xml.etree.ElementTree.Element.
        :param mapping: Dictionary with the extra layout
        :type mapping: ``dict``
:rtype: ``dict``
"""
extra = {}
for attribute, values in mapping.items():
transform_func = values['transform_func']
value = findattr(element=element,
xpath=values['xpath'],
namespace=self.namespace)
if value:
try:
extra[attribute] = transform_func(value)
except Exception:
extra[attribute] = None
else:
extra[attribute] = value
return extra
def _get_internet_related_params(self, ex_internet_charge_type,
ex_internet_max_bandwidth_in,
ex_internet_max_bandwidth_out):
params = {}
if ex_internet_charge_type:
params['InternetChargeType'] = ex_internet_charge_type
if ex_internet_charge_type.lower() == 'paybytraffic':
if ex_internet_max_bandwidth_out:
params['InternetMaxBandwidthOut'] = \
ex_internet_max_bandwidth_out
else:
raise AttributeError('ex_internet_max_bandwidth_out is '
'mandatory for PayByTraffic internet'
' charge type.')
if ex_internet_max_bandwidth_in:
params['InternetMaxBandwidthIn'] = \
ex_internet_max_bandwidth_in
return params
def _get_system_disk(self, ex_system_disk):
if not isinstance(ex_system_disk, dict):
raise AttributeError('ex_system_disk is not a dict')
sys_disk_dict = ex_system_disk
key_base = 'SystemDisk.'
# TODO(samsong8610): Use a type instead of dict
mappings = {'category': 'Category',
'disk_name': 'DiskName',
'description': 'Description'}
params = {}
for attr in mappings.keys():
if attr in sys_disk_dict:
params[key_base + mappings[attr]] = sys_disk_dict[attr]
return params
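    # Example ex_system_disk value accepted above (illustrative; the category
    # name is an assumption about the ECS API):
    #
    #     {'category': 'cloud_ssd',
    #      'disk_name': 'system',
    #      'description': 'system disk of the web node'}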
def _get_data_disks(self, ex_data_disks):
if isinstance(ex_data_disks, dict):
data_disks = [ex_data_disks]
elif isinstance(ex_data_disks, list):
data_disks = ex_data_disks
else:
raise AttributeError('ex_data_disks should be a list of dict')
# TODO(samsong8610): Use a type instead of dict
mappings = {'size': 'Size',
'category': 'Category',
'snapshot_id': 'SnapshotId',
'disk_name': 'DiskName',
'description': 'Description',
'device': 'Device',
'delete_with_instance': 'DeleteWithInstance'}
params = {}
for idx, disk in enumerate(data_disks):
key_base = 'DataDisk.{0}.'.format(idx + 1)
for attr in mappings.keys():
if attr in disk:
if attr == 'delete_with_instance':
# Convert bool value to str
value = str(disk[attr]).lower()
else:
value = disk[attr]
params[key_base + mappings[attr]] = value
return params
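    # Example ex_data_disks value accepted above (illustrative); it is
    # flattened into DataDisk.1.Size, DataDisk.1.Category, ... request
    # parameters:
    #
    #     [{'size': 100,
    #       'category': 'cloud',
    #       'disk_name': 'data1',
    #       'delete_with_instance': True}]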
def _get_vpc_attributes(self, instance):
vpcs = findall(instance, xpath='VpcAttributes',
namespace=self.namespace)
if len(vpcs) <= 0:
return None
return self._get_extra_dict(
vpcs[0], RESOURCE_EXTRA_ATTRIBUTES_MAP['vpc_attributes'])
def _get_eip_address(self, instance):
eips = findall(instance, xpath='EipAddress',
namespace=self.namespace)
if len(eips) <= 0:
return None
return self._get_extra_dict(
eips[0], RESOURCE_EXTRA_ATTRIBUTES_MAP['eip_address_associate'])
def _get_operation_locks(self, instance):
locks = findall(instance, xpath='OperationLocks',
namespace=self.namespace)
if len(locks) <= 0:
return None
return self._get_extra_dict(
locks[0], RESOURCE_EXTRA_ATTRIBUTES_MAP['operation_locks'])
def _wait_until_state(self, nodes, state, wait_period=3, timeout=600):
"""
Block until the provided nodes are in the desired state.
:param nodes: List of nodes to wait for
:type nodes: ``list`` of :class:`.Node`
:param state: desired state
:type state: ``NodeState``
:param wait_period: How many seconds to wait between each loop
iteration. (default is 3)
:type wait_period: ``int``
:param timeout: How many seconds to wait before giving up.
(default is 600)
:type timeout: ``int``
:return: if the nodes are in the desired state.
:rtype: ``bool``
"""
start = time.time()
end = start + timeout
node_ids = [node.id for node in nodes]
        while time.time() < end:
matched_nodes = self.list_nodes(ex_node_ids=node_ids)
if len(matched_nodes) > len(node_ids):
found_ids = [node.id for node in matched_nodes]
msg = ('found multiple nodes with same ids, '
'desired ids: %(ids)s, found ids: %(found_ids)s' %
{'ids': node_ids, 'found_ids': found_ids})
raise LibcloudError(value=msg, driver=self)
desired_nodes = [node for node in matched_nodes
if node.state == state]
            if len(desired_nodes) == len(node_ids):
                return True
            time.sleep(wait_period)
raise LibcloudError(value='Timed out after %s seconds' % (timeout),
driver=self)
def _to_volume(self, element):
_id = findtext(element, 'DiskId', namespace=self.namespace)
name = findtext(element, 'DiskName', namespace=self.namespace)
size = int(findtext(element, 'Size', namespace=self.namespace))
status_str = findtext(element, 'Status', namespace=self.namespace)
status = self.VOLUME_STATE_MAPPING.get(status_str,
StorageVolumeState.UNKNOWN)
extra = self._get_extra_dict(element,
RESOURCE_EXTRA_ATTRIBUTES_MAP['volume'])
extra['operation_locks'] = self._get_operation_locks(element)
return StorageVolume(_id, name, size, self, state=status, extra=extra)
def _list_to_json_array(self, value):
try:
return json.dumps(value)
except Exception:
raise AttributeError('could not convert list to json array')
def _to_snapshot(self, element):
_id = findtext(element, 'SnapshotId', namespace=self.namespace)
created = findtext(element, 'CreationTime', namespace=self.namespace)
status_str = findtext(element, 'Status', namespace=self.namespace)
state = self.SNAPSHOT_STATE_MAPPING.get(status_str,
VolumeSnapshotState.UNKNOWN)
extra = self._get_extra_dict(element,
RESOURCE_EXTRA_ATTRIBUTES_MAP['snapshot'])
return VolumeSnapshot(id=_id, driver=self, extra=extra,
created=created, state=state)
def _to_size(self, element):
_id = findtext(element, 'InstanceTypeId', namespace=self.namespace)
ram = float(findtext(element, 'MemorySize', namespace=self.namespace))
extra = {}
extra['cpu_core_count'] = int(findtext(element, 'CpuCoreCount',
namespace=self.namespace))
extra['instance_type_family'] = findtext(element, 'InstanceTypeFamily',
namespace=self.namespace)
return NodeSize(id=_id, name=_id, ram=ram, disk=None, bandwidth=None,
price=None, driver=self, extra=extra)
def _to_location(self, element):
_id = findtext(element, 'RegionId', namespace=self.namespace)
localname = findtext(element, 'LocalName', namespace=self.namespace)
return NodeLocation(id=_id, name=localname, country=None, driver=self)
def _to_image(self, element):
_id = findtext(element, 'ImageId', namespace=self.namespace)
name = findtext(element, 'ImageName', namespace=self.namespace)
extra = self._get_extra_dict(element,
RESOURCE_EXTRA_ATTRIBUTES_MAP['image'])
extra['disk_device_mappings'] = self._get_disk_device_mappings(
element.find('DiskDeviceMappings'))
return NodeImage(id=_id, name=name, driver=self, extra=extra)
def _get_disk_device_mappings(self, element):
if element is None:
return None
mapping_element = element.find('DiskDeviceMapping')
if mapping_element is not None:
return self._get_extra_dict(
mapping_element,
RESOURCE_EXTRA_ATTRIBUTES_MAP['disk_device_mapping'])
return None
def _to_security_group(self, element):
_id = findtext(element, 'SecurityGroupId', namespace=self.namespace)
name = findtext(element, 'SecurityGroupName',
namespace=self.namespace)
description = findtext(element, 'Description',
namespace=self.namespace)
vpc_id = findtext(element, 'VpcId', namespace=self.namespace)
creation_time = findtext(element, 'CreationTime',
namespace=self.namespace)
return ECSSecurityGroup(_id, name, description=description,
driver=self, vpc_id=vpc_id,
creation_time=creation_time)
def _to_security_group_attribute(self, element):
ip_protocol = findtext(element, 'IpProtocol', namespace=self.namespace)
port_range = findtext(element, 'PortRange', namespace=self.namespace)
source_group_id = findtext(element, 'SourceGroupId',
namespace=self.namespace)
policy = findtext(element, 'Policy', namespace=self.namespace)
nic_type = findtext(element, 'NicType', namespace=self.namespace)
return ECSSecurityGroupAttribute(ip_protocol=ip_protocol,
port_range=port_range,
source_group_id=source_group_id,
policy=policy, nic_type=nic_type)
def _to_zone(self, element):
_id = findtext(element, 'ZoneId', namespace=self.namespace)
local_name = findtext(element, 'LocalName', namespace=self.namespace)
resource_types = findall(element,
'AvailableResourceCreation/ResourceTypes',
namespace=self.namespace)
instance_types = findall(element,
'AvailableInstanceTypes/InstanceTypes',
namespace=self.namespace)
disk_categories = findall(element,
'AvailableDiskCategories/DiskCategories',
namespace=self.namespace)
def _text(element):
return element.text
return ECSZone(id=_id, name=local_name, driver=self,
available_resource_types=list(
map(_text, resource_types)),
available_instance_types=list(
map(_text, instance_types)),
available_disk_categories=list(
map(_text, disk_categories)))
    def _get_pagination(self, element):
        # The pagination fields live in the same XML namespace as the rest
        # of the response, so pass the namespace as every other findtext
        # call in this driver does.
        page_number = int(findtext(element, 'PageNumber',
                                   namespace=self.namespace))
        total_count = int(findtext(element, 'TotalCount',
                                   namespace=self.namespace))
        page_size = int(findtext(element, 'PageSize',
                                 namespace=self.namespace))
        return Pagination(total=total_count, size=page_size,
                          current=page_number)
def _request_multiple_pages(self, path, params, parse_func):
"""
Request all resources by multiple pages.
:param path: the resource path
:type path: ``str``
:param params: the query parameters
:type params: ``dict``
        :param parse_func: the function object to parse the response body
        :type parse_func: ``function``
:return: list of resource object, if not found any, return []
:rtype: ``list``
"""
results = []
while True:
one_page = self.connection.request(path, params).object
resources = parse_func(one_page)
results += resources
pagination = self._get_pagination(one_page)
if pagination.next() is None:
break
params.update(pagination.to_dict())
return results
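    # Illustrative note on the loop above (an assumption about the Pagination
    # helper defined elsewhere in this module): it relies on
    # Pagination.next() returning None once the last page was consumed, and
    # on Pagination.to_dict() yielding the PageNumber/PageSize query
    # parameters for the following request.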
| NexusIS/libcloud | libcloud/compute/drivers/ecs.py | Python | apache-2.0 | 59,910 |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import TYPE_CHECKING
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpRequest, HttpResponse
from azure.core.polling import LROPoller, NoPolling, PollingMethod
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.arm_polling import ARMPolling
from .. import models as _models
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
class IpAllocationsOperations(object):
"""IpAllocationsOperations operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.network.v2021_05_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer):
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
def _delete_initial(
self,
resource_group_name, # type: str
ip_allocation_name, # type: str
**kwargs # type: Any
):
# type: (...) -> None
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2021-05-01"
accept = "application/json"
# Construct URL
url = self._delete_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'ipAllocationName': self._serialize.url("ip_allocation_name", ip_allocation_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.delete(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/IpAllocations/{ipAllocationName}'} # type: ignore
def begin_delete(
self,
resource_group_name, # type: str
ip_allocation_name, # type: str
**kwargs # type: Any
):
# type: (...) -> LROPoller[None]
"""Deletes the specified IpAllocation.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param ip_allocation_name: The name of the IpAllocation.
:type ip_allocation_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling.
Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType[None]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._delete_initial(
resource_group_name=resource_group_name,
ip_allocation_name=ip_allocation_name,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
if cls:
return cls(pipeline_response, None, {})
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'ipAllocationName': self._serialize.url("ip_allocation_name", ip_allocation_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/IpAllocations/{ipAllocationName}'} # type: ignore
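    # Hedged usage sketch (illustrative; NetworkManagementClient is the usual
    # azure-mgmt-network entry point, and the names are placeholders):
    #
    #     client = NetworkManagementClient(credential, subscription_id)
    #     poller = client.ip_allocations.begin_delete('my-rg', 'my-ipalloc')
    #     poller.result()  # block until the delete completes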
def get(
self,
resource_group_name, # type: str
ip_allocation_name, # type: str
expand=None, # type: Optional[str]
**kwargs # type: Any
):
# type: (...) -> "_models.IpAllocation"
"""Gets the specified IpAllocation by resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param ip_allocation_name: The name of the IpAllocation.
:type ip_allocation_name: str
:param expand: Expands referenced resources.
:type expand: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: IpAllocation, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2021_05_01.models.IpAllocation
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.IpAllocation"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2021-05-01"
accept = "application/json"
# Construct URL
url = self.get.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'ipAllocationName': self._serialize.url("ip_allocation_name", ip_allocation_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
if expand is not None:
query_parameters['$expand'] = self._serialize.query("expand", expand, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('IpAllocation', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/IpAllocations/{ipAllocationName}'} # type: ignore
def _create_or_update_initial(
self,
resource_group_name, # type: str
ip_allocation_name, # type: str
parameters, # type: "_models.IpAllocation"
**kwargs # type: Any
):
# type: (...) -> "_models.IpAllocation"
cls = kwargs.pop('cls', None) # type: ClsType["_models.IpAllocation"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2021-05-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self._create_or_update_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'ipAllocationName': self._serialize.url("ip_allocation_name", ip_allocation_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'IpAllocation')
body_content_kwargs['content'] = body_content
request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('IpAllocation', pipeline_response)
if response.status_code == 201:
deserialized = self._deserialize('IpAllocation', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/IpAllocations/{ipAllocationName}'} # type: ignore
def begin_create_or_update(
self,
resource_group_name, # type: str
ip_allocation_name, # type: str
parameters, # type: "_models.IpAllocation"
**kwargs # type: Any
):
# type: (...) -> LROPoller["_models.IpAllocation"]
"""Creates or updates an IpAllocation in the specified resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param ip_allocation_name: The name of the IpAllocation.
:type ip_allocation_name: str
        :param parameters: Parameters supplied to the create or update IpAllocation operation.
:type parameters: ~azure.mgmt.network.v2021_05_01.models.IpAllocation
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling.
Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either IpAllocation or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.network.v2021_05_01.models.IpAllocation]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.IpAllocation"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._create_or_update_initial(
resource_group_name=resource_group_name,
ip_allocation_name=ip_allocation_name,
parameters=parameters,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('IpAllocation', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'ipAllocationName': self._serialize.url("ip_allocation_name", ip_allocation_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'azure-async-operation'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/IpAllocations/{ipAllocationName}'} # type: ignore
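    # Hedged usage sketch (illustrative; the IpAllocation body below is a
    # minimal assumption, not the complete schema):
    #
    #     params = models.IpAllocation(location='westus2')
    #     poller = client.ip_allocations.begin_create_or_update(
    #         'my-rg', 'my-ipalloc', params)
    #     ip_alloc = poller.result()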
def update_tags(
self,
resource_group_name, # type: str
ip_allocation_name, # type: str
parameters, # type: "_models.TagsObject"
**kwargs # type: Any
):
# type: (...) -> "_models.IpAllocation"
"""Updates a IpAllocation tags.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param ip_allocation_name: The name of the IpAllocation.
:type ip_allocation_name: str
:param parameters: Parameters supplied to update IpAllocation tags.
:type parameters: ~azure.mgmt.network.v2021_05_01.models.TagsObject
:keyword callable cls: A custom type or function that will be passed the direct response
:return: IpAllocation, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2021_05_01.models.IpAllocation
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.IpAllocation"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2021-05-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self.update_tags.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'ipAllocationName': self._serialize.url("ip_allocation_name", ip_allocation_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'TagsObject')
body_content_kwargs['content'] = body_content
request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('IpAllocation', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
update_tags.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/IpAllocations/{ipAllocationName}'} # type: ignore
def list(
self,
**kwargs # type: Any
):
# type: (...) -> Iterable["_models.IpAllocationListResult"]
"""Gets all IpAllocations in a subscription.
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either IpAllocationListResult or the result of cls(response)
:rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.network.v2021_05_01.models.IpAllocationListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.IpAllocationListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2021-05-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
def extract_data(pipeline_response):
deserialized = self._deserialize('IpAllocationListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(
get_next, extract_data
)
list.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Network/IpAllocations'} # type: ignore
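    # Hedged usage sketch: both list methods return an ItemPaged iterator,
    # which fetches subsequent pages transparently (names are placeholders):
    #
    #     for ip_alloc in client.ip_allocations.list():
    #         print(ip_alloc.name)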
def list_by_resource_group(
self,
resource_group_name, # type: str
**kwargs # type: Any
):
# type: (...) -> Iterable["_models.IpAllocationListResult"]
"""Gets all IpAllocations in a resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either IpAllocationListResult or the result of cls(response)
:rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.network.v2021_05_01.models.IpAllocationListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.IpAllocationListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2021-05-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list_by_resource_group.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
def extract_data(pipeline_response):
deserialized = self._deserialize('IpAllocationListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(
get_next, extract_data
)
list_by_resource_group.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/IpAllocations'} # type: ignore
| Azure/azure-sdk-for-python | sdk/network/azure-mgmt-network/azure/mgmt/network/v2021_05_01/operations/_ip_allocations_operations.py | Python | mit | 27,202 |
'''
Created on Jul 18, 2017
@author: I310003
'''
| BlessedAndy/Programming-Foundations-with-Python | Programming Foundations with Python/src/cn/careerwinner/sap/report_scheduler.py | Python | apache-2.0 | 55 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('cms', '0015_auto_20160421_0000'),
]
operations = [
migrations.CreateModel(
name='ArticleListItemPlugin',
fields=[
('cmsplugin_ptr', models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, serialize=False, to='cms.CMSPlugin')),
('title', models.CharField(max_length=100)),
('description', models.TextField(blank=True)),
],
options={
'abstract': False,
},
bases=('cms.cmsplugin',),
),
]
| okfn/foundation | article_list_item/migrations/0001_initial.py | Python | mit | 749 |
# Copyright (C) 2015 Red Hat, Inc.
#
# This copyrighted material is made available to anyone wishing to use,
# modify, copy, or redistribute it subject to the terms and conditions of
# the GNU General Public License v.2, or (at your option) any later version.
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY expressed or implied, including the implied warranties of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
# Public License for more details. You should have received a copy of the
# GNU General Public License along with this program; if not, write to the
# Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301, USA. Any Red Hat trademarks that are incorporated in the
# source code or documentation are not subject to the GNU General Public
# License and may only be used or replicated with the express permission of
# Red Hat, Inc.
"""An extension that ensures that given features are present."""
import sys
import dnf.rpm
if __name__ == '__main__':
FTR_SPECS = {'hawkey-0.5.3-1.fc21.i686'} # <-- SET YOUR FEATURES HERE.
RPM_SPECS = {'./hawkey-0.5.3-1.fc21.i686.rpm'} # <-- SET YOUR RPMS HERE.
GRP_SPECS = {'kde-desktop'} # <-- SET YOUR GROUPS HERE.
with dnf.Base() as base:
# Substitutions are needed for correct interpretation of repo files.
RELEASEVER = dnf.rpm.detect_releasever(base.conf.installroot)
base.conf.substitutions['releasever'] = RELEASEVER
# Repositories are needed if we want to install anything.
base.read_all_repos()
# A sack is required by marking methods and dependency resolving.
base.fill_sack()
# Feature marking methods set the user request.
for ftr_spec in FTR_SPECS:
try:
base.install(ftr_spec)
except dnf.exceptions.MarkingError:
sys.exit('Feature(s) cannot be found: ' + ftr_spec)
# Package marking methods set the user request.
for rpm_spec in RPM_SPECS:
try:
base.package_install(base.add_remote_rpm(rpm_spec))
except IOError:
sys.exit('RPM cannot be loaded: ' + rpm_spec)
# Comps data reading initializes the base.comps attribute.
if GRP_SPECS:
base.read_comps()
# Group marking methods set the user request.
for grp_spec in GRP_SPECS:
group = base.comps.group_by_pattern(grp_spec)
if not group:
sys.exit('Group cannot be found: ' + grp_spec)
base.group_install(group, ['mandatory', 'default'])
# Resolving finds a transaction that allows the packages installation.
try:
base.resolve()
except dnf.exceptions.DepsolveError:
sys.exit('Dependencies cannot be resolved.')
# The packages to be installed must be downloaded first.
try:
base.download_packages(base.transaction.install_set)
except dnf.exceptions.DownloadError:
sys.exit('Required package cannot be downloaded.')
# The request can finally be fulfilled.
base.do_transaction()
| atodorov/dnf | doc/examples/install_extension.py | Python | gpl-2.0 | 3,221 |
"""
Tests for the base connection class
"""
from unittest import TestCase
import six
from pynamodb.connection import Connection
from pynamodb.exceptions import (
TableError, DeleteError, UpdateError, PutError, GetError, ScanError, QueryError)
from pynamodb.constants import DEFAULT_REGION
from .data import DESCRIBE_TABLE_DATA, GET_ITEM_DATA, LIST_TABLE_DATA
if six.PY3:
from unittest.mock import patch
else:
from mock import patch
from .response import HttpBadRequest, HttpOK, HttpUnavailable
PATCH_METHOD = 'botocore.operation.Operation.call'
class ConnectionTestCase(TestCase):
"""
Tests for the base connection class
"""
def setUp(self):
self.test_table_name = 'ci-table'
self.region = DEFAULT_REGION
def test_create_connection(self):
"""
Connection()
"""
conn = Connection()
self.assertIsNotNone(conn)
conn = Connection(host='foo-host')
self.assertIsNotNone(conn.endpoint)
self.assertIsNotNone(conn)
self.assertEqual(repr(conn), "Connection<{0}>".format(conn.endpoint.host))
def test_create_table(self):
"""
Connection.create_table
"""
conn = Connection(self.region)
kwargs = {
'read_capacity_units': 1,
'write_capacity_units': 1,
}
self.assertRaises(ValueError, conn.create_table, self.test_table_name, **kwargs)
kwargs['attribute_definitions'] = [
{
'attribute_name': 'key1',
'attribute_type': 'S'
},
{
'attribute_name': 'key2',
'attribute_type': 'S'
}
]
self.assertRaises(ValueError, conn.create_table, self.test_table_name, **kwargs)
kwargs['key_schema'] = [
{
'attribute_name': 'key1',
'key_type': 'hash'
},
{
'attribute_name': 'key2',
'key_type': 'range'
}
]
params = {
'table_name': 'ci-table',
'provisioned_throughput': {
'WriteCapacityUnits': 1,
'ReadCapacityUnits': 1
},
'attribute_definitions': [
{
'AttributeType': 'S',
'AttributeName': 'key1'
},
{
'AttributeType': 'S',
'AttributeName': 'key2'
}
],
'key_schema': [
{
'KeyType': 'HASH',
'AttributeName': 'key1'
},
{
'KeyType': 'RANGE',
'AttributeName': 'key2'
}
]
}
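        # Note the shape change: the snake_case kwargs above are expected to be
        # folded into DynamoDB's CamelCase wire format, which ``params`` mirrors.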
with patch(PATCH_METHOD) as req:
req.return_value = HttpBadRequest(), None
self.assertRaises(TableError, conn.create_table, self.test_table_name, **kwargs)
with patch(PATCH_METHOD) as req:
req.return_value = HttpOK(), None
conn.create_table(
self.test_table_name,
**kwargs
)
self.assertEqual(req.call_args[1], params)
kwargs['global_secondary_indexes'] = [
{
'index_name': 'alt-index',
'key_schema': [
{
'KeyType': 'HASH',
'AttributeName': 'AltKey'
}
],
'projection': {
'ProjectionType': 'KEYS_ONLY'
},
'provisioned_throughput': {
'ReadCapacityUnits': 1,
'WriteCapacityUnits': 1,
},
}
]
params['global_secondary_indexes'] = [{'IndexName': 'alt-index', 'Projection': {'ProjectionType': 'KEYS_ONLY'},
'KeySchema': [{'AttributeName': 'AltKey', 'KeyType': 'HASH'}],
'ProvisionedThroughput': {'ReadCapacityUnits': 1,
'WriteCapacityUnits': 1}}]
with patch(PATCH_METHOD) as req:
req.return_value = HttpOK(), None
conn.create_table(
self.test_table_name,
**kwargs
)
self.assertEqual(req.call_args[1], params)
        del kwargs['global_secondary_indexes']
        del params['global_secondary_indexes']
kwargs['local_secondary_indexes'] = [
{
'index_name': 'alt-index',
'projection': {
'ProjectionType': 'KEYS_ONLY'
},
'key_schema': [
{
'AttributeName': 'AltKey', 'KeyType': 'HASH'
}
],
'provisioned_throughput': {
'ReadCapacityUnits': 1,
'WriteCapacityUnits': 1
}
}
]
params['local_secondary_indexes'] = [
{
'Projection': {
'ProjectionType': 'KEYS_ONLY'
},
'KeySchema': [
{
'KeyType': 'HASH',
'AttributeName': 'AltKey'
}
],
'IndexName': 'alt-index'
}
]
with patch(PATCH_METHOD) as req:
req.return_value = HttpOK(), None
conn.create_table(
self.test_table_name,
**kwargs
)
self.assertEqual(req.call_args[1], params)
def test_delete_table(self):
"""
Connection.delete_table
"""
params = {'table_name': 'ci-table'}
with patch(PATCH_METHOD) as req:
req.return_value = HttpOK(), None
conn = Connection(self.region)
conn.delete_table(self.test_table_name)
kwargs = req.call_args[1]
self.assertEqual(kwargs, params)
with patch(PATCH_METHOD) as req:
req.return_value = HttpBadRequest(), None
conn = Connection(self.region)
self.assertRaises(TableError, conn.delete_table, self.test_table_name)
def test_update_table(self):
"""
Connection.update_table
"""
with patch(PATCH_METHOD) as req:
req.return_value = HttpOK(), None
conn = Connection(self.region)
params = {
'provisioned_throughput': {
'WriteCapacityUnits': 2,
'ReadCapacityUnits': 2
},
'table_name': 'ci-table'
}
conn.update_table(
self.test_table_name,
read_capacity_units=2,
write_capacity_units=2
)
self.assertEqual(req.call_args[1], params)
self.assertRaises(ValueError, conn.update_table, self.test_table_name, read_capacity_units=2)
with patch(PATCH_METHOD) as req:
req.return_value = HttpBadRequest(), None
conn = Connection(self.region)
self.assertRaises(
TableError,
conn.update_table,
self.test_table_name,
read_capacity_units=2,
write_capacity_units=2)
with patch(PATCH_METHOD) as req:
req.return_value = HttpOK(), None
conn = Connection(self.region)
global_secondary_index_updates = [
{
"index_name": "foo-index",
"read_capacity_units": 2,
"write_capacity_units": 2
}
]
params = {
'table_name': 'ci-table',
'provisioned_throughput': {
'ReadCapacityUnits': 2,
'WriteCapacityUnits': 2,
},
'global_secondary_index_updates': [
{
'Update': {
'IndexName': 'foo-index',
'ProvisionedThroughput': {
'ReadCapacityUnits': 2,
'WriteCapacityUnits': 2,
}
}
}
]
}
conn.update_table(
self.test_table_name,
read_capacity_units=2,
write_capacity_units=2,
global_secondary_index_updates=global_secondary_index_updates
)
self.assertEqual(req.call_args[1], params)
def test_describe_table(self):
"""
Connection.describe_table
"""
with patch(PATCH_METHOD) as req:
req.return_value = HttpOK(), DESCRIBE_TABLE_DATA
conn = Connection(self.region)
conn.describe_table(self.test_table_name)
self.assertEqual(req.call_args[1], {'table_name': 'ci-table'})
with patch(PATCH_METHOD) as req:
req.return_value = HttpBadRequest(), DESCRIBE_TABLE_DATA
conn = Connection(self.region)
table = conn.describe_table(self.test_table_name)
self.assertIsNone(table)
with patch(PATCH_METHOD) as req:
req.return_value = HttpUnavailable(), None
conn = Connection(self.region)
self.assertRaises(TableError, conn.describe_table, self.test_table_name)
def test_list_tables(self):
"""
Connection.list_tables
"""
with patch(PATCH_METHOD) as req:
req.return_value = HttpOK(), LIST_TABLE_DATA
conn = Connection(self.region)
conn.list_tables(exclusive_start_table_name='Thread')
self.assertEqual(req.call_args[1], {'exclusive_start_table_name': 'Thread'})
with patch(PATCH_METHOD) as req:
req.return_value = HttpOK(), LIST_TABLE_DATA
conn = Connection(self.region)
conn.list_tables(limit=3)
self.assertEqual(req.call_args[1], {'limit': 3})
with patch(PATCH_METHOD) as req:
req.return_value = HttpOK(), LIST_TABLE_DATA
conn = Connection(self.region)
conn.list_tables()
self.assertEqual(req.call_args[1], {})
with patch(PATCH_METHOD) as req:
req.return_value = HttpBadRequest(), None
conn = Connection(self.region)
self.assertRaises(TableError, conn.list_tables)
def test_delete_item(self):
"""
Connection.delete_item
"""
conn = Connection(self.region)
with patch(PATCH_METHOD) as req:
req.return_value = HttpOK(), DESCRIBE_TABLE_DATA
conn.describe_table(self.test_table_name)
with patch(PATCH_METHOD) as req:
req.return_value = HttpBadRequest(), {}
self.assertRaises(DeleteError, conn.delete_item, self.test_table_name, "foo", "bar")
with patch(PATCH_METHOD) as req:
req.return_value = HttpOK(), {}
conn.delete_item(
self.test_table_name,
"Amazon DynamoDB",
"How do I update multiple items?")
params = {
'return_consumed_capacity': 'TOTAL',
'key': {
'ForumName': {
'S': 'Amazon DynamoDB'
},
'Subject': {
'S': 'How do I update multiple items?'
}
},
'table_name': self.test_table_name}
self.assertEqual(req.call_args[1], params)
with patch(PATCH_METHOD) as req:
req.return_value = HttpOK(), {}
conn.delete_item(
self.test_table_name,
"Amazon DynamoDB",
"How do I update multiple items?",
return_values='ALL_NEW'
)
params = {
'return_consumed_capacity': 'TOTAL',
'key': {
'ForumName': {
'S': 'Amazon DynamoDB'
},
'Subject': {
'S': 'How do I update multiple items?'
}
},
'table_name': self.test_table_name,
'return_values': 'ALL_NEW'
}
self.assertEqual(req.call_args[1], params)
self.assertRaises(
ValueError,
conn.delete_item,
self.test_table_name,
"foo",
"bar",
return_values='bad_values')
self.assertRaises(
ValueError,
conn.delete_item,
self.test_table_name,
"foo",
"bar",
return_consumed_capacity='badvalue')
self.assertRaises(
ValueError,
conn.delete_item,
self.test_table_name,
"foo",
"bar",
return_item_collection_metrics='badvalue')
with patch(PATCH_METHOD) as req:
req.return_value = HttpOK(), {}
conn.delete_item(
self.test_table_name,
"Amazon DynamoDB",
"How do I update multiple items?",
return_consumed_capacity='TOTAL'
)
params = {
'key': {
'ForumName': {
'S': 'Amazon DynamoDB'
},
'Subject': {
'S': 'How do I update multiple items?'
}
},
'table_name': self.test_table_name,
'return_consumed_capacity': 'TOTAL'
}
self.assertEqual(req.call_args[1], params)
with patch(PATCH_METHOD) as req:
req.return_value = HttpOK(), {}
conn.delete_item(
self.test_table_name,
"Amazon DynamoDB",
"How do I update multiple items?",
return_item_collection_metrics='SIZE'
)
params = {
'key': {
'ForumName': {
'S': 'Amazon DynamoDB'
},
'Subject': {
'S': 'How do I update multiple items?'
}
},
'table_name': self.test_table_name,
'return_item_collection_metrics': 'SIZE',
'return_consumed_capacity': 'TOTAL'
}
self.assertEqual(req.call_args[1], params)
self.assertRaises(
ValueError,
conn.delete_item,
self.test_table_name,
"Foo", "Bar",
expected={'Bad': {'Value': False}}
)
with patch(PATCH_METHOD) as req:
req.return_value = HttpOK(), {}
conn.delete_item(
self.test_table_name,
"Amazon DynamoDB",
"How do I update multiple items?",
expected={'ForumName': {'Exists': False}},
return_item_collection_metrics='SIZE'
)
params = {
'key': {
'ForumName': {
'S': 'Amazon DynamoDB'
},
'Subject': {
'S': 'How do I update multiple items?'
}
},
'expected': {
'ForumName': {
'Exists': False
}
},
'table_name': self.test_table_name,
'return_consumed_capacity': 'TOTAL',
'return_item_collection_metrics': 'SIZE'
}
self.assertEqual(req.call_args[1], params)
def test_get_item(self):
"""
Connection.get_item
"""
conn = Connection(self.region)
table_name = 'Thread'
with patch(PATCH_METHOD) as req:
req.return_value = HttpOK(), DESCRIBE_TABLE_DATA
conn.describe_table(table_name)
with patch(PATCH_METHOD) as req:
req.return_value = HttpOK(), GET_ITEM_DATA
item = conn.get_item(table_name, "Amazon DynamoDB", "How do I update multiple items?")
self.assertEqual(item, GET_ITEM_DATA)
with patch(PATCH_METHOD) as req:
req.return_value = HttpBadRequest(), None
self.assertRaises(
GetError,
conn.get_item,
table_name,
"Amazon DynamoDB",
"How do I update multiple items?"
)
with patch(PATCH_METHOD) as req:
req.return_value = HttpOK(), GET_ITEM_DATA
conn.get_item(
table_name,
"Amazon DynamoDB",
"How do I update multiple items?",
attributes_to_get=['ForumName']
)
params = {
'return_consumed_capacity': 'TOTAL',
'attributes_to_get': ['ForumName'],
'key': {
'ForumName': {
'S': 'Amazon DynamoDB'
},
'Subject': {
'S': 'How do I update multiple items?'
}
},
'consistent_read': False,
'table_name': 'Thread'
}
self.assertEqual(req.call_args[1], params)
def test_update_item(self):
"""
Connection.update_item
"""
conn = Connection()
with patch(PATCH_METHOD) as req:
req.return_value = HttpOK(), DESCRIBE_TABLE_DATA
conn.describe_table(self.test_table_name)
self.assertRaises(ValueError, conn.update_item, self.test_table_name, 'foo-key')
attr_updates = {
'Subject': {
'Value': 'foo-subject',
'Action': 'PUT'
},
}
with patch(PATCH_METHOD) as req:
req.return_value = HttpBadRequest(), {}
self.assertRaises(
UpdateError,
conn.update_item,
self.test_table_name,
'foo-key',
attribute_updates=attr_updates,
range_key='foo-range-key',
)
with patch(PATCH_METHOD) as req:
bad_attr_updates = {
'Subject': {
'Value': 'foo-subject',
'Action': 'BADACTION'
},
}
req.return_value = HttpOK(), {}
self.assertRaises(
ValueError,
conn.update_item,
self.test_table_name,
'foo-key',
attribute_updates=bad_attr_updates,
range_key='foo-range-key',
)
with patch(PATCH_METHOD) as req:
req.return_value = HttpOK(), {}
conn.update_item(
self.test_table_name,
'foo-key',
return_consumed_capacity='TOTAL',
return_item_collection_metrics='NONE',
return_values='ALL_NEW',
expected={'Forum': {'Exists': False}},
attribute_updates=attr_updates,
range_key='foo-range-key',
)
params = {
'return_values': 'ALL_NEW',
'return_item_collection_metrics': 'NONE',
'return_consumed_capacity': 'TOTAL',
'key': {
'ForumName': {
'S': 'foo-key'
},
'Subject': {
'S': 'foo-range-key'
}
},
'expected': {
'Forum': {
'Exists': False
}
},
'attribute_updates': {
'Subject': {
'Value': {
'S': 'foo-subject'
},
'Action': 'PUT'
}
},
'table_name': 'ci-table'
}
self.assertEqual(req.call_args[1], params)
with patch(PATCH_METHOD) as req:
req.return_value = HttpOK(), {}
conn.update_item(
self.test_table_name,
'foo-key',
attribute_updates=attr_updates,
range_key='foo-range-key',
)
params = {
'key': {
'ForumName': {
'S': 'foo-key'
},
'Subject': {
'S': 'foo-range-key'
}
},
'attribute_updates': {
'Subject': {
'Value': {
'S': 'foo-subject'
},
'Action': 'PUT'
}
},
'return_consumed_capacity': 'TOTAL',
'table_name': 'ci-table'
}
self.assertEqual(req.call_args[1], params)
attr_updates = {
'Subject': {
'Value': {'S': 'foo-subject'},
'Action': 'PUT'
},
}
with patch(PATCH_METHOD) as req:
req.return_value = HttpOK(), {}
conn.update_item(
self.test_table_name,
'foo-key',
attribute_updates=attr_updates,
range_key='foo-range-key',
)
params = {
'key': {
'ForumName': {
'S': 'foo-key'
},
'Subject': {
'S': 'foo-range-key'
}
},
'attribute_updates': {
'Subject': {
'Value': {
'S': 'foo-subject'
},
'Action': 'PUT'
}
},
'return_consumed_capacity': 'TOTAL',
'table_name': 'ci-table'
}
self.assertEqual(req.call_args[1], params)
attr_updates = {
'Subject': {
'Value': {'N': '1'},
'Action': 'ADD'
},
}
with patch(PATCH_METHOD) as req:
req.return_value = HttpOK(), {}
conn.update_item(
self.test_table_name,
'foo-key',
attribute_updates=attr_updates,
range_key='foo-range-key',
)
params = {
'key': {
'ForumName': {
'S': 'foo-key'
},
'Subject': {
'S': 'foo-range-key'
}
},
'attribute_updates': {
'Subject': {
'Value': {
'N': '1'
},
'Action': 'ADD'
}
},
'return_consumed_capacity': 'TOTAL',
'table_name': 'ci-table'
}
self.assertEqual(req.call_args[1], params)
def test_put_item(self):
"""
Connection.put_item
"""
conn = Connection(self.region)
with patch(PATCH_METHOD) as req:
req.return_value = HttpOK(), DESCRIBE_TABLE_DATA
conn.describe_table(self.test_table_name)
with patch(PATCH_METHOD) as req:
req.return_value = HttpOK(), {}
conn.put_item(
self.test_table_name,
'foo-key',
range_key='foo-range-key',
return_consumed_capacity='TOTAL',
return_item_collection_metrics='SIZE',
return_values='ALL_NEW',
attributes={'ForumName': 'foo-value'}
)
params = {
'return_values': 'ALL_NEW',
'return_consumed_capacity': 'TOTAL',
'return_item_collection_metrics': 'SIZE',
'table_name': self.test_table_name,
'item': {
'ForumName': {
'S': 'foo-value'
},
'Subject': {
'S': 'foo-range-key'
}
}
}
self.assertEqual(req.call_args[1], params)
with patch(PATCH_METHOD) as req:
req.return_value = HttpBadRequest(), {}
self.assertRaises(
PutError,
conn.put_item,
self.test_table_name,
'foo-key',
range_key='foo-range-key',
attributes={'ForumName': 'foo-value'}
)
with patch(PATCH_METHOD) as req:
req.return_value = HttpOK(), {}
conn.put_item(
self.test_table_name,
'foo-key',
range_key='foo-range-key',
attributes={'ForumName': 'foo-value'}
)
params = {'table_name': self.test_table_name,
'return_consumed_capacity': 'TOTAL',
'item': {'ForumName': {'S': 'foo-value'}, 'Subject': {'S': 'foo-range-key'}}}
self.assertEqual(req.call_args[1], params)
with patch(PATCH_METHOD) as req:
req.return_value = HttpOK(), {}
conn.put_item(
self.test_table_name,
'foo-key',
range_key='foo-range-key',
attributes={'ForumName': 'foo-value'}
)
params = {
'return_consumed_capacity': 'TOTAL',
'item': {
'ForumName': {
'S': 'foo-value'
},
'Subject': {
'S': 'foo-range-key'
}
},
'table_name': self.test_table_name
}
self.assertEqual(req.call_args[1], params)
with patch(PATCH_METHOD) as req:
req.return_value = HttpOK(), {}
conn.put_item(
self.test_table_name,
'item1-hash',
range_key='item1-range',
attributes={'foo': {'S': 'bar'}},
expected={'Forum': {'Exists': False}}
)
params = {
'return_consumed_capacity': 'TOTAL',
'table_name': self.test_table_name,
'expected': {
'Forum': {
'Exists': False
}
},
'item': {
'ForumName': {
'S': 'item1-hash'
},
'foo': {
'S': 'bar'
},
'Subject': {
'S': 'item1-range'
}
}
}
self.assertEqual(req.call_args[1], params)
with patch(PATCH_METHOD) as req:
req.return_value = HttpOK(), {}
conn.put_item(
self.test_table_name,
'item1-hash',
range_key='item1-range',
attributes={'foo': {'S': 'bar'}},
expected={'ForumName': {'Value': 'item1-hash'}}
)
params = {
'table_name': self.test_table_name,
'expected': {
'ForumName': {
'Value': {
'S': 'item1-hash'
}
}
},
'return_consumed_capacity': 'TOTAL',
'item': {
'ForumName': {
'S': 'item1-hash'
},
'foo': {
'S': 'bar'
},
'Subject': {
'S': 'item1-range'
}
}
}
self.assertEqual(req.call_args[1], params)
def test_batch_write_item(self):
"""
Connection.batch_write_item
"""
items = []
conn = Connection()
table_name = 'Thread'
for i in range(10):
items.append(
{"ForumName": "FooForum", "Subject": "thread-{0}".format(i)}
)
self.assertRaises(
ValueError,
conn.batch_write_item,
table_name)
with patch(PATCH_METHOD) as req:
req.return_value = HttpOK(), DESCRIBE_TABLE_DATA
conn.describe_table(table_name)
with patch(PATCH_METHOD) as req:
req.return_value = HttpOK(), {}
conn.batch_write_item(
table_name,
put_items=items,
return_item_collection_metrics='SIZE',
return_consumed_capacity='TOTAL'
)
params = {
'return_consumed_capacity': 'TOTAL',
'return_item_collection_metrics': 'SIZE',
'request_items': {
'Thread': [
{'PutRequest': {'Item': {'ForumName': {'S': 'FooForum'}, 'Subject': {'S': 'thread-0'}}}},
{'PutRequest': {'Item': {'ForumName': {'S': 'FooForum'}, 'Subject': {'S': 'thread-1'}}}},
{'PutRequest': {'Item': {'ForumName': {'S': 'FooForum'}, 'Subject': {'S': 'thread-2'}}}},
{'PutRequest': {'Item': {'ForumName': {'S': 'FooForum'}, 'Subject': {'S': 'thread-3'}}}},
{'PutRequest': {'Item': {'ForumName': {'S': 'FooForum'}, 'Subject': {'S': 'thread-4'}}}},
{'PutRequest': {'Item': {'ForumName': {'S': 'FooForum'}, 'Subject': {'S': 'thread-5'}}}},
{'PutRequest': {'Item': {'ForumName': {'S': 'FooForum'}, 'Subject': {'S': 'thread-6'}}}},
{'PutRequest': {'Item': {'ForumName': {'S': 'FooForum'}, 'Subject': {'S': 'thread-7'}}}},
{'PutRequest': {'Item': {'ForumName': {'S': 'FooForum'}, 'Subject': {'S': 'thread-8'}}}},
{'PutRequest': {'Item': {'ForumName': {'S': 'FooForum'}, 'Subject': {'S': 'thread-9'}}}}
]
}
}
self.assertEqual(req.call_args[1], params)
with patch(PATCH_METHOD) as req:
req.return_value = HttpOK(), {}
conn.batch_write_item(
table_name,
put_items=items
)
params = {
'return_consumed_capacity': 'TOTAL',
'request_items': {
'Thread': [
{'PutRequest': {'Item': {'ForumName': {'S': 'FooForum'}, 'Subject': {'S': 'thread-0'}}}},
{'PutRequest': {'Item': {'ForumName': {'S': 'FooForum'}, 'Subject': {'S': 'thread-1'}}}},
{'PutRequest': {'Item': {'ForumName': {'S': 'FooForum'}, 'Subject': {'S': 'thread-2'}}}},
{'PutRequest': {'Item': {'ForumName': {'S': 'FooForum'}, 'Subject': {'S': 'thread-3'}}}},
{'PutRequest': {'Item': {'ForumName': {'S': 'FooForum'}, 'Subject': {'S': 'thread-4'}}}},
{'PutRequest': {'Item': {'ForumName': {'S': 'FooForum'}, 'Subject': {'S': 'thread-5'}}}},
{'PutRequest': {'Item': {'ForumName': {'S': 'FooForum'}, 'Subject': {'S': 'thread-6'}}}},
{'PutRequest': {'Item': {'ForumName': {'S': 'FooForum'}, 'Subject': {'S': 'thread-7'}}}},
{'PutRequest': {'Item': {'ForumName': {'S': 'FooForum'}, 'Subject': {'S': 'thread-8'}}}},
{'PutRequest': {'Item': {'ForumName': {'S': 'FooForum'}, 'Subject': {'S': 'thread-9'}}}}
]
}
}
self.assertEqual(req.call_args[1], params)
with patch(PATCH_METHOD) as req:
req.return_value = HttpBadRequest(), {}
self.assertRaises(
PutError,
conn.batch_write_item,
table_name,
delete_items=items
)
with patch(PATCH_METHOD) as req:
req.return_value = HttpOK(), {}
conn.batch_write_item(
table_name,
delete_items=items
)
params = {
'return_consumed_capacity': 'TOTAL',
'request_items': {
'Thread': [
{'DeleteRequest': {'Key': {'ForumName': {'S': 'FooForum'}, 'Subject': {'S': 'thread-0'}}}},
{'DeleteRequest': {'Key': {'ForumName': {'S': 'FooForum'}, 'Subject': {'S': 'thread-1'}}}},
{'DeleteRequest': {'Key': {'ForumName': {'S': 'FooForum'}, 'Subject': {'S': 'thread-2'}}}},
{'DeleteRequest': {'Key': {'ForumName': {'S': 'FooForum'}, 'Subject': {'S': 'thread-3'}}}},
{'DeleteRequest': {'Key': {'ForumName': {'S': 'FooForum'}, 'Subject': {'S': 'thread-4'}}}},
{'DeleteRequest': {'Key': {'ForumName': {'S': 'FooForum'}, 'Subject': {'S': 'thread-5'}}}},
{'DeleteRequest': {'Key': {'ForumName': {'S': 'FooForum'}, 'Subject': {'S': 'thread-6'}}}},
{'DeleteRequest': {'Key': {'ForumName': {'S': 'FooForum'}, 'Subject': {'S': 'thread-7'}}}},
{'DeleteRequest': {'Key': {'ForumName': {'S': 'FooForum'}, 'Subject': {'S': 'thread-8'}}}},
{'DeleteRequest': {'Key': {'ForumName': {'S': 'FooForum'}, 'Subject': {'S': 'thread-9'}}}}
]
}
}
self.assertEqual(req.call_args[1], params)
with patch(PATCH_METHOD) as req:
req.return_value = HttpOK(), {}
conn.batch_write_item(
table_name,
delete_items=items,
return_consumed_capacity='TOTAL',
return_item_collection_metrics='SIZE'
)
params = {
'return_consumed_capacity': 'TOTAL',
'return_item_collection_metrics': 'SIZE',
'request_items': {
'Thread': [
{'DeleteRequest': {'Key': {'ForumName': {'S': 'FooForum'}, 'Subject': {'S': 'thread-0'}}}},
{'DeleteRequest': {'Key': {'ForumName': {'S': 'FooForum'}, 'Subject': {'S': 'thread-1'}}}},
{'DeleteRequest': {'Key': {'ForumName': {'S': 'FooForum'}, 'Subject': {'S': 'thread-2'}}}},
{'DeleteRequest': {'Key': {'ForumName': {'S': 'FooForum'}, 'Subject': {'S': 'thread-3'}}}},
{'DeleteRequest': {'Key': {'ForumName': {'S': 'FooForum'}, 'Subject': {'S': 'thread-4'}}}},
{'DeleteRequest': {'Key': {'ForumName': {'S': 'FooForum'}, 'Subject': {'S': 'thread-5'}}}},
{'DeleteRequest': {'Key': {'ForumName': {'S': 'FooForum'}, 'Subject': {'S': 'thread-6'}}}},
{'DeleteRequest': {'Key': {'ForumName': {'S': 'FooForum'}, 'Subject': {'S': 'thread-7'}}}},
{'DeleteRequest': {'Key': {'ForumName': {'S': 'FooForum'}, 'Subject': {'S': 'thread-8'}}}},
{'DeleteRequest': {'Key': {'ForumName': {'S': 'FooForum'}, 'Subject': {'S': 'thread-9'}}}}
]
}
}
self.assertEqual(req.call_args[1], params)
def test_batch_get_item(self):
"""
Connection.batch_get_item
"""
items = []
conn = Connection()
table_name = 'Thread'
for i in range(10):
items.append(
{"ForumName": "FooForum", "Subject": "thread-{0}".format(i)}
)
with patch(PATCH_METHOD) as req:
req.return_value = HttpOK(), DESCRIBE_TABLE_DATA
conn.describe_table(table_name)
with patch(PATCH_METHOD) as req:
req.return_value = HttpBadRequest(), {}
self.assertRaises(
GetError,
conn.batch_get_item,
table_name,
items,
consistent_read=True,
return_consumed_capacity='TOTAL',
attributes_to_get=['ForumName']
)
with patch(PATCH_METHOD) as req:
req.return_value = HttpOK(), {}
conn.batch_get_item(
table_name,
items,
consistent_read=True,
return_consumed_capacity='TOTAL',
attributes_to_get=['ForumName']
)
params = {
'return_consumed_capacity': 'TOTAL',
'request_items': {
'Thread': {
'consistent_read': True,
'attributes_to_get': ['ForumName'],
'Keys': [
{'ForumName': {'S': 'FooForum'}, 'Subject': {'S': 'thread-0'}},
{'ForumName': {'S': 'FooForum'}, 'Subject': {'S': 'thread-1'}},
{'ForumName': {'S': 'FooForum'}, 'Subject': {'S': 'thread-2'}},
{'ForumName': {'S': 'FooForum'}, 'Subject': {'S': 'thread-3'}},
{'ForumName': {'S': 'FooForum'}, 'Subject': {'S': 'thread-4'}},
{'ForumName': {'S': 'FooForum'}, 'Subject': {'S': 'thread-5'}},
{'ForumName': {'S': 'FooForum'}, 'Subject': {'S': 'thread-6'}},
{'ForumName': {'S': 'FooForum'}, 'Subject': {'S': 'thread-7'}},
{'ForumName': {'S': 'FooForum'}, 'Subject': {'S': 'thread-8'}},
{'ForumName': {'S': 'FooForum'}, 'Subject': {'S': 'thread-9'}}
]
}
}
}
self.assertEqual(req.call_args[1], params)
with patch(PATCH_METHOD) as req:
req.return_value = HttpOK(), {}
conn.batch_get_item(
table_name,
items
)
params = {
'return_consumed_capacity': 'TOTAL',
'request_items': {
'Thread': {
'Keys': [
{'ForumName': {'S': 'FooForum'}, 'Subject': {'S': 'thread-0'}},
{'ForumName': {'S': 'FooForum'}, 'Subject': {'S': 'thread-1'}},
{'ForumName': {'S': 'FooForum'}, 'Subject': {'S': 'thread-2'}},
{'ForumName': {'S': 'FooForum'}, 'Subject': {'S': 'thread-3'}},
{'ForumName': {'S': 'FooForum'}, 'Subject': {'S': 'thread-4'}},
{'ForumName': {'S': 'FooForum'}, 'Subject': {'S': 'thread-5'}},
{'ForumName': {'S': 'FooForum'}, 'Subject': {'S': 'thread-6'}},
{'ForumName': {'S': 'FooForum'}, 'Subject': {'S': 'thread-7'}},
{'ForumName': {'S': 'FooForum'}, 'Subject': {'S': 'thread-8'}},
{'ForumName': {'S': 'FooForum'}, 'Subject': {'S': 'thread-9'}}
]
}
}
}
self.assertEqual(req.call_args[1], params)
def test_query(self):
"""
Connection.query
"""
conn = Connection()
table_name = 'Thread'
with patch(PATCH_METHOD) as req:
req.return_value = HttpOK(), DESCRIBE_TABLE_DATA
conn.describe_table(table_name)
self.assertRaises(
ValueError,
conn.query,
table_name,
"FooForum",
return_consumed_capacity='TOTAL',
key_conditions={'ForumName': {'ComparisonOperator': 'BAD_OPERATOR', 'AttributeValueList': ['thread']}}
)
self.assertRaises(
ValueError,
conn.query,
table_name,
"FooForum",
return_consumed_capacity='TOTAL',
select='BAD_VALUE',
key_conditions={'ForumName': {'ComparisonOperator': 'BEGINS_WITH', 'AttributeValueList': ['thread']}}
)
with patch(PATCH_METHOD) as req:
req.return_value = HttpBadRequest(), {}
self.assertRaises(
QueryError,
conn.query,
table_name,
"FooForum",
scan_index_forward=True,
return_consumed_capacity='TOTAL',
select='ALL_ATTRIBUTES',
key_conditions={'ForumName': {'ComparisonOperator': 'BEGINS_WITH', 'AttributeValueList': ['thread']}}
)
with patch(PATCH_METHOD) as req:
req.return_value = HttpOK(), {}
conn.query(
table_name,
"FooForum",
scan_index_forward=True,
return_consumed_capacity='TOTAL',
select='ALL_ATTRIBUTES',
key_conditions={'ForumName': {'ComparisonOperator': 'BEGINS_WITH', 'AttributeValueList': ['thread']}}
)
params = {
'scan_index_forward': True,
'select': 'ALL_ATTRIBUTES',
'return_consumed_capacity': 'TOTAL',
'key_conditions': {
'ForumName': {
'ComparisonOperator': 'BEGINS_WITH', 'AttributeValueList': [{
'S': 'thread'
}]
}
},
'table_name': 'Thread'
}
self.assertEqual(req.call_args[1], params)
with patch(PATCH_METHOD) as req:
req.return_value = HttpOK(), {}
conn.query(
table_name,
"FooForum",
key_conditions={'ForumName': {'ComparisonOperator': 'BEGINS_WITH', 'AttributeValueList': ['thread']}}
)
params = {
'return_consumed_capacity': 'TOTAL',
'key_conditions': {
'ForumName': {
'ComparisonOperator': 'BEGINS_WITH', 'AttributeValueList': [{
'S': 'thread'
}]
}
},
'table_name': 'Thread'
}
self.assertEqual(req.call_args[1], params)
with patch(PATCH_METHOD) as req:
req.return_value = HttpOK(), {}
conn.query(
table_name,
"FooForum",
limit=1,
index_name='LastPostIndex',
attributes_to_get=['ForumName'],
exclusive_start_key="FooForum",
consistent_read=True
)
params = {
'limit': 1,
'return_consumed_capacity': 'TOTAL',
'consistent_read': True,
'exclusive_start_key': {
'ForumName': {
'S': 'FooForum'
}
},
'index_name': 'LastPostIndex',
'attributes_to_get': ['ForumName'],
'key_conditions': {
'ForumName': {
'ComparisonOperator': 'EQ', 'AttributeValueList': [{
'S': 'FooForum'
}]
}
},
'table_name': 'Thread'
}
self.assertEqual(req.call_args[1], params)
with patch(PATCH_METHOD) as req:
req.return_value = HttpOK(), {}
conn.query(
table_name,
"FooForum",
select='ALL_ATTRIBUTES',
exclusive_start_key="FooForum"
)
params = {
'return_consumed_capacity': 'TOTAL',
'exclusive_start_key': {
'ForumName': {
'S': 'FooForum'
}
},
'key_conditions': {
'ForumName': {
'ComparisonOperator': 'EQ', 'AttributeValueList': [{
'S': 'FooForum'
}]
}
},
'table_name': 'Thread',
'select': 'ALL_ATTRIBUTES'
}
self.assertEqual(req.call_args[1], params)
def test_scan(self):
"""
Connection.scan
"""
conn = Connection()
table_name = 'Thread'
with patch(PATCH_METHOD) as req:
req.return_value = HttpOK(), DESCRIBE_TABLE_DATA
conn.describe_table(table_name)
with patch(PATCH_METHOD) as req:
req.return_value = HttpOK(), {}
conn.scan(
table_name,
segment=0,
total_segments=22,
)
params = {
'return_consumed_capacity': 'TOTAL',
'table_name': table_name,
'segment': 0,
'total_segments': 22,
}
self.assertDictEqual(req.call_args[1], params)
with patch(PATCH_METHOD) as req:
req.return_value = HttpOK(), {}
conn.scan(
table_name,
return_consumed_capacity='TOTAL',
exclusive_start_key="FooForum",
limit=1,
segment=2,
total_segments=4,
attributes_to_get=['ForumName']
)
params = {
'attributes_to_get': ['ForumName'],
'exclusive_start_key': {
"ForumName": {
"S": "FooForum"
}
},
'table_name': table_name,
'limit': 1,
'segment': 2,
'total_segments': 4,
'return_consumed_capacity': 'TOTAL'
}
self.assertEqual(req.call_args[1], params)
with patch(PATCH_METHOD) as req:
req.return_value = HttpOK(), {}
conn.scan(
table_name,
)
params = {
'return_consumed_capacity': 'TOTAL',
'table_name': table_name
}
self.assertEqual(req.call_args[1], params)
kwargs = {
'scan_filter': {
'ForumName': {
'ComparisonOperator': 'BadOperator',
'AttributeValueList': ['Foo']
}
}
}
self.assertRaises(
ValueError,
conn.scan,
table_name,
**kwargs)
kwargs = {
'scan_filter': {
'ForumName': {
'ComparisonOperator': 'BEGINS_WITH',
'AttributeValueList': ['Foo']
}
}
}
with patch(PATCH_METHOD) as req:
req.return_value = HttpBadRequest(), {}
self.assertRaises(
ScanError,
conn.scan,
table_name,
**kwargs)
kwargs = {
'scan_filter': {
'ForumName': {
'ComparisonOperator': 'BEGINS_WITH',
'AttributeValueList': ['Foo']
}
}
}
with patch(PATCH_METHOD) as req:
req.return_value = HttpOK(), {}
conn.scan(
table_name,
**kwargs
)
params = {
'return_consumed_capacity': 'TOTAL',
'table_name': table_name,
'scan_filter': {
'ForumName': {
'AttributeValueList': [
{'S': 'Foo'}
],
'ComparisonOperator': 'BEGINS_WITH'
}
}
}
self.assertEqual(req.call_args[1], params)
| mtsgrd/PynamoDB2 | pynamodb/tests/test_base_connection.py | Python | mit | 49,223 |
# -*- encoding: utf-8 -*-
from supriya.tools.ugentools.Index import Index
class WrapIndex(Index):
r'''
::
        >>> buffer_id = 23
        >>> source = ugentools.In.ar(bus=0)
>>> wrap_index = ugentools.WrapIndex.ar(
... buffer_id=buffer_id,
... source=source,
... )
>>> wrap_index
WrapIndex.ar()
'''
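    # (Descriptive note, inferred from the SuperCollider class of the same name
    # since this auto-generated stub carries no prose of its own: WrapIndex
    # behaves like Index, except that the index signal wraps around the buffer
    # size instead of being clamped.)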
### CLASS VARIABLES ###
__documentation_section__ = None
__slots__ = ()
_ordered_input_names = (
'buffer_id',
'source',
)
_valid_calculation_rates = None
### INITIALIZER ###
def __init__(
self,
calculation_rate=None,
buffer_id=None,
source=None,
):
Index.__init__(
self,
calculation_rate=calculation_rate,
buffer_id=buffer_id,
source=source,
)
### PUBLIC METHODS ###
@classmethod
def ar(
cls,
buffer_id=None,
source=None,
):
r'''Constructs an audio-rate WrapIndex.
::
            >>> buffer_id = 23
            >>> source = ugentools.In.ar(bus=0)
>>> wrap_index = ugentools.WrapIndex.ar(
... buffer_id=buffer_id,
... source=source,
... )
>>> wrap_index
WrapIndex.ar()
Returns ugen graph.
'''
from supriya.tools import synthdeftools
calculation_rate = synthdeftools.CalculationRate.AUDIO
ugen = cls._new_expanded(
calculation_rate=calculation_rate,
buffer_id=buffer_id,
source=source,
)
return ugen
@classmethod
def kr(
cls,
buffer_id=None,
source=None,
):
r'''Constructs a control-rate WrapIndex.
::
            >>> buffer_id = 23
            >>> source = ugentools.In.ar(bus=0)
>>> wrap_index = ugentools.WrapIndex.kr(
... buffer_id=buffer_id,
... source=source,
... )
>>> wrap_index
WrapIndex.kr()
Returns ugen graph.
'''
from supriya.tools import synthdeftools
calculation_rate = synthdeftools.CalculationRate.CONTROL
ugen = cls._new_expanded(
calculation_rate=calculation_rate,
buffer_id=buffer_id,
source=source,
)
return ugen
### PUBLIC PROPERTIES ###
@property
def buffer_id(self):
r'''Gets `buffer_id` input of WrapIndex.
::
            >>> buffer_id = 23
            >>> source = ugentools.In.ar(bus=0)
>>> wrap_index = ugentools.WrapIndex.ar(
... buffer_id=buffer_id,
... source=source,
... )
            >>> wrap_index.buffer_id
            23.0
Returns ugen input.
'''
index = self._ordered_input_names.index('buffer_id')
return self._inputs[index]
@property
def source(self):
r'''Gets `source` input of WrapIndex.
::
            >>> buffer_id = 23
            >>> source = ugentools.In.ar(bus=0)
>>> wrap_index = ugentools.WrapIndex.ar(
... buffer_id=buffer_id,
... source=source,
... )
>>> wrap_index.source
OutputProxy(
source=In(
bus=0.0,
calculation_rate=CalculationRate.AUDIO,
channel_count=1
),
output_index=0
)
Returns ugen input.
'''
index = self._ordered_input_names.index('source')
return self._inputs[index] | andrewyoung1991/supriya | supriya/tools/pendingugentools/WrapIndex.py | Python | mit | 3,591 |
''' -- imports from python libraries -- '''
# from datetime import datetime
import datetime
import json
''' -- imports from installed packages -- '''
from django.http import HttpResponseRedirect
from django.http import HttpResponse
from django.http import Http404
from django.shortcuts import render_to_response # , render uncomment when to use
from django.template import RequestContext
from django.template import TemplateDoesNotExist
from django.template.loader import render_to_string
from django.core.urlresolvers import reverse
from django.contrib.auth.models import User
from django.contrib.auth.decorators import login_required
from django.contrib.sites.models import Site
try:
from bson import ObjectId
except ImportError: # old pymongo
from pymongo.objectid import ObjectId
''' -- imports from application folders/files -- '''
from gnowsys_ndf.settings import GAPPS, MEDIA_ROOT, GSTUDIO_TASK_TYPES
from gnowsys_ndf.ndf.models import NodeJSONEncoder
from gnowsys_ndf.ndf.models import Node, AttributeType, RelationType
from gnowsys_ndf.ndf.models import node_collection, triple_collection
from gnowsys_ndf.ndf.views.file import save_file
from gnowsys_ndf.ndf.templatetags.ndf_tags import edit_drawer_widget
from gnowsys_ndf.ndf.views.methods import get_node_common_fields, parse_template_data, get_execution_time, delete_node
from gnowsys_ndf.ndf.views.notify import set_notif_val
from gnowsys_ndf.ndf.views.methods import get_property_order_with_value
from gnowsys_ndf.ndf.views.methods import create_gattribute, create_grelation, create_task
GST_COURSE = node_collection.one({'_type': "GSystemType", 'name': GAPPS[7]})
app = GST_COURSE
# @login_required
@get_execution_time
def course(request, group_id, course_id=None):
"""
* Renders a list of all 'courses' available within the database.
"""
    if ObjectId.is_valid(group_id) is False:
group_ins = node_collection.find_one({'_type': "Group", "name": group_id})
auth = node_collection.one({'_type': 'Author', 'name': unicode(request.user.username) })
if group_ins:
group_id = str(group_ins._id)
else:
auth = node_collection.one({'_type': 'Author', 'name': unicode(request.user.username) })
if auth:
group_id = str(auth._id)
else:
pass
if course_id is None:
course_ins = node_collection.find_one({'_type': "GSystemType", "name": "Course"})
if course_ins:
course_id = str(course_ins._id)
if request.method == "POST":
# Course search view
title = GST_COURSE.name
search_field = request.POST['search_field']
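        # The query below matches Course nodes of this group whose name OR tags
        # regex-match the search string, and which are either PUBLIC or else
        # PRIVATE but created by the requesting user.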
course_coll = node_collection.find({'member_of': {'$all': [ObjectId(GST_COURSE._id)]},
'$or': [
{'$and': [
{'name': {'$regex': search_field, '$options': 'i'}},
{'$or': [
{'access_policy': u"PUBLIC"},
{'$and': [{'access_policy': u"PRIVATE"}, {'created_by': request.user.id}]}
]
}
]
},
{'$and': [
{'tags': {'$regex': search_field, '$options': 'i'}},
{'$or': [
{'access_policy': u"PUBLIC"},
{'$and': [{'access_policy': u"PRIVATE"}, {'created_by': request.user.id}]}
]
}
]
}
],
'group_set': {'$all': [ObjectId(group_id)]}
}).sort('last_update', -1)
# course_nodes_count = course_coll.count()
return render_to_response("ndf/course.html",
{'title': title,
'appId': app._id,
'searching': True, 'query': search_field,
'course_coll': course_coll, 'groupid': group_id, 'group_id':group_id
},
context_instance=RequestContext(request)
)
else:
# Course list view
title = GST_COURSE.name
course_coll = node_collection.find({'member_of': {'$all': [ObjectId(course_id)]},
'group_set': {'$all': [ObjectId(group_id)]},
'$or': [
{'access_policy': u"PUBLIC"},
{'$and': [
{'access_policy': u"PRIVATE"},
{'created_by': request.user.id}
]
}
]
})
template = "ndf/course.html"
variable = RequestContext(request, {'title': title, 'course_nodes_count': course_coll.count(), 'course_coll': course_coll, 'groupid':group_id, 'appId':app._id, 'group_id':group_id})
return render_to_response(template, variable)
@login_required
@get_execution_time
def create_edit(request, group_id, node_id=None):
    """Creates/Modifies details about the given course.
"""
    if ObjectId.is_valid(group_id) is False:
group_ins = node_collection.find_one({'_type': "Group","name": group_id})
auth = node_collection.one({'_type': 'Author', 'name': unicode(request.user.username) })
if group_ins:
group_id = str(group_ins._id)
else:
auth = node_collection.one({'_type': 'Author', 'name': unicode(request.user.username) })
if auth:
group_id = str(auth._id)
else:
pass
context_variables = {'title': GST_COURSE.name,
'group_id': group_id,
'groupid': group_id
}
if node_id:
course_node = node_collection.one({'_type': u'GSystem', '_id': ObjectId(node_id)})
else:
course_node = node_collection.collection.GSystem()
available_nodes = node_collection.find({'_type': u'GSystem', 'member_of': ObjectId(GST_COURSE._id),'group_set': ObjectId(group_id) })
nodes_list = []
for each in available_nodes:
nodes_list.append(str((each.name).strip().lower()))
if request.method == "POST":
# get_node_common_fields(request, course_node, group_id, GST_COURSE)
course_node.save(is_changed=get_node_common_fields(request, course_node, group_id, GST_COURSE))
return HttpResponseRedirect(reverse('course', kwargs={'group_id': group_id}))
else:
if node_id:
context_variables['node'] = course_node
context_variables['groupid'] = group_id
context_variables['group_id'] = group_id
context_variables['appId'] = app._id
context_variables['nodes_list'] = json.dumps(nodes_list)
return render_to_response("ndf/course_create_edit.html",
context_variables,
context_instance=RequestContext(request)
)
@login_required
@get_execution_time
def course_detail(request, group_id, _id):
    if ObjectId.is_valid(group_id) is False:
group_ins = node_collection.find_one({'_type': "Group", "name": group_id})
auth = node_collection.one({'_type': 'Author', 'name': unicode(request.user.username) })
if group_ins:
group_id = str(group_ins._id)
else:
auth = node_collection.one({'_type': 'Author', 'name': unicode(request.user.username) })
if auth:
group_id = str(auth._id)
else:
pass
course_structure_exists = False
title = GST_COURSE.name
course_node = node_collection.one({"_id": ObjectId(_id)})
if course_node.collection_set:
course_structure_exists = True
return render_to_response("ndf/course_detail.html",
{'node': course_node,
'groupid': group_id,
'group_id': group_id,
'appId': app._id,
'title':title,
'course_structure_exists': course_structure_exists
},
context_instance=RequestContext(request)
)
@login_required
@get_execution_time
def course_create_edit(request, group_id, app_id, app_set_id=None, app_set_instance_id=None, app_name=None):
"""
    Creates/Modifies a document of a given sub-type of Course.
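    Rough flow (derived from the code below):
      1. resolve the group/author context from group_id
      2. resolve the Course sub-type GSystemType (course_gst) from app_set_id
      3. on POST, save the base fields and then every AT/RT field through
         create_gattribute()/create_grelation(); the "Announced Course"
         sub-type fans the save out over each selected college
      4. otherwise, render <sub-type>_create_edit.html with property_order_list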
"""
auth = None
if ObjectId.is_valid(group_id) is False:
group_ins = node_collection.one({'_type': "Group", "name": group_id})
auth = node_collection.one({
'_type': 'Author', 'name': unicode(request.user.username)
})
if group_ins:
group_id = str(group_ins._id)
else:
auth = node_collection.one({
'_type': 'Author', 'name': unicode(request.user.username)
})
if auth:
group_id = str(auth._id)
else:
pass
app = None
if app_id is None:
app = node_collection.one({'_type': "GSystemType", 'name': app_name})
if app:
app_id = str(app._id)
else:
app = node_collection.one({'_id': ObjectId(app_id)})
app_name = app.name
# app_set = ""
app_collection_set = []
title = ""
course_gst = None
course_gs = None
mis_admin = None
property_order_list = []
template = ""
template_prefix = "mis"
if request.user:
if auth is None:
auth = node_collection.one({
'_type': 'Author', 'name': unicode(request.user.username)
})
agency_type = auth.agency_type
agency_type_node = node_collection.one({
'_type': "GSystemType", 'name': agency_type
}, {
'collection_set': 1
})
if agency_type_node:
for eachset in agency_type_node.collection_set:
app_collection_set.append(
node_collection.one({
"_id": eachset
}, {
'_id': 1, 'name': 1, 'type_of': 1
})
)
if app_set_id:
course_gst = node_collection.one({
'_type': "GSystemType", '_id': ObjectId(app_set_id)
}, {
'name': 1, 'type_of': 1
})
template = "ndf/" + course_gst.name.strip().lower().replace(' ', '_') \
+ "_create_edit.html"
title = course_gst.name
if app_set_instance_id:
course_gs = node_collection.one({
'_type': "GSystem", '_id': ObjectId(app_set_instance_id)
})
else:
course_gs = node_collection.collection.GSystem()
course_gs.member_of.append(course_gst._id)
property_order_list = get_property_order_with_value(course_gs)
if request.method == "POST":
# [A] Save course-node's base-field(s)
start_time = ""
if "start_time" in request.POST:
start_time = request.POST.get("start_time", "")
start_time = datetime.datetime.strptime(start_time, "%m/%Y")
end_time = ""
if "end_time" in request.POST:
end_time = request.POST.get("end_time", "")
end_time = datetime.datetime.strptime(end_time, "%m/%Y")
nussd_course_type = ""
if "nussd_course_type" in request.POST:
nussd_course_type = request.POST.get("nussd_course_type", "")
nussd_course_type = unicode(nussd_course_type)
unset_ac_options = []
if "unset-ac-options" in request.POST:
unset_ac_options = request.POST.getlist("unset-ac-options")
else:
# Just to execute loop at least once for Course Sub-Types
# other than 'Announced Course'
unset_ac_options = ["dummy"]
if course_gst.name == u"Announced Course":
announce_to_colg_list = request.POST.get(
"announce_to_colg_list", ""
)
                    announce_to_colg_list = announce_to_colg_list.split(",")
colg_ids = []
# Parsing ObjectId -- from string format to ObjectId
for each in announce_to_colg_list:
if each and ObjectId.is_valid(each):
colg_ids.append(ObjectId(each))
# Fetching college(s)
colg_list_cur = node_collection.find({
'_id': {'$in': colg_ids}
}, {
'name': 1, 'attribute_set.enrollment_code': 1
})
if "_id" in course_gs:
                # It means we are in editing mode of the given Announced Course GSystem
unset_ac_options = [course_gs._id]
ac_nc_code_list = []
# Prepare a list
# 0th index (ac_node): Announced Course node,
# 1st index (nc_id): NUSSD Course node's ObjectId,
# 2nd index (nc_course_code): NUSSD Course's code
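            # e.g. a single ac_nc_code_list entry (values illustrative only):
            #   [None, ObjectId('53a0...'), u"BSW101"]   (fresh announcement)
            #   [<Announced Course GSystem node>, ObjectId('53a0...'), u"BSW101"]   (edit mode)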
for cid in unset_ac_options:
ac_node = None
nc_id = None
nc_course_code = ""
# Here course_gst is Announced Course GSytemType's node
ac_node = node_collection.one({
'_id': ObjectId(cid), 'member_of': course_gst._id
})
# If ac_node found, means
# (1) we are dealing with creating Announced Course
# else,
# (2) we are in editing phase of Announced Course
course_node = None
if not ac_node:
# In this case, cid is of NUSSD Course GSystem
# So fetch that to extract course_code
# Set to nc_id
ac_node = None
course_node = node_collection.one({
'_id': ObjectId(cid)
})
else:
# In this case, fetch NUSSD Course from
# Announced Course GSystem's announced_for relationship
                            course_node_ids = []
                            for rel in ac_node.relation_set:
if "announced_for" in rel:
course_node_ids = rel["announced_for"]
break
# Fetch NUSSD Course GSystem
if course_node_ids:
course_node = node_collection.find_one({
"_id": {"$in": course_node_ids}
})
# If course_code doesn't exists then
# set NUSSD Course GSystem's name as course_code
if course_node:
nc_id = course_node._id
for attr in course_node.attribute_set:
if "course_code" in attr:
nc_course_code = attr["course_code"]
break
if not nc_course_code:
nc_course_code = course_node.name.replace(" ", "-")
# Append to ac_nc_code_list
ac_nc_code_list.append([ac_node, nc_id, nc_course_code])
# For each selected college
# Create Announced Course GSystem
for college_node in colg_list_cur:
# Fetch Enrollment code from "enrollment_code" (Attribute)
college_enrollment_code = ""
if college_node:
for attr in college_node.attribute_set:
if attr and "enrollment_code" in attr:
college_enrollment_code = attr["enrollment_code"]
break
ann_course_id_list = []
# For each selected course to Announce
for ac_nc_code in ac_nc_code_list:
course_gs = ac_nc_code[0]
nc_id = ac_nc_code[1]
nc_course_code = ac_nc_code[2]
if not course_gs:
# Create new Announced Course GSystem
course_gs = node_collection.collection.GSystem()
course_gs.member_of.append(course_gst._id)
# Prepare name for Announced Course GSystem
c_name = unicode(
nc_course_code + "_" + college_enrollment_code + "_"
+ start_time.strftime("%b_%Y") + "-"
+ end_time.strftime("%b_%Y")
)
request.POST["name"] = c_name
is_changed = get_node_common_fields(
request, course_gs, group_id, course_gst
)
if is_changed:
# Remove this when publish button is setup on interface
course_gs.status = u"PUBLISHED"
course_gs.save(is_changed=is_changed)
# [B] Store AT and/or RT field(s) of given course-node
for tab_details in property_order_list:
for field_set in tab_details[1]:
# Fetch only Attribute field(s) / Relation field(s)
if '_id' in field_set:
field_instance = node_collection.one({
'_id': field_set['_id']
})
field_instance_type = type(field_instance)
if (field_instance_type in
[AttributeType, RelationType]):
field_data_type = field_set['data_type']
# Fetch field's value depending upon AT/RT
# and Parse fetched-value depending upon
# that field's data-type
if field_instance_type == AttributeType:
if "File" in field_instance["validators"]:
                                                # Special case: AttributeTypes that require a file instance as their
                                                # value, in which case the file document's ObjectId is stored
if field_instance["name"] in request.FILES:
field_value = request.FILES[field_instance["name"]]
else:
field_value = ""
                                                # Index [0] below is used because save_file() returns a tuple (ObjectId, bool)
if field_value != '' and field_value != u'':
file_name = course_gs.name + " -- " + field_instance["altnames"]
content_org = ""
tags = ""
field_value = save_file(field_value, file_name, request.user.id, group_id, content_org, tags, oid=True)[0]
else:
# Other AttributeTypes
field_value = request.POST.get(field_instance["name"], "")
if field_instance["name"] in ["start_time", "end_time"]:
# Course Duration
field_value = parse_template_data(field_data_type, field_value, date_format_string="%m/%Y")
else:
field_value = parse_template_data(field_data_type, field_value, date_format_string="%d/%m/%Y %H:%M")
course_gs_triple_instance = create_gattribute(course_gs._id, node_collection.collection.AttributeType(field_instance), field_value)
else:
# i.e if field_instance_type == RelationType
if field_instance["name"] == "announced_for":
field_value = ObjectId(nc_id)
# Pass ObjectId of selected Course
elif field_instance["name"] == "acourse_for_college":
field_value = college_node._id
# Pass ObjectId of selected College
course_gs_triple_instance = create_grelation(course_gs._id, node_collection.collection.RelationType(field_instance), field_value)
ann_course_id_list.append(course_gs._id)
else:
is_changed = get_node_common_fields(request, course_gs, group_id, course_gst)
if is_changed:
# Remove this when publish button is setup on interface
course_gs.status = u"PUBLISHED"
course_gs.save(is_changed=is_changed)
# [B] Store AT and/or RT field(s) of given course-node
for tab_details in property_order_list:
for field_set in tab_details[1]:
# Fetch only Attribute field(s) / Relation field(s)
if '_id' in field_set:
field_instance = node_collection.one({'_id': field_set['_id']})
field_instance_type = type(field_instance)
if field_instance_type in [AttributeType, RelationType]:
field_data_type = field_set['data_type']
# Fetch field's value depending upon AT/RT
# and Parse fetched-value depending upon
# that field's data-type
if field_instance_type == AttributeType:
if "File" in field_instance["validators"]:
                                            # Special case: AttributeTypes that require a file instance as their
                                            # value, in which case the file document's ObjectId is stored
if field_instance["name"] in request.FILES:
field_value = request.FILES[field_instance["name"]]
else:
field_value = ""
                                            # Index [0] below is used because save_file() returns a tuple (ObjectId, bool)
if field_value != '' and field_value != u'':
file_name = course_gs.name + " -- " + field_instance["altnames"]
content_org = ""
tags = ""
field_value = save_file(field_value, file_name, request.user.id, group_id, content_org, tags, oid=True)[0]
else:
# Other AttributeTypes
field_value = request.POST.get(field_instance["name"], "")
# if field_instance["name"] in ["start_time","end_time"]:
# field_value = parse_template_data(field_data_type, field_value, date_format_string="%m/%Y")
# elif field_instance["name"] in ["start_enroll", "end_enroll"]: #Student Enrollment DUration
# field_value = parse_template_data(field_data_type, field_value, date_format_string="%d/%m/%Y")
if field_instance["name"] in ["mast_tr_qualifications", "voln_tr_qualifications"]:
                                            # Needs a special kind of parsing
field_value = []
tr_qualifications = request.POST.get(field_instance["name"], '')
if tr_qualifications:
qualifications_dict = {}
tr_qualifications = [qual.strip() for qual in tr_qualifications.split(",")]
for i, qual in enumerate(tr_qualifications):
if (i % 2) == 0:
if qual == "true":
qualifications_dict["mandatory"] = True
elif qual == "false":
qualifications_dict["mandatory"] = False
else:
qualifications_dict["text"] = unicode(qual)
field_value.append(qualifications_dict)
qualifications_dict = {}
elif field_instance["name"] in ["max_marks", "min_marks"]:
# Needed because both these fields' values are dependent upon evaluation_type field's value
evaluation_type = request.POST.get("evaluation_type", "")
if evaluation_type == u"Continuous":
field_value = None
field_value = parse_template_data(field_data_type, field_value, date_format_string="%d/%m/%Y %H:%M")
else:
field_value = parse_template_data(field_data_type, field_value, date_format_string="%d/%m/%Y %H:%M")
course_gs_triple_instance = create_gattribute(
course_gs._id,
node_collection.collection.AttributeType(field_instance),
field_value
)
else:
#i.e if field_instance_type == RelationType
if field_instance["name"] == "announced_for":
field_value = ObjectId(cid)
#Pass ObjectId of selected Course
elif field_instance["name"] == "acourse_for_college":
field_value = college_node._id
#Pass ObjectId of selected College
course_gs_triple_instance = create_grelation(
course_gs._id,
node_collection.collection.RelationType(field_instance),
field_value
)
return HttpResponseRedirect(
reverse(
app_name.lower() + ":" + template_prefix + '_app_detail',
kwargs={
'group_id': group_id, "app_id": app_id,
"app_set_id": app_set_id
}
)
)
univ = node_collection.one({
'_type': "GSystemType", 'name': "University"
}, {
'_id': 1
})
university_cur = None
if not mis_admin:
mis_admin = node_collection.one(
{'_type': "Group", 'name': "MIS_admin"},
{'_id': 1, 'name': 1, 'group_admin': 1}
)
if univ and mis_admin:
university_cur = node_collection.find(
{'member_of': univ._id, 'group_set': mis_admin._id},
{'name': 1}
).sort('name', 1)
default_template = "ndf/course_create_edit.html"
context_variables = {
'groupid': group_id, 'group_id': group_id,
'app_id': app_id, 'app_name': app_name,
'app_collection_set': app_collection_set,
'app_set_id': app_set_id,
'title': title,
'university_cur': university_cur,
'property_order_list': property_order_list
}
if app_set_instance_id:
course_gs.get_neighbourhood(course_gs.member_of)
context_variables['node'] = course_gs
if "Announced Course" in course_gs.member_of_names_list:
for attr in course_gs.attribute_set:
if attr:
for eachk, eachv in attr.items():
context_variables[eachk] = eachv
for rel in course_gs.relation_set:
if rel:
for eachk, eachv in rel.items():
if eachv:
get_node_name = node_collection.one({'_id': eachv[0]})
context_variables[eachk] = get_node_name.name
try:
return render_to_response(
[template, default_template],
context_variables, context_instance=RequestContext(request)
)
except TemplateDoesNotExist as tde:
error_message = "\n CourseCreateEditViewError: This html template (" \
+ str(tde) + ") does not exists !!!\n"
raise Http404(error_message)
except Exception as e:
error_message = "\n CourseCreateEditViewError: " + str(e) + " !!!\n"
raise Exception(error_message)
@login_required
@get_execution_time
def mis_course_detail(request, group_id, app_id=None, app_set_id=None, app_set_instance_id=None, app_name=None):
"""
Detail view of NUSSD Course/ Announced Course
"""
auth = None
if ObjectId.is_valid(group_id) is False:
group_ins = node_collection.one({'_type': "Group", "name": group_id})
auth = node_collection.one({'_type': 'Author', 'name': unicode(request.user.username) })
if group_ins:
group_id = str(group_ins._id)
else:
auth = node_collection.one({'_type': 'Author', 'name': unicode(request.user.username) })
if auth:
group_id = str(auth._id)
else:
pass
app = None
if app_id is None:
app = node_collection.one({'_type': "GSystemType", 'name': app_name})
if app:
app_id = str(app._id)
else:
app = node_collection.one({'_id': ObjectId(app_id)})
app_name = app.name
# app_name = "mis"
app_set = ""
app_collection_set = []
title = ""
course_gst = None
course_gs = None
nodes = None
node = None
property_order_list = []
property_order_list_ac = []
    is_link_needed = True  # Required to show the Link button on the interface, which links a Student's/Voluntary Teacher's node with its corresponding Author node
template_prefix = "mis"
context_variables = {}
    # Course structure collection dict
course_collection_dict = {}
course_collection_list = []
course_structure_exists = False
if request.user:
if auth is None:
auth = node_collection.one({'_type': 'Author', 'name': unicode(request.user.username)})
if auth:
agency_type = auth.agency_type
agency_type_node = node_collection.one({'_type': "GSystemType", 'name': agency_type}, {'collection_set': 1})
if agency_type_node:
for eachset in agency_type_node.collection_set:
app_collection_set.append(node_collection.one({"_id": eachset}, {'_id': 1, 'name': 1, 'type_of': 1}))
if app_set_id:
course_gst = node_collection.one({'_type': "GSystemType", '_id': ObjectId(app_set_id)}, {'name': 1, 'type_of': 1})
title = course_gst.name
template = "ndf/course_list.html"
if request.method == "POST":
search = request.POST.get("search", "")
classtype = request.POST.get("class", "")
# nodes = list(node_collection.find({'name':{'$regex':search, '$options': 'i'},'member_of': {'$all': [course_gst._id]}}))
nodes = node_collection.find({'member_of': course_gst._id, 'name': {'$regex': search, '$options': 'i'}})
else:
nodes = node_collection.find({'member_of': course_gst._id, 'group_set': ObjectId(group_id)})
if app_set_instance_id:
template = "ndf/course_details.html"
node = node_collection.one({'_type': "GSystem", '_id': ObjectId(app_set_instance_id)})
property_order_list = get_property_order_with_value(node)
node.get_neighbourhood(node.member_of)
if title == u"Announced Course":
property_order_list_ac = node.attribute_set
# Course structure as list of dicts
if node.collection_set:
course_structure_exists = True
context_variables = { 'groupid': group_id, 'group_id': group_id,
'app_id': app_id, 'app_name': app_name, 'app_collection_set': app_collection_set,
'app_set_id': app_set_id,
'course_gst_name': course_gst.name,
'title': title,
'course_structure_exists': course_structure_exists,
'nodes': nodes, 'node': node,
'property_order_list': property_order_list,
'property_order_list_ac': property_order_list_ac,
'is_link_needed': is_link_needed
}
try:
        return render_to_response(template,
context_variables,
context_instance = RequestContext(request)
)
except TemplateDoesNotExist as tde:
error_message = "\n CourseDetailListViewError: This html template (" + str(tde) + ") does not exists !!!\n"
raise Http404(error_message)
except Exception as e:
error_message = "\n CourseDetailListViewError: " + str(e) + " !!!\n"
raise Exception(error_message)
@login_required
@get_execution_time
def create_course_struct(request, group_id, node_id):
"""
    This view creates the structure of a Course.
    A Course holds CourseSections, which in turn hold CourseSubSections
    in their respective collection_set fields.
    A tree depiction of this is as follows:
Course Name:
1. CourseSection1
1.1. CourseSubSection1
1.2. CourseSubSection2
2. CourseSection2
2.1. CourseSubSection3
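    A rough sketch of how this nesting is persisted (ids illustrative), based
    on the save_course_section()/save_course_sub_section() handlers below:
        course_node.collection_set    == [ObjectId(<CourseSection1>), ObjectId(<CourseSection2>)]
        course_section.collection_set == [ObjectId(<CourseSubSection1>), ObjectId(<CourseSubSection2>)]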
"""
    if ObjectId.is_valid(group_id) is False:
group_ins = node_collection.find_one({'_type': "Group","name": group_id})
auth = node_collection.one({'_type': 'Author', 'name': unicode(request.user.username) })
if group_ins:
group_id = str(group_ins._id)
else:
auth = node_collection.one({'_type': 'Author', 'name': unicode(request.user.username) })
if auth:
group_id = str(auth._id)
else:
pass
app_id = None
app_set_id = None
property_order_list_cs = []
property_order_list_css = []
course_structure_exists = False
title = "Course Authoring"
course_node = node_collection.one({"_id": ObjectId(node_id)})
cs_gst = node_collection.one({'_type': "GSystemType", 'name': "CourseSection"})
cs_gs = node_collection.collection.GSystem()
cs_gs.member_of.append(cs_gst._id)
property_order_list_cs = get_property_order_with_value(cs_gs)
css_gst = node_collection.one({'_type': "GSystemType", 'name': "CourseSubSection"})
css_gs = node_collection.collection.GSystem()
css_gs.member_of.append(css_gst._id)
property_order_list_css = get_property_order_with_value(css_gs)
course_collection_list = course_node.collection_set
if course_collection_list:
course_structure_exists = True
# for attr in course_node.attribute_set:
# if attr.has_key("evaluation_type"):
# eval_type = attr["evaluation_type"]
    # If the evaluation_type flag is True, it is Final. If False, it is Continuous.
# if(eval_type==u"Final"):
# eval_type_flag = True
# else:
# eval_type_flag = False
if request.method == "GET":
app_id = request.GET.get("app_id", "")
app_set_id = request.GET.get("app_set_id", "")
return render_to_response("ndf/create_course_structure.html",
{'cnode': course_node,
'groupid': group_id,
'group_id': group_id,
'title': title,
'app_id': app_id, 'app_set_id': app_set_id,
'property_order_list': property_order_list_cs,
'property_order_list_css': property_order_list_css
},
context_instance=RequestContext(request)
)
@login_required
def save_course_section(request, group_id):
'''
Accepts:
* NUSSD Course/Course node _id
* CourseSection name
Actions:
* Creates CourseSection GSystem with name received.
* Appends this new CourseSection node id into
NUSSD Course/Course collection_set
Returns:
* success (i.e True/False)
* ObjectId of CourseSection node
'''
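    # Illustrative AJAX exchange (field names from the code below; values hypothetical):
    #   POST data: cs_name="Week 1", course_node_id="<Course ObjectId>"
    #   JSON reply: {"success": true, "cs_new_id": "<new CourseSection id>"}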
response_dict = {"success": False}
if request.is_ajax() and request.method == "POST":
cs_node_name = request.POST.get("cs_name", '')
course_node_id = request.POST.get("course_node_id", '')
cs_gst = node_collection.one({'_type': "GSystemType", 'name': "CourseSection"})
cs_new = node_collection.collection.GSystem()
cs_new.member_of.append(cs_gst._id)
cs_new.name = cs_node_name
cs_new.modified_by = int(request.user.id)
cs_new.created_by = int(request.user.id)
cs_new.contributors.append(int(request.user.id))
course_node = node_collection.one({"_id": ObjectId(course_node_id)})
cs_new.prior_node.append(ObjectId(course_node._id))
cs_new.save()
node_collection.collection.update({'_id': course_node._id}, {'$push': {'collection_set': cs_new._id }}, upsert=False, multi=False)
response_dict["success"] = True
response_dict["cs_new_id"] = str(cs_new._id)
return HttpResponse(json.dumps(response_dict))
@login_required
def save_course_sub_section(request, group_id):
'''
Accepts:
* CourseSection node _id
* CourseSubSection name
Actions:
* Creates CourseSubSection GSystem with name received.
* Appends this new CourseSubSection node id into
CourseSection collection_set
Returns:
* success (i.e True/False)
* ObjectId of CourseSubSection node
'''
response_dict = {"success": False}
if request.is_ajax() and request.method == "POST":
css_node_name = request.POST.get("css_name", '')
cs_node_id = request.POST.get("cs_node_id", '')
css_gst = node_collection.one({'_type': "GSystemType", 'name': "CourseSubSection"})
css_new = node_collection.collection.GSystem()
css_new.member_of.append(css_gst._id)
# set name
css_new.name = css_node_name
css_new.modified_by = int(request.user.id)
css_new.created_by = int(request.user.id)
css_new.contributors.append(int(request.user.id))
cs_node = node_collection.one({"_id": ObjectId(cs_node_id)})
css_new.prior_node.append(cs_node._id)
css_new.save()
node_collection.collection.update({'_id': cs_node._id}, {'$push': {'collection_set': css_new._id }}, upsert=False, multi=False)
response_dict["success"] = True
response_dict["css_new_id"] = str(css_new._id)
return HttpResponse(json.dumps(response_dict))
@login_required
def change_node_name(request, group_id):
'''
Accepts:
* CourseSection/ CourseSubSection node _id
* New name for CourseSection node
Actions:
* Updates received node's name
'''
response_dict = {"success": False}
if request.is_ajax() and request.method == "POST":
node_id = request.POST.get("node_id", '')
new_name = request.POST.get("new_name", '')
node = node_collection.one({"_id": ObjectId(node_id)})
node.name = new_name.strip()
node.save()
response_dict["success"] = True
return HttpResponse(json.dumps(response_dict))
@login_required
def change_order(request, group_id):
'''
Accepts:
* 2 node ids.
Basically, either of CourseSection or CourseSubSection
* Parent node id
Either a NUSSD Course/Course or CourseSection
Actions:
* Swaps the 2 node ids in the collection set of received
parent node
'''
response_dict = {"success": False}
collection_set_list = []
if request.is_ajax() and request.method == "POST":
node_id_up = request.POST.get("node_id_up", '')
node_id_down = request.POST.get("node_id_down", '')
parent_node_id = request.POST.get("parent_node", '')
parent_node = node_collection.one({"_id": ObjectId(parent_node_id)})
collection_set_list = parent_node.collection_set
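        # Swap the two ids in place via tuple unpacking, then write the
        # reordered collection_set back to the parent node.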
a, b = collection_set_list.index(ObjectId(node_id_up)), collection_set_list.index(ObjectId(node_id_down))
collection_set_list[b], collection_set_list[a] = collection_set_list[a], collection_set_list[b]
node_collection.collection.update({'_id': parent_node._id}, {'$set': {'collection_set': collection_set_list }}, upsert=False, multi=False)
parent_node.reload()
response_dict["success"] = True
return HttpResponse(json.dumps(response_dict))
@login_required
def course_sub_section_prop(request, group_id):
'''
Accepts:
* CourseSubSection node _id
* Properties dict
Actions:
* Creates GAttributes with the values of received dict
for the respective CourseSubSection node
Returns:
* success (i.e True/False)
* If request.method is POST, all GAttributes in a dict structure,
'''
response_dict = {"success": False}
if request.is_ajax():
if request.method == "POST":
assessment_flag = False
css_node_id = request.POST.get("css_node_id", '')
prop_dict = request.POST.get("prop_dict", '')
assessment_chk = json.loads(request.POST.get("assessment_chk", ''))
prop_dict = json.loads(prop_dict)
css_node = node_collection.one({"_id": ObjectId(css_node_id)})
at_cs_hours = node_collection.one({'_type': 'AttributeType', 'name': 'course_structure_minutes'})
at_cs_assessment = node_collection.one({'_type': 'AttributeType', 'name': 'course_structure_assessment'})
at_cs_assignment = node_collection.one({'_type': 'AttributeType', 'name': 'course_structure_assignment'})
at_cs_min_marks = node_collection.one({'_type': 'AttributeType', 'name': 'min_marks'})
at_cs_max_marks = node_collection.one({'_type': 'AttributeType', 'name': 'max_marks'})
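            # Persist each submitted property as a GAttribute on the
            # CourseSubSection node; min/max marks are stored only when the
            # sub-section is flagged as an assessment.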
if assessment_chk is True:
create_gattribute(css_node._id, at_cs_assessment, True)
assessment_flag = True
for propk, propv in prop_dict.items():
# add attributes to css gs
if(propk == "course_structure_minutes"):
create_gattribute(css_node._id, at_cs_hours, int(propv))
elif(propk == "course_structure_assignment"):
create_gattribute(css_node._id, at_cs_assignment, propv)
if assessment_flag:
if(propk == "min_marks"):
create_gattribute(css_node._id, at_cs_min_marks, int(propv))
if(propk == "max_marks"):
create_gattribute(css_node._id, at_cs_max_marks, int(propv))
css_node.reload()
response_dict["success"] = True
else:
css_node_id = request.GET.get("css_node_id", '')
css_node = node_collection.one({"_id": ObjectId(css_node_id)})
if css_node.attribute_set:
for each in css_node.attribute_set:
for k, v in each.items():
response_dict[k] = v
response_dict["success"] = True
else:
response_dict["success"] = False
return HttpResponse(json.dumps(response_dict))
@login_required
def add_units(request, group_id):
'''
Accepts:
* CourseSubSection node _id
* NUSSD Course/Course node _id
Actions:
* Redirects to course_units.html
'''
variable = None
unit_node = None
css_node_id = request.GET.get('css_node_id', '')
unit_node_id = request.GET.get('unit_node_id', '')
course_node_id = request.GET.get('course_node', '')
app_id = request.GET.get('app_id', '')
app_set_id = request.GET.get('app_set_id', '')
css_node = node_collection.one({"_id": ObjectId(css_node_id)})
course_node = node_collection.one({"_id": ObjectId(course_node_id)})
title = "Course Units"
try:
unit_node = node_collection.one({"_id": ObjectId(unit_node_id)})
except:
unit_node = None
variable = RequestContext(request, {
'group_id': group_id, 'groupid': group_id,
'css_node': css_node,
'title': title,
'app_set_id': app_set_id,
'app_id': app_id,
'unit_node': unit_node,
'course_node': course_node,
})
template = "ndf/course_units.html"
return render_to_response(template, variable)
@login_required
def get_resources(request, group_id):
'''
Accepts:
* Name of GSystemType (Page, File, etc.)
* CourseSubSection node _id
* widget_for
Actions:
* Fetches all GSystems of selected GSystemType as resources
Returns:
* Returns Drawer with resources
'''
response_dict = {'success': False, 'message': ""}
try:
if request.is_ajax() and request.method == "POST":
css_node_id = request.POST.get('css_node_id', "")
unit_node_id = request.POST.get('unit_node_id', "")
widget_for = request.POST.get('widget_for', "")
resource_type = request.POST.get('resource_type', "")
resource_type = resource_type.strip()
list_resources = []
css_node = node_collection.one({"_id": ObjectId(css_node_id)})
try:
unit_node = node_collection.one({"_id": ObjectId(unit_node_id)})
except:
unit_node = None
if resource_type:
if resource_type == "Pandora":
resource_type = "Pandora_video"
resource_gst = node_collection.one({'_type': "GSystemType", 'name': resource_type})
res = node_collection.find(
{
'member_of': resource_gst._id,
'group_set': ObjectId(group_id),
'status': u"PUBLISHED"
}
)
for each in res:
list_resources.append(each)
drawer_template_context = edit_drawer_widget("CourseUnits", group_id, unit_node, None, checked="collection_set", left_drawer_content=list_resources)
drawer_template_context["widget_for"] = widget_for
drawer_widget = render_to_string(
'ndf/drawer_widget.html',
drawer_template_context,
context_instance=RequestContext(request)
)
return HttpResponse(drawer_widget)
else:
error_message = "Resource Drawer: Either not an ajax call or not a POST request!!!"
response_dict["message"] = error_message
return HttpResponse(json.dumps(response_dict))
except Exception as e:
error_message = "Resource Drawer: " + str(e) + "!!!"
response_dict["message"] = error_message
return HttpResponse(json.dumps(response_dict))
@login_required
def save_resources(request, group_id):
'''
Accepts:
* List of resources (i.e GSystem of Page, File, etc.)
* CourseSubSection node _id
Actions:
* Sets the received resources in respective node's collection_set
'''
response_dict = {"success": False,"create_new_unit": True}
if request.is_ajax() and request.method == "POST":
list_of_res = json.loads(request.POST.get('list_of_res', ""))
css_node_id = request.POST.get('css_node', "")
unit_name = request.POST.get('unit_name', "")
unit_name = unit_name.strip()
unit_node_id = request.POST.get('unit_node_id', "")
css_node = node_collection.one({"_id": ObjectId(css_node_id)})
list_of_res_ids = [ObjectId(each_res) for each_res in list_of_res]
try:
cu_new = node_collection.one({'_id': ObjectId(unit_node_id)})
except:
cu_new = None
if not cu_new:
cu_gst = node_collection.one({'_type': "GSystemType", 'name': "CourseUnit"})
cu_new = node_collection.collection.GSystem()
cu_new.member_of.append(cu_gst._id)
# set name
cu_new.name = unit_name.strip()
cu_new.modified_by = int(request.user.id)
cu_new.created_by = int(request.user.id)
cu_new.contributors.append(int(request.user.id))
cu_new.prior_node.append(css_node._id)
cu_new.save()
response_dict["create_new_unit"] = True
node_collection.collection.update({'_id': cu_new._id}, {'$set': {'name': unit_name }}, upsert=False, multi=False)
if cu_new._id not in css_node.collection_set:
node_collection.collection.update({'_id': css_node._id}, {'$push': {'collection_set': cu_new._id }}, upsert=False, multi=False)
        node_collection.collection.update({'_id': cu_new._id}, {'$set': {'collection_set': list_of_res_ids}}, upsert=False, multi=False)
cu_new.reload()
response_dict["success"] = True
response_dict["cu_new_id"] = str(cu_new._id)
return HttpResponse(json.dumps(response_dict))
@login_required
def create_edit_unit(request, group_id):
'''
Accepts:
* ObjectId of unit node if exists
* ObjectId of CourseSubSection node
Actions:
* Creates/Updates Unit node
Returns:
* success (i.e True/False)
'''
response_dict = {"success": False}
if request.is_ajax() and request.method == "POST":
css_node_id = request.POST.get("css_node_id", '')
unit_node_id = request.POST.get("unit_node_id", '')
unit_name = request.POST.get("unit_name", '')
css_node = node_collection.one({"_id": ObjectId(css_node_id)})
try:
cu_node = node_collection.one({'_id': ObjectId(unit_node_id)})
except:
cu_node = None
if cu_node is None:
cu_gst = node_collection.one({'_type': "GSystemType", 'name': "CourseUnit"})
cu_node = node_collection.collection.GSystem()
cu_node.member_of.append(cu_gst._id)
# set name
cu_node.name = unit_name.strip()
cu_node.modified_by = int(request.user.id)
cu_node.created_by = int(request.user.id)
cu_node.contributors.append(int(request.user.id))
cu_node.prior_node.append(css_node._id)
cu_node.save()
response_dict["unit_node_id"] = str(cu_node._id)
node_collection.collection.update({'_id': cu_node._id}, {'$set': {'name': unit_name}}, upsert=False, multi=False)
if cu_node._id not in css_node.collection_set:
node_collection.collection.update({'_id': css_node._id}, {'$push': {'collection_set': cu_node._id}}, upsert=False, multi=False)
return HttpResponse(json.dumps(response_dict))
@login_required
def delete_from_course_structure(request, group_id):
'''
Accepts:
* ObjectId of node that is to be deleted.
It can be CourseSection/CourseSubSection/CourseUnit
Actions:
* Deletes the received node
Returns:
* success (i.e True/False)
'''
response_dict = {"success": False}
del_stat = False
if request.is_ajax() and request.method == "POST":
oid = request.POST.get("oid", '')
del_stat = delete_item(oid)
if del_stat:
response_dict["success"] = True
return HttpResponse(json.dumps(response_dict))
def delete_item(item):
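    """
    Recursively deletes a course-structure node: for anything other than a
    CourseUnit, its collection_set children are deleted first, after which
    the node itself is removed via delete_node(). Returns the deletion status.
    """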
node_item = node_collection.one({'_id': ObjectId(item)})
if u"CourseUnit" not in node_item.member_of_names_list and node_item.collection_set:
for each in node_item.collection_set:
d_st = delete_item(each)
del_status, del_status_msg = delete_node(
node_id=node_item._id,
deletion_type=0
)
return del_status
@login_required
def publish_course(request, group_id):
if request.is_ajax() and request.method == "POST":
try:
node_id = request.POST.get("node_id", "")
node = node_collection.one({'_id': ObjectId(node_id)})
node.status = unicode("PUBLISHED")
node.modified_by = int(request.user.id)
node.save()
except:
return HttpResponse("Fail")
return HttpResponse("Success")
| sunnychaudhari/gstudio | gnowsys-ndf/gnowsys_ndf/ndf/views/course.py | Python | agpl-3.0 | 54,618 |
import numpy as np
import math
import sys
import os
sys.path.insert(0,os.environ['learningml']+'/GoF/')
import classifier_eval
from classifier_eval import name_to_nclf, nclf, experiment, make_keras_model
from sklearn import tree
from sklearn.ensemble import AdaBoostClassifier
from sklearn.svm import SVC
from rep.estimators import XGBoostClassifier
from keras.wrappers.scikit_learn import KerasClassifier
import time
#nclf_list = [nclf(), name_to_nclf("bdt"), nclf('xgb',XGBoostClassifier(),['n_estimators','eta'], [[10,1000],[0.01,1.0]]) ]
#nclf_list = [name_to_nclf("nn")]
#nclf_list = [nclf('nn',"no classifier needed for nn", ['n_hidden_layers','dimof_middle'], [[0,1],[100,500]],param_opt=[0,500])]
nclf_list = [name_to_nclf("bdt"), name_to_nclf("xgb"), name_to_nclf("svm"), name_to_nclf("nn")]
#nclf_list = [nclf('bdt',AdaBoostClassifier(base_estimator=tree.DecisionTreeClassifier(max_depth=2)), ['learning_rate','n_estimators'], [[0.01,2.0],[1,1000]], param_opt=[0.0190, 837]), nclf('xgb',XGBoostClassifier(), ['n_estimators','eta'], [[10,1000],[0.01,1.0]], param_opt=[15, 0.59]), nclf('nn',"no classifier needed for nn", ['n_hidden_layers','dimof_middle'], [[0,1],[100,500]],param_opt=[0, 309]),nclf('svm',SVC(probability=True, cache_size=7000), ['C','gamma'], [[1.0,1000.0],[1E-6,0.1]], [331.4,1.445E-5 ] )]
systematics_fraction = 0.01
param_name_list = ["alphaSvalue"]
param_list = [0.125,0.130,0.132,0.133,0.134,0.135,0.14] # alphaSvalue
#param_list = [0.130]
param_to_optimise = 0.133
param_monash = 0.1365
file_name_patterns= [ os.environ['monash']+"/GoF_input/GoF_input_udsc_monash_lower_level_{1}.txt", os.environ['monash']+"/GoF_input/GoF_input_udsc_"+str(param_to_optimise)+ "_"+param_name_list[0]+"_lower_level_{1}.txt"]
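# NOTE: the "{1}" placeholder in these patterns is presumably filled in later
# (e.g. with a sample/file index) by the experiment class -- an assumption,
# since the substitution happens outside this file.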
name_CPV= "monash__CPV"
name_noCPV= "monash__noCPV"
title_CPV="monash alphaS "+str(param_to_optimise)
title_noCPV = "monash monash "
directory_name = "_monash_lower_level_2files_attempt3"
# possible : 'opt', 'eval', 'plot' or combination thereof
MODE= 'opt_eval'
start_time = time.time()
if 'opt' in MODE:
expt = experiment(nclf_list=nclf_list, file_name_patterns=file_name_patterns, scoring='chi2',single_no_bins_list = [5], systematics_fraction = systematics_fraction, only_mod=True, title_CPV=title_CPV, title_noCPV=title_noCPV, name_CPV=name_CPV, name_noCPV=name_noCPV, directory_name=directory_name)
expt.optimise(optimisation_dimension = 8, number_of_iterations=50)
evaluation_start_time = time.time()
print(50*"-"+"\noptimisation took ", (evaluation_start_time - start_time)/60. , " minutes\n" +50*"-")
#print(nclf_list[0].param_list)
if 'eval' in MODE:
for param in param_list:
file_name_patterns= [ os.environ['monash']+"/GoF_input/GoF_input_udsc_monash_lower_level_{1}.txt", os.environ['monash']+"/GoF_input/GoF_input_udsc_"+str(param)+"_"+param_name_list[0]+"_lower_level_{1}.txt"]
name_CPV= "monash_"+str(param)+"_"+param_name_list[0]+"_lower_level"
name_noCPV= "monash_"+str(param_monash)+"_"+param_name_list[0]+"_lower_level"
title_CPV = "Monash "+str(param)+" "+param_name_list[0]
title_noCPV="Monash "+str(param_monash)+" "+param_name_list[0]
directory_name = "_monash_lower_level_2files_attempt3"
if param == param_list[-1]: only_mod=False
else: only_mod = True
expt = experiment(nclf_list=nclf_list, file_name_patterns=file_name_patterns, scoring='chi2_crossval',single_no_bins_list = [5], systematics_fraction = systematics_fraction, only_mod=only_mod, title_CPV=title_CPV, title_noCPV=title_noCPV, name_CPV=name_CPV, name_noCPV=name_noCPV, directory_name=directory_name)
expt.evaluate(evaluation_dimensions = [param], keras_evaluation_dimensions = [8], number_of_evaluations=100)
end_time = time.time()
print(50*"-"+"\nevaluation took ", (end_time - evaluation_start_time)/60. , " minutes\n" +50*"-")
| weissercn/learningml | learningml/GoF/optimisation_and_evaluation/automatisation_monash_alphaSvalue_lower_level/automatisation_monash_alphaSvalue_low_level_optimisation_and_evaluation.py | Python | mit | 3,831 |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.paging import Paged
class SubnetPaged(Paged):
"""
    A paging container for iterating over a list of :class:`Subnet <azure.mgmt.network.v2017_11_01.models.Subnet>` objects.
"""
_attribute_map = {
'next_link': {'key': 'nextLink', 'type': 'str'},
'current_page': {'key': 'value', 'type': '[Subnet]'}
}
def __init__(self, *args, **kwargs):
super(SubnetPaged, self).__init__(*args, **kwargs)
| lmazuel/azure-sdk-for-python | azure-mgmt-network/azure/mgmt/network/v2017_11_01/models/subnet_paged.py | Python | mit | 922 |
# -*- encoding: utf-8 -*-
from abjad.tools import scoretools
# TODO: remove in favor of layouttools.set_line_breaks_by_line_duration()
def set_line_breaks_by_line_duration_ge(
expr,
line_duration,
line_break_class=None,
add_empty_bars=False,
):
r'''Iterate `line_break_class` instances in `expr` and
accumulate duration.
    Add a line break after every accumulated duration greater than or equal to `line_duration`:
::
>>> staff = Staff()
>>> staff.append(Measure((2, 8), "c'8 d'8"))
>>> staff.append(Measure((2, 8), "e'8 f'8"))
>>> staff.append(Measure((2, 8), "g'8 a'8"))
>>> staff.append(Measure((2, 8), "b'8 c''8"))
>>> show(staff) # doctest: +SKIP
.. doctest::
>>> print(format(staff))
\new Staff {
{
\time 2/8
c'8
d'8
}
{
e'8
f'8
}
{
g'8
a'8
}
{
b'8
c''8
}
}
::
>>> layouttools.set_line_breaks_by_line_duration_ge(
... staff,
... Duration(4, 8),
... )
>>> show(staff) # doctest: +SKIP
::
>>> print(format(staff))
\new Staff {
{
\time 2/8
c'8
d'8
}
{
e'8
f'8
\break
}
{
g'8
a'8
}
{
b'8
c''8
\break
}
}
    When ``line_break_class=None``, `line_break_class` defaults to measure.
'''
from abjad.tools import layouttools
if line_break_class is None:
line_break_class = scoretools.Measure
layouttools.set_line_breaks_by_line_duration(
expr,
line_duration,
line_break_class,
'prolated',
add_empty_bars=add_empty_bars,
)
| mscuthbert/abjad | abjad/tools/layouttools/set_line_breaks_by_line_duration_ge.py | Python | gpl-3.0 | 2,086 |
#!/usr/bin/env python
# -*- coding:utf-8 -*-
import smtplib
import string
HOST="smtp.163.com" #使用的邮箱的smtp服务器地址,这里是163的smtp地址
SUBJECT = "test email from python" #定义邮件的主题
#TO = "2213561999@qq.com" #定义收件收件人
TO = "zhangyage@yazhoujuejin.com"
FROM="zhangyage2015@163.com" #定义发件人
password="zhang19910610" #密码
text = "python rules them all!"
# define and assemble the email's header and body content
BODY = string.join(("From:%s" % FROM,
"TO:%s" % TO,
"Subject:%s" % SUBJECT,
"",
text
),'\r\n')
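# A minimal RFC 822-style message: header lines joined with CRLF, then a blank
# line, then the body text.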
server = smtplib.SMTP() # create an SMTP object
server.connect(HOST, "25") # connect to the SMTP host via connect()
server.starttls() # switch the connection to TLS encrypted transport
server.login("zhangyage2015@163.com", password) # authenticate with the mailbox
server.sendmail(FROM, [TO], BODY) # send the email
server.quit() # close the SMTP connection
| zhangyage/Python-oldboy | python-auto/mail/163_send2.py | Python | apache-2.0 | 1,124 |
#!/usr/bin/env python3
#
# Gedit Scheme Editor
# https://github.com/jonocodes/GeditSchemer
#
# Copyright (C) Jono Finger 2013 <jono@foodnotblogs.com>
#
# The program is free software: you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by the
# Free Software Foundation, either version 2 of the License, or
# (at your option) any later version.
#
# The program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program. If not, see <http://www.gnu.org/licenses/>.
import gi
gi.require_version('Gedit', '3.0')
gi.require_version('Gtk', '3.0')
from gi.repository import GObject, Gio, Gedit, Gtk
import os
from .schemer import GUI
try:
import gettext
gettext.bindtextdomain('gedit-plugins')
gettext.textdomain('gedit-plugins')
_ = gettext.gettext
except:
_ = lambda s: s
class AppActivatable(GObject.Object, Gedit.AppActivatable):
app = GObject.Property(type=Gedit.App)
def __init__(self):
GObject.Object.__init__(self)
def do_activate(self):
action = Gio.SimpleAction(name="schemer")
action.connect('activate', self.open_dialog)
self.app.add_action(action)
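        # Extend gedit's "preferences-section" menu with an entry that
        # triggers the app.schemer action registered above.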
self.menu_ext = self.extend_menu("preferences-section")
item = Gio.MenuItem.new(_("Color Scheme Editor"), "app.schemer")
self.menu_ext.append_menu_item(item)
def do_deactivate(self):
self.app.remove_action("schemer")
self.menu_ext = None
def open_dialog(self, action, parameter, data=None):
GUI(Gedit.App, os.path.join(self.plugin_info.get_data_dir(), 'ui'))
| GNOME/gedit-plugins | plugins/colorschemer/schemer/__init__.py | Python | gpl-2.0 | 1,817 |
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from __future__ import print_function
import os
import sys
import time
from twitter.common import app
from twitter.common.dirutil import tail_f
from twitter.common.dirutil.tail import tail as tail_closed
from apache.thermos.cli.common import get_path_detector
from apache.thermos.common.ckpt import CheckpointDispatcher
from apache.thermos.common.path import TaskPath
from apache.thermos.monitoring.detector import TaskDetector
from apache.thermos.monitoring.monitor import TaskMonitor
@app.command
@app.command_option("--stderr", default=False, dest='use_stderr', action='store_true',
help="Tail stderr instead of stdout")
def tail(args, options):
"""Tail the logs of a task process.
Usage: thermos tail task_name [process_name]
"""
if len(args) == 0:
app.error('Expected a task to tail, got nothing!')
if len(args) not in (1, 2):
app.error('Expected at most two arguments (task and optional process), got %d' % len(args))
task_id = args[0]
path_detector = get_path_detector()
for root in path_detector.get_paths():
detector = TaskDetector(root=root)
checkpoint = CheckpointDispatcher.from_file(detector.get_checkpoint(task_id))
if checkpoint:
break
else:
print('ERROR: Could not find task.')
sys.exit(1)
log_dir = checkpoint.header.log_dir
process_runs = [(process, run) for (process, run) in detector.get_process_runs(task_id, log_dir)]
if len(args) == 2:
process_runs = [(process, run) for (process, run) in process_runs if process == args[1]]
if len(process_runs) == 0:
print('ERROR: No processes found.', file=sys.stderr)
sys.exit(1)
processes = set([process for process, _ in process_runs])
if len(processes) != 1:
print('ERROR: More than one process matches query.', file=sys.stderr)
sys.exit(1)
process = processes.pop()
run = max([run for _, run in process_runs])
logdir = TaskPath(root=root, task_id=args[0], process=process,
run=run, log_dir=log_dir).getpath('process_logdir')
logfile = os.path.join(logdir, 'stderr' if options.use_stderr else 'stdout')
monitor = TaskMonitor(root, args[0])
def log_is_active():
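    """Return True while the tailed (process, run) pair is still active."""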
active_processes = monitor.get_active_processes()
for process_status, process_run in active_processes:
if process_status.process == process and process_run == run:
return True
return False
if not log_is_active():
print('Tail of terminal log %s' % logfile)
for line in tail_closed(logfile):
print(line.rstrip())
return
now = time.time()
next_check = now + 5.0
print('Tail of active log %s' % logfile)
for line in tail_f(logfile, include_last=True, forever=False):
print(line.rstrip())
if time.time() > next_check:
if not log_is_active():
break
else:
next_check = time.time() + 5.0
| thinker0/aurora | src/main/python/apache/thermos/cli/commands/tail.py | Python | apache-2.0 | 3,381 |
# -*- coding: utf-8 -*-
"""
Created on Mon Feb 9 13:55:16 2015
@author: adelpret
"""
from numpy import zeros as zeros
NJ = 30;
kp_pos = zeros(NJ); # joint position control proportional gains
kd_pos = zeros(NJ); # joint position control derivative gains
ki_pos = zeros(NJ); # joint position control integral gains
pwm_max = zeros(NJ);
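# Only the proportional gains (kp_pos) are tuned below; kd_pos, ki_pos and
# pwm_max are left at their zero defaults.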
# PARAMETERS OF R_hip_y JOINT 0
kp_pos[0] = 800;
# PARAMETERS OF R_hip_r JOINT 1
kp_pos[1] = 800;
# PARAMETERS OF R_hip_p JOINT 2
kp_pos[2] = 800;
# PARAMETERS OF R_knee JOINT 3
kp_pos[3] = 800;
# PARAMETERS OF R_ankle pitch JOINT 4
kp_pos[4] = 800;
# PARAMETERS OF R_ankle roll JOINT 5
kp_pos[5] = 800;
# PARAMETERS OF L_hip_y JOINT 6
kp_pos[6] = 800;
# PARAMETERS OF L_hip_r JOINT 7
kp_pos[7] = 800;
# PARAMETERS OF L_hip_p JOINT 8
kp_pos[8] = 800;
# PARAMETERS OF L_knee JOINT 9
kp_pos[9] = 800;
# PARAMETERS OF L_ankle pitch JOINT 10
kp_pos[10] = 800;
# PARAMETERS OF L_ankle roll JOINT 11
kp_pos[11] = 800;
# PARAMETERS OF torso yaw and pitch JOINTS 12, 13
kp_pos[12] = 800;
kp_pos[13] = 800;
# PARAMETERS OF head yaw and pitch JOINTS 14, 15
kp_pos[14] = 50;
kp_pos[15] = 50;
# PARAMETERS OF right shoulder pitch JOINT 16
kp_pos[16] = 500;
# PARAMETERS OF R_shoulder roll JOINT 17
kp_pos[17] = 500; #
# PARAMETERS OF R_shoulder yaw JOINT 18
kp_pos[18] = 500; #
# PARAMETERS OF right elbow joint 19
kp_pos[19] = 500
# PARAMETERS OF right wrist yaw joint 20
kp_pos[20] = 500
# PARAMETERS OF right wrist pitch joint 21
kp_pos[21] = 500
# PARAMETERS OF right hand joint 22
kp_pos[22] = 50
# PARAMETERS OF left shoulder pitch JOINT 23
kp_pos[23] = 500;
# PARAMETERS OF L_shoulder roll JOINT 24
kp_pos[24] = 500; #
# PARAMETERS OF L_shoulder yaw JOINT 25
kp_pos[25] = 500; #
# PARAMETERS OF left elbow joint 26
kp_pos[26] = 500
# PARAMETERS OF left wrist yaw joint 27
kp_pos[27] = 500
# PARAMETERS OF left wrist pitch joint 28
kp_pos[28] = 500
# PARAMETERS OF left hand joint 29
kp_pos[29] = 50
| andreadelprete/sot-torque-control | python/hrp2_joint_pos_ctrl_gains.py | Python | lgpl-3.0 | 1,941 |
import os
import tuned.logs
from . import base
from tuned.utils.commands import commands
log = tuned.logs.get()
class cpulist2hex(base.Function):
"""
Conversion function: converts CPU list to hexadecimal CPU mask
"""
def __init__(self):
# arbitrary number of arguments
super(cpulist2hex, self).__init__("cpulist2hex", 0)
def execute(self, args):
if not super(cpulist2hex, self).execute(args):
return None
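		# profile syntax splits function arguments on commas, so re-join them
		# before conversion; the ",," separator mirrors tuned's other cpulist
		# functions (assumption about parser behavior, not verified here)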
return self._cmd.cpulist2hex(",,".join(args))
| redhat-performance/tuned | tuned/profiles/functions/function_cpulist2hex.py | Python | gpl-2.0 | 470 |
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""The tab switching measurement.
This measurement opens pages in different tabs. After all the tabs have opened,
it cycles through each tab in sequence, and records a histogram of the time
between when a tab was first requested to be shown, and when it was painted.
"""
from metrics import histogram_util
from telemetry.core import util
from telemetry.page import page_measurement
from telemetry.page import page_runner
# TODO: Revisit this test once multitab support is finalized.
class TabSwitching(page_measurement.PageMeasurement):
def CustomizeBrowserOptions(self, options):
options.AppendExtraBrowserArg('--enable-stats-collection-bindings')
options.AppendExtraBrowserArg('--dom-automation')
def CanRunForPage(self, page):
return not page.page_set.pages.index(page)
def DidNavigateToPage(self, page, tab):
for i in xrange(1, len(page.page_set.pages)):
t = tab.browser.tabs.New()
page_state = page_runner.PageState()
page_state.PreparePage(page.page_set.pages[i], t)
def MeasurePage(self, _, tab, results):
"""Although this is called MeasurePage, we're actually using this function
to cycle through each tab that was opened via DidNavigateToPage and
    then record a single histogram for the tab switching metric.
"""
histogram_name = 'MPArch.RWH_TabSwitchPaintDuration'
histogram_type = 'getBrowserHistogram'
first_histogram = histogram_util.GetHistogramFromDomAutomation(
histogram_type, histogram_name, tab)
prev_histogram = first_histogram
for i in xrange(len(tab.browser.tabs)):
t = tab.browser.tabs[i]
t.Activate()
def _IsDone():
cur_histogram = histogram_util.GetHistogramFromDomAutomation(
histogram_type, histogram_name, tab)
diff_histogram = histogram_util.SubtractHistogram(
cur_histogram, prev_histogram)
return diff_histogram
util.WaitFor(_IsDone, 30)
prev_histogram = histogram_util.GetHistogramFromDomAutomation(
histogram_type, histogram_name, tab)
last_histogram = histogram_util.GetHistogramFromDomAutomation(
histogram_type, histogram_name, tab)
diff_histogram = histogram_util.SubtractHistogram(last_histogram,
first_histogram)
results.AddSummary(histogram_name, '', diff_histogram,
data_type='unimportant-histogram')
| aospx-kitkat/platform_external_chromium_org | tools/perf/measurements/tab_switching.py | Python | bsd-3-clause | 2,524 |
import ConfigurationFileProbe
| seblefevre/testerman | plugins/probes/configurationfile/__init__.py | Python | gpl-2.0 | 30 |
from __future__ import unicode_literals
from collections import OrderedDict
import datetime
from operator import attrgetter
import pickle
import unittest
import warnings
from django.core.exceptions import FieldError
from django.db import connection, DEFAULT_DB_ALIAS
from django.db.models import Count, F, Q
from django.db.models.sql.where import WhereNode, EverythingNode, NothingNode
from django.db.models.sql.constants import LOUTER
from django.db.models.sql.datastructures import EmptyResultSet
from django.test import TestCase, skipUnlessDBFeature
from django.test.utils import CaptureQueriesContext
from django.utils.deprecation import RemovedInDjango19Warning
from django.utils import six
from django.utils.six.moves import range
from .models import (
Annotation, Article, Author, Celebrity, Child, Cover, Detail, DumbCategory,
ExtraInfo, Fan, Item, LeafA, Join, LeafB, LoopX, LoopZ, ManagedModel,
Member, NamedCategory, Note, Number, Plaything, PointerA, Ranking, Related,
Report, ReservedName, Tag, TvChef, Valid, X, Food, Eaten, Node, ObjectA,
ProxyObjectA, ChildObjectA, ObjectB, ProxyObjectB, ObjectC, CategoryItem,
SimpleCategory, SpecialCategory, OneToOneCategory, NullableName, ProxyCategory,
SingleObject, RelatedObject, ModelA, ModelB, ModelC, ModelD, Responsibility, Job,
JobResponsibilities, BaseA, FK1, Identifier, Program, Channel, Page, Paragraph,
Chapter, Book, MyObject, Order, OrderItem, SharedConnection, Task, Staff,
StaffUser, CategoryRelationship, Ticket21203Parent, Ticket21203Child, Person,
Company, Employment, CustomPk, CustomPkTag, Classroom, School, Student,
Ticket23605A, Ticket23605B, Ticket23605C)
class BaseQuerysetTest(TestCase):
def assertValueQuerysetEqual(self, qs, values):
return self.assertQuerysetEqual(qs, values, transform=lambda x: x)
class Queries1Tests(BaseQuerysetTest):
@classmethod
def setUpTestData(cls):
generic = NamedCategory.objects.create(name="Generic")
cls.t1 = Tag.objects.create(name='t1', category=generic)
cls.t2 = Tag.objects.create(name='t2', parent=cls.t1, category=generic)
cls.t3 = Tag.objects.create(name='t3', parent=cls.t1)
t4 = Tag.objects.create(name='t4', parent=cls.t3)
cls.t5 = Tag.objects.create(name='t5', parent=cls.t3)
cls.n1 = Note.objects.create(note='n1', misc='foo', id=1)
n2 = Note.objects.create(note='n2', misc='bar', id=2)
cls.n3 = Note.objects.create(note='n3', misc='foo', id=3)
ann1 = Annotation.objects.create(name='a1', tag=cls.t1)
ann1.notes.add(cls.n1)
ann2 = Annotation.objects.create(name='a2', tag=t4)
ann2.notes.add(n2, cls.n3)
# Create these out of order so that sorting by 'id' will be different to sorting
# by 'info'. Helps detect some problems later.
cls.e2 = ExtraInfo.objects.create(info='e2', note=n2, value=41)
e1 = ExtraInfo.objects.create(info='e1', note=cls.n1, value=42)
cls.a1 = Author.objects.create(name='a1', num=1001, extra=e1)
cls.a2 = Author.objects.create(name='a2', num=2002, extra=e1)
a3 = Author.objects.create(name='a3', num=3003, extra=cls.e2)
cls.a4 = Author.objects.create(name='a4', num=4004, extra=cls.e2)
cls.time1 = datetime.datetime(2007, 12, 19, 22, 25, 0)
cls.time2 = datetime.datetime(2007, 12, 19, 21, 0, 0)
time3 = datetime.datetime(2007, 12, 20, 22, 25, 0)
time4 = datetime.datetime(2007, 12, 20, 21, 0, 0)
cls.i1 = Item.objects.create(name='one', created=cls.time1, modified=cls.time1, creator=cls.a1, note=cls.n3)
cls.i1.tags = [cls.t1, cls.t2]
cls.i2 = Item.objects.create(name='two', created=cls.time2, creator=cls.a2, note=n2)
cls.i2.tags = [cls.t1, cls.t3]
cls.i3 = Item.objects.create(name='three', created=time3, creator=cls.a2, note=cls.n3)
i4 = Item.objects.create(name='four', created=time4, creator=cls.a4, note=cls.n3)
i4.tags = [t4]
cls.r1 = Report.objects.create(name='r1', creator=cls.a1)
Report.objects.create(name='r2', creator=a3)
Report.objects.create(name='r3')
# Ordering by 'rank' gives us rank2, rank1, rank3. Ordering by the Meta.ordering
# will be rank3, rank2, rank1.
cls.rank1 = Ranking.objects.create(rank=2, author=cls.a2)
Cover.objects.create(title="first", item=i4)
Cover.objects.create(title="second", item=cls.i2)
def test_subquery_condition(self):
qs1 = Tag.objects.filter(pk__lte=0)
qs2 = Tag.objects.filter(parent__in=qs1)
qs3 = Tag.objects.filter(parent__in=qs2)
self.assertEqual(qs3.query.subq_aliases, {'T', 'U', 'V'})
self.assertIn('v0', str(qs3.query).lower())
qs4 = qs3.filter(parent__in=qs1)
self.assertEqual(qs4.query.subq_aliases, {'T', 'U', 'V'})
# It is possible to reuse U for the second subquery, no need to use W.
self.assertNotIn('w0', str(qs4.query).lower())
# So, 'U0."id"' is referenced twice.
        self.assertEqual(str(qs4.query).lower().count('u0'), 2)
def test_ticket1050(self):
self.assertQuerysetEqual(
Item.objects.filter(tags__isnull=True),
['<Item: three>']
)
self.assertQuerysetEqual(
Item.objects.filter(tags__id__isnull=True),
['<Item: three>']
)
def test_ticket1801(self):
self.assertQuerysetEqual(
Author.objects.filter(item=self.i2),
['<Author: a2>']
)
self.assertQuerysetEqual(
Author.objects.filter(item=self.i3),
['<Author: a2>']
)
self.assertQuerysetEqual(
Author.objects.filter(item=self.i2) & Author.objects.filter(item=self.i3),
['<Author: a2>']
)
def test_ticket2306(self):
# Checking that no join types are "left outer" joins.
query = Item.objects.filter(tags=self.t2).query
self.assertNotIn(LOUTER, [x.join_type for x in query.alias_map.values()])
self.assertQuerysetEqual(
Item.objects.filter(Q(tags=self.t1)).order_by('name'),
['<Item: one>', '<Item: two>']
)
self.assertQuerysetEqual(
Item.objects.filter(Q(tags=self.t1)).filter(Q(tags=self.t2)),
['<Item: one>']
)
self.assertQuerysetEqual(
Item.objects.filter(Q(tags=self.t1)).filter(Q(creator__name='fred') | Q(tags=self.t2)),
['<Item: one>']
)
# Each filter call is processed "at once" against a single table, so this is
# different from the previous example as it tries to find tags that are two
# things at once (rather than two tags).
self.assertQuerysetEqual(
Item.objects.filter(Q(tags=self.t1) & Q(tags=self.t2)),
[]
)
self.assertQuerysetEqual(
Item.objects.filter(Q(tags=self.t1), Q(creator__name='fred') | Q(tags=self.t2)),
[]
)
qs = Author.objects.filter(ranking__rank=2, ranking__id=self.rank1.id)
self.assertQuerysetEqual(list(qs), ['<Author: a2>'])
        self.assertEqual(qs.query.count_active_tables(), 2)
qs = Author.objects.filter(ranking__rank=2).filter(ranking__id=self.rank1.id)
self.assertEqual(qs.query.count_active_tables(), 3)
def test_ticket4464(self):
self.assertQuerysetEqual(
Item.objects.filter(tags=self.t1).filter(tags=self.t2),
['<Item: one>']
)
self.assertQuerysetEqual(
Item.objects.filter(tags__in=[self.t1, self.t2]).distinct().order_by('name'),
['<Item: one>', '<Item: two>']
)
self.assertQuerysetEqual(
Item.objects.filter(tags__in=[self.t1, self.t2]).filter(tags=self.t3),
['<Item: two>']
)
# Make sure .distinct() works with slicing (this was broken in Oracle).
self.assertQuerysetEqual(
Item.objects.filter(tags__in=[self.t1, self.t2]).order_by('name')[:3],
['<Item: one>', '<Item: one>', '<Item: two>']
)
self.assertQuerysetEqual(
Item.objects.filter(tags__in=[self.t1, self.t2]).distinct().order_by('name')[:3],
['<Item: one>', '<Item: two>']
)
def test_tickets_2080_3592(self):
self.assertQuerysetEqual(
Author.objects.filter(item__name='one') | Author.objects.filter(name='a3'),
['<Author: a1>', '<Author: a3>']
)
self.assertQuerysetEqual(
Author.objects.filter(Q(item__name='one') | Q(name='a3')),
['<Author: a1>', '<Author: a3>']
)
self.assertQuerysetEqual(
Author.objects.filter(Q(name='a3') | Q(item__name='one')),
['<Author: a1>', '<Author: a3>']
)
self.assertQuerysetEqual(
Author.objects.filter(Q(item__name='three') | Q(report__name='r3')),
['<Author: a2>']
)
def test_ticket6074(self):
# Merging two empty result sets shouldn't leave a queryset with no constraints
# (which would match everything).
self.assertQuerysetEqual(Author.objects.filter(Q(id__in=[])), [])
self.assertQuerysetEqual(
Author.objects.filter(Q(id__in=[]) | Q(id__in=[])),
[]
)
def test_tickets_1878_2939(self):
self.assertEqual(Item.objects.values('creator').distinct().count(), 3)
# Create something with a duplicate 'name' so that we can test multi-column
# cases (which require some tricky SQL transformations under the covers).
xx = Item(name='four', created=self.time1, creator=self.a2, note=self.n1)
xx.save()
self.assertEqual(
Item.objects.exclude(name='two').values('creator', 'name').distinct().count(),
4
)
self.assertEqual(
Item.objects.exclude(name='two').extra(select={'foo': '%s'}, select_params=(1,)).values('creator', 'name', 'foo').distinct().count(),
4
)
self.assertEqual(
Item.objects.exclude(name='two').extra(select={'foo': '%s'}, select_params=(1,)).values('creator', 'name').distinct().count(),
4
)
xx.delete()
def test_ticket7323(self):
self.assertEqual(Item.objects.values('creator', 'name').count(), 4)
def test_ticket2253(self):
q1 = Item.objects.order_by('name')
q2 = Item.objects.filter(id=self.i1.id)
self.assertQuerysetEqual(
q1,
['<Item: four>', '<Item: one>', '<Item: three>', '<Item: two>']
)
self.assertQuerysetEqual(q2, ['<Item: one>'])
self.assertQuerysetEqual(
(q1 | q2).order_by('name'),
['<Item: four>', '<Item: one>', '<Item: three>', '<Item: two>']
)
self.assertQuerysetEqual((q1 & q2).order_by('name'), ['<Item: one>'])
q1 = Item.objects.filter(tags=self.t1)
q2 = Item.objects.filter(note=self.n3, tags=self.t2)
q3 = Item.objects.filter(creator=self.a4)
self.assertQuerysetEqual(
((q1 & q2) | q3).order_by('name'),
['<Item: four>', '<Item: one>']
)
def test_order_by_tables(self):
q1 = Item.objects.order_by('name')
q2 = Item.objects.filter(id=self.i1.id)
list(q2)
combined_query = (q1 & q2).order_by('name').query
self.assertEqual(len([
t for t in combined_query.tables if combined_query.alias_refcount[t]
]), 1)
def test_order_by_join_unref(self):
"""
This test is related to the above one, testing that there aren't
old JOINs in the query.
"""
qs = Celebrity.objects.order_by('greatest_fan__fan_of')
self.assertIn('OUTER JOIN', str(qs.query))
qs = qs.order_by('id')
self.assertNotIn('OUTER JOIN', str(qs.query))
def test_tickets_4088_4306(self):
self.assertQuerysetEqual(
Report.objects.filter(creator=1001),
['<Report: r1>']
)
self.assertQuerysetEqual(
Report.objects.filter(creator__num=1001),
['<Report: r1>']
)
self.assertQuerysetEqual(Report.objects.filter(creator__id=1001), [])
self.assertQuerysetEqual(
Report.objects.filter(creator__id=self.a1.id),
['<Report: r1>']
)
self.assertQuerysetEqual(
Report.objects.filter(creator__name='a1'),
['<Report: r1>']
)
def test_ticket4510(self):
self.assertQuerysetEqual(
Author.objects.filter(report__name='r1'),
['<Author: a1>']
)
def test_ticket7378(self):
self.assertQuerysetEqual(self.a1.report_set.all(), ['<Report: r1>'])
def test_tickets_5324_6704(self):
self.assertQuerysetEqual(
Item.objects.filter(tags__name='t4'),
['<Item: four>']
)
self.assertQuerysetEqual(
Item.objects.exclude(tags__name='t4').order_by('name').distinct(),
['<Item: one>', '<Item: three>', '<Item: two>']
)
self.assertQuerysetEqual(
Item.objects.exclude(tags__name='t4').order_by('name').distinct().reverse(),
['<Item: two>', '<Item: three>', '<Item: one>']
)
self.assertQuerysetEqual(
Author.objects.exclude(item__name='one').distinct().order_by('name'),
['<Author: a2>', '<Author: a3>', '<Author: a4>']
)
# Excluding across a m2m relation when there is more than one related
# object associated was problematic.
self.assertQuerysetEqual(
Item.objects.exclude(tags__name='t1').order_by('name'),
['<Item: four>', '<Item: three>']
)
self.assertQuerysetEqual(
Item.objects.exclude(tags__name='t1').exclude(tags__name='t4'),
['<Item: three>']
)
# Excluding from a relation that cannot be NULL should not use outer joins.
query = Item.objects.exclude(creator__in=[self.a1, self.a2]).query
self.assertNotIn(LOUTER, [x.join_type for x in query.alias_map.values()])
# Similarly, when one of the joins cannot possibly, ever, involve NULL
# values (Author -> ExtraInfo, in the following), it should never be
# promoted to a left outer join. So the following query should only
# involve one "left outer" join (Author -> Item is 0-to-many).
qs = Author.objects.filter(id=self.a1.id).filter(Q(extra__note=self.n1) | Q(item__note=self.n3))
self.assertEqual(
len([x for x in qs.query.alias_map.values() if x.join_type == LOUTER and qs.query.alias_refcount[x.table_alias]]),
1
)
# The previous changes shouldn't affect nullable foreign key joins.
self.assertQuerysetEqual(
Tag.objects.filter(parent__isnull=True).order_by('name'),
['<Tag: t1>']
)
self.assertQuerysetEqual(
Tag.objects.exclude(parent__isnull=True).order_by('name'),
['<Tag: t2>', '<Tag: t3>', '<Tag: t4>', '<Tag: t5>']
)
self.assertQuerysetEqual(
Tag.objects.exclude(Q(parent__name='t1') | Q(parent__isnull=True)).order_by('name'),
['<Tag: t4>', '<Tag: t5>']
)
self.assertQuerysetEqual(
Tag.objects.exclude(Q(parent__isnull=True) | Q(parent__name='t1')).order_by('name'),
['<Tag: t4>', '<Tag: t5>']
)
self.assertQuerysetEqual(
Tag.objects.exclude(Q(parent__parent__isnull=True)).order_by('name'),
['<Tag: t4>', '<Tag: t5>']
)
self.assertQuerysetEqual(
Tag.objects.filter(~Q(parent__parent__isnull=True)).order_by('name'),
['<Tag: t4>', '<Tag: t5>']
)
def test_ticket2091(self):
t = Tag.objects.get(name='t4')
self.assertQuerysetEqual(
Item.objects.filter(tags__in=[t]),
['<Item: four>']
)
def test_avoid_infinite_loop_on_too_many_subqueries(self):
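        # Each nested pk__in adds another subquery; once nesting exceeds the
        # compiler's limit, it must raise RuntimeError rather than recurse
        # until the interpreter's own stack overflows.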
x = Tag.objects.filter(pk=1)
local_recursion_limit = 127
msg = 'Maximum recursion depth exceeded: too many subqueries.'
with self.assertRaisesMessage(RuntimeError, msg):
for i in six.moves.range(local_recursion_limit * 2):
x = Tag.objects.filter(pk__in=x)
def test_reasonable_number_of_subq_aliases(self):
x = Tag.objects.filter(pk=1)
for _ in range(20):
x = Tag.objects.filter(pk__in=x)
self.assertEqual(
x.query.subq_aliases, {
'T', 'U', 'V', 'W', 'X', 'Y', 'Z', 'AA', 'AB', 'AC', 'AD',
'AE', 'AF', 'AG', 'AH', 'AI', 'AJ', 'AK', 'AL', 'AM', 'AN',
}
)
def test_heterogeneous_qs_combination(self):
# Combining querysets built on different models should behave in a well-defined
# fashion. We raise an error.
self.assertRaisesMessage(
AssertionError,
'Cannot combine queries on two different base models.',
lambda: Author.objects.all() & Tag.objects.all()
)
self.assertRaisesMessage(
AssertionError,
'Cannot combine queries on two different base models.',
lambda: Author.objects.all() | Tag.objects.all()
)
def test_ticket3141(self):
self.assertEqual(Author.objects.extra(select={'foo': '1'}).count(), 4)
self.assertEqual(
Author.objects.extra(select={'foo': '%s'}, select_params=(1,)).count(),
4
)
def test_ticket2400(self):
self.assertQuerysetEqual(
Author.objects.filter(item__isnull=True),
['<Author: a3>']
)
self.assertQuerysetEqual(
Tag.objects.filter(item__isnull=True),
['<Tag: t5>']
)
def test_ticket2496(self):
self.assertQuerysetEqual(
Item.objects.extra(tables=['queries_author']).select_related().order_by('name')[:1],
['<Item: four>']
)
def test_tickets_2076_7256(self):
# Ordering on related tables should be possible, even if the table is
# not otherwise involved.
self.assertQuerysetEqual(
Item.objects.order_by('note__note', 'name'),
['<Item: two>', '<Item: four>', '<Item: one>', '<Item: three>']
)
# Ordering on a related field should use the remote model's default
# ordering as a final step.
self.assertQuerysetEqual(
Author.objects.order_by('extra', '-name'),
['<Author: a2>', '<Author: a1>', '<Author: a4>', '<Author: a3>']
)
# Using remote model default ordering can span multiple models (in this
# case, Cover is ordered by Item's default, which uses Note's default).
self.assertQuerysetEqual(
Cover.objects.all(),
['<Cover: first>', '<Cover: second>']
)
# If the remote model does not have a default ordering, we order by its 'id'
# field.
self.assertQuerysetEqual(
Item.objects.order_by('creator', 'name'),
['<Item: one>', '<Item: three>', '<Item: two>', '<Item: four>']
)
# Ordering by a many-valued attribute (e.g. a many-to-many or reverse
# ForeignKey) is legal, but the results might not make sense. That
# isn't Django's problem. Garbage in, garbage out.
self.assertQuerysetEqual(
Item.objects.filter(tags__isnull=False).order_by('tags', 'id'),
['<Item: one>', '<Item: two>', '<Item: one>', '<Item: two>', '<Item: four>']
)
# If we replace the default ordering, Django adjusts the required
# tables automatically. Item normally requires a join with Note to do
# the default ordering, but that isn't needed here.
qs = Item.objects.order_by('name')
self.assertQuerysetEqual(
qs,
['<Item: four>', '<Item: one>', '<Item: three>', '<Item: two>']
)
self.assertEqual(len(qs.query.tables), 1)
def test_tickets_2874_3002(self):
qs = Item.objects.select_related().order_by('note__note', 'name')
self.assertQuerysetEqual(
qs,
['<Item: two>', '<Item: four>', '<Item: one>', '<Item: three>']
)
# This is also a good select_related() test because there are multiple
# Note entries in the SQL. The two Note items should be different.
        self.assertEqual(repr(qs[0].note), '<Note: n2>')
self.assertEqual(repr(qs[0].creator.extra.note), '<Note: n1>')
def test_ticket3037(self):
self.assertQuerysetEqual(
Item.objects.filter(Q(creator__name='a3', name='two') | Q(creator__name='a4', name='four')),
['<Item: four>']
)
def test_tickets_5321_7070(self):
# Ordering columns must be included in the output columns. Note that
# this means results that might otherwise be distinct are not (if there
# are multiple values in the ordering cols), as in this example. This
# isn't a bug; it's a warning to be careful with the selection of
# ordering columns.
self.assertValueQuerysetEqual(
Note.objects.values('misc').distinct().order_by('note', '-misc'),
[{'misc': 'foo'}, {'misc': 'bar'}, {'misc': 'foo'}]
)
def test_ticket4358(self):
# If you don't pass any fields to values(), relation fields are
# returned as "foo_id" keys, not "foo". For consistency, you should be
# able to pass "foo_id" in the fields list and have it work, too. We
# actually allow both "foo" and "foo_id".
# The *_id version is returned by default.
self.assertIn('note_id', ExtraInfo.objects.values()[0])
# You can also pass it in explicitly.
self.assertValueQuerysetEqual(
ExtraInfo.objects.values('note_id'),
[{'note_id': 1}, {'note_id': 2}]
)
# ...or use the field name.
self.assertValueQuerysetEqual(
ExtraInfo.objects.values('note'),
[{'note': 1}, {'note': 2}]
)
def test_ticket2902(self):
# Parameters can be given to extra_select, *if* you use an OrderedDict.
# (First we need to know which order the keys fall in "naturally" on
# your system, so we can put things in the wrong way around from
# normal. A normal dict would thus fail.)
s = [('a', '%s'), ('b', '%s')]
params = ['one', 'two']
if {'a': 1, 'b': 2}.keys() == ['a', 'b']:
s.reverse()
params.reverse()
# This slightly odd comparison works around the fact that PostgreSQL will
# return 'one' and 'two' as strings, not Unicode objects. It's a side-effect of
# using constants here and not a real concern.
d = Item.objects.extra(select=OrderedDict(s), select_params=params).values('a', 'b')[0]
self.assertEqual(d, {'a': 'one', 'b': 'two'})
# Order by the number of tags attached to an item.
l = Item.objects.extra(select={'count': 'select count(*) from queries_item_tags where queries_item_tags.item_id = queries_item.id'}).order_by('-count')
self.assertEqual([o.count for o in l], [2, 2, 1, 0])
def test_ticket6154(self):
# Multiple filter statements are joined using "AND" all the time.
self.assertQuerysetEqual(
Author.objects.filter(id=self.a1.id).filter(Q(extra__note=self.n1) | Q(item__note=self.n3)),
['<Author: a1>']
)
self.assertQuerysetEqual(
Author.objects.filter(Q(extra__note=self.n1) | Q(item__note=self.n3)).filter(id=self.a1.id),
['<Author: a1>']
)
def test_ticket6981(self):
self.assertQuerysetEqual(
Tag.objects.select_related('parent').order_by('name'),
['<Tag: t1>', '<Tag: t2>', '<Tag: t3>', '<Tag: t4>', '<Tag: t5>']
)
def test_ticket9926(self):
self.assertQuerysetEqual(
Tag.objects.select_related("parent", "category").order_by('name'),
['<Tag: t1>', '<Tag: t2>', '<Tag: t3>', '<Tag: t4>', '<Tag: t5>']
)
self.assertQuerysetEqual(
Tag.objects.select_related('parent', "parent__category").order_by('name'),
['<Tag: t1>', '<Tag: t2>', '<Tag: t3>', '<Tag: t4>', '<Tag: t5>']
)
def test_tickets_6180_6203(self):
# Dates with limits and/or counts
self.assertEqual(Item.objects.count(), 4)
self.assertEqual(Item.objects.datetimes('created', 'month').count(), 1)
self.assertEqual(Item.objects.datetimes('created', 'day').count(), 2)
self.assertEqual(len(Item.objects.datetimes('created', 'day')), 2)
self.assertEqual(Item.objects.datetimes('created', 'day')[0], datetime.datetime(2007, 12, 19, 0, 0))
def test_tickets_7087_12242(self):
# Dates with extra select columns
self.assertQuerysetEqual(
Item.objects.datetimes('created', 'day').extra(select={'a': 1}),
['datetime.datetime(2007, 12, 19, 0, 0)', 'datetime.datetime(2007, 12, 20, 0, 0)']
)
self.assertQuerysetEqual(
Item.objects.extra(select={'a': 1}).datetimes('created', 'day'),
['datetime.datetime(2007, 12, 19, 0, 0)', 'datetime.datetime(2007, 12, 20, 0, 0)']
)
name = "one"
self.assertQuerysetEqual(
Item.objects.datetimes('created', 'day').extra(where=['name=%s'], params=[name]),
['datetime.datetime(2007, 12, 19, 0, 0)']
)
self.assertQuerysetEqual(
Item.objects.extra(where=['name=%s'], params=[name]).datetimes('created', 'day'),
['datetime.datetime(2007, 12, 19, 0, 0)']
)
def test_ticket7155(self):
# Nullable dates
self.assertQuerysetEqual(
Item.objects.datetimes('modified', 'day'),
['datetime.datetime(2007, 12, 19, 0, 0)']
)
def test_ticket7098(self):
# Make sure semi-deprecated ordering by related models syntax still
# works.
self.assertValueQuerysetEqual(
Item.objects.values('note__note').order_by('queries_note.note', 'id'),
[{'note__note': 'n2'}, {'note__note': 'n3'}, {'note__note': 'n3'}, {'note__note': 'n3'}]
)
def test_ticket7096(self):
# Make sure exclude() with multiple conditions continues to work.
self.assertQuerysetEqual(
Tag.objects.filter(parent=self.t1, name='t3').order_by('name'),
['<Tag: t3>']
)
self.assertQuerysetEqual(
Tag.objects.exclude(parent=self.t1, name='t3').order_by('name'),
['<Tag: t1>', '<Tag: t2>', '<Tag: t4>', '<Tag: t5>']
)
self.assertQuerysetEqual(
Item.objects.exclude(tags__name='t1', name='one').order_by('name').distinct(),
['<Item: four>', '<Item: three>', '<Item: two>']
)
self.assertQuerysetEqual(
Item.objects.filter(name__in=['three', 'four']).exclude(tags__name='t1').order_by('name'),
['<Item: four>', '<Item: three>']
)
# More twisted cases, involving nested negations.
self.assertQuerysetEqual(
Item.objects.exclude(~Q(tags__name='t1', name='one')),
['<Item: one>']
)
self.assertQuerysetEqual(
Item.objects.filter(~Q(tags__name='t1', name='one'), name='two'),
['<Item: two>']
)
self.assertQuerysetEqual(
Item.objects.exclude(~Q(tags__name='t1', name='one'), name='two'),
['<Item: four>', '<Item: one>', '<Item: three>']
)
def test_tickets_7204_7506(self):
# Make sure querysets with related fields can be pickled. If this
# doesn't crash, it's a Good Thing.
pickle.dumps(Item.objects.all())
def test_ticket7813(self):
# We should also be able to pickle things that use select_related().
# The only tricky thing here is to ensure that we do the related
# selections properly after unpickling.
qs = Item.objects.select_related()
query = qs.query.get_compiler(qs.db).as_sql()[0]
query2 = pickle.loads(pickle.dumps(qs.query))
self.assertEqual(
query2.get_compiler(qs.db).as_sql()[0],
query
)
def test_deferred_load_qs_pickling(self):
# Check pickling of deferred-loading querysets
qs = Item.objects.defer('name', 'creator')
q2 = pickle.loads(pickle.dumps(qs))
self.assertEqual(list(qs), list(q2))
q3 = pickle.loads(pickle.dumps(qs, pickle.HIGHEST_PROTOCOL))
self.assertEqual(list(qs), list(q3))
def test_ticket7277(self):
self.assertQuerysetEqual(
self.n1.annotation_set.filter(Q(tag=self.t5) | Q(tag__children=self.t5) | Q(tag__children__children=self.t5)),
['<Annotation: a1>']
)
def test_tickets_7448_7707(self):
# Complex objects should be converted to strings before being used in
# lookups.
self.assertQuerysetEqual(
Item.objects.filter(created__in=[self.time1, self.time2]),
['<Item: one>', '<Item: two>']
)
def test_ticket7235(self):
# An EmptyQuerySet should not raise exceptions if it is filtered.
Eaten.objects.create(meal='m')
q = Eaten.objects.none()
with self.assertNumQueries(0):
self.assertQuerysetEqual(q.all(), [])
self.assertQuerysetEqual(q.filter(meal='m'), [])
self.assertQuerysetEqual(q.exclude(meal='m'), [])
self.assertQuerysetEqual(q.complex_filter({'pk': 1}), [])
self.assertQuerysetEqual(q.select_related('food'), [])
self.assertQuerysetEqual(q.annotate(Count('food')), [])
self.assertQuerysetEqual(q.order_by('meal', 'food'), [])
self.assertQuerysetEqual(q.distinct(), [])
self.assertQuerysetEqual(
q.extra(select={'foo': "1"}),
[]
)
q.query.low_mark = 1
self.assertRaisesMessage(
AssertionError,
'Cannot change a query once a slice has been taken',
q.extra, select={'foo': "1"}
)
self.assertQuerysetEqual(q.reverse(), [])
self.assertQuerysetEqual(q.defer('meal'), [])
self.assertQuerysetEqual(q.only('meal'), [])
def test_ticket7791(self):
# There were "issues" when ordering and distinct-ing on fields related
# via ForeignKeys.
self.assertEqual(
len(Note.objects.order_by('extrainfo__info').distinct()),
3
)
# Pickling of DateQuerySets used to fail
qs = Item.objects.datetimes('created', 'month')
pickle.loads(pickle.dumps(qs))
def test_ticket9997(self):
# If a ValuesList or Values queryset is passed as an inner query, we
# make sure it's only requesting a single value and use that as the
# thing to select.
self.assertQuerysetEqual(
Tag.objects.filter(name__in=Tag.objects.filter(parent=self.t1).values('name')),
['<Tag: t2>', '<Tag: t3>']
)
# Multi-valued values() and values_list() querysets should raise errors.
self.assertRaisesMessage(
TypeError,
'Cannot use a multi-field ValuesQuerySet as a filter value.',
lambda: Tag.objects.filter(name__in=Tag.objects.filter(parent=self.t1).values('name', 'id'))
)
self.assertRaisesMessage(
TypeError,
'Cannot use a multi-field ValuesListQuerySet as a filter value.',
lambda: Tag.objects.filter(name__in=Tag.objects.filter(parent=self.t1).values_list('name', 'id'))
)
def test_ticket9985(self):
# qs.values_list(...).values(...) combinations should work.
self.assertValueQuerysetEqual(
Note.objects.values_list("note", flat=True).values("id").order_by("id"),
[{'id': 1}, {'id': 2}, {'id': 3}]
)
self.assertQuerysetEqual(
Annotation.objects.filter(notes__in=Note.objects.filter(note="n1").values_list('note').values('id')),
['<Annotation: a1>']
)
def test_ticket10205(self):
# When bailing out early because of an empty "__in" filter, we need
# to set things up correctly internally so that subqueries can continue properly.
self.assertEqual(Tag.objects.filter(name__in=()).update(name="foo"), 0)
def test_ticket10432(self):
# Testing an empty "__in" filter with a generator as the value.
def f():
return iter([])
n_obj = Note.objects.all()[0]
def g():
for i in [n_obj.pk]:
yield i
self.assertQuerysetEqual(Note.objects.filter(pk__in=f()), [])
self.assertEqual(list(Note.objects.filter(pk__in=g())), [n_obj])
def test_ticket10742(self):
        # Querysets used in an __in clause aren't evaluated as separate queries
subq = Author.objects.filter(num__lt=3000)
qs = Author.objects.filter(pk__in=subq)
self.assertQuerysetEqual(qs, ['<Author: a1>', '<Author: a2>'])
# The subquery result cache should not be populated
self.assertIsNone(subq._result_cache)
subq = Author.objects.filter(num__lt=3000)
qs = Author.objects.exclude(pk__in=subq)
self.assertQuerysetEqual(qs, ['<Author: a3>', '<Author: a4>'])
# The subquery result cache should not be populated
self.assertIsNone(subq._result_cache)
subq = Author.objects.filter(num__lt=3000)
self.assertQuerysetEqual(
Author.objects.filter(Q(pk__in=subq) & Q(name='a1')),
['<Author: a1>']
)
# The subquery result cache should not be populated
self.assertIsNone(subq._result_cache)
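        # Sketch of what the assertions above rely on: the subquery is inlined
        # into the outer statement rather than evaluated separately, so the
        # combined SQL contains a nested SELECT and runs as one query, e.g.
        #
        #   qs = Author.objects.filter(pk__in=Author.objects.filter(num__lt=3000))
        #   assert str(qs.query).count('SELECT') == 2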
def test_ticket7076(self):
# Excluding shouldn't eliminate NULL entries.
self.assertQuerysetEqual(
Item.objects.exclude(modified=self.time1).order_by('name'),
['<Item: four>', '<Item: three>', '<Item: two>']
)
self.assertQuerysetEqual(
Tag.objects.exclude(parent__name=self.t1.name),
['<Tag: t1>', '<Tag: t4>', '<Tag: t5>']
)
def test_ticket7181(self):
        # Ordering by related tables should accommodate nullable fields (this
        # test is a little tricky, since NULL ordering is database-dependent.
        # Instead, we just count the number of results).
self.assertEqual(len(Tag.objects.order_by('parent__name')), 5)
# Empty querysets can be merged with others.
self.assertQuerysetEqual(
Note.objects.none() | Note.objects.all(),
['<Note: n1>', '<Note: n2>', '<Note: n3>']
)
self.assertQuerysetEqual(
Note.objects.all() | Note.objects.none(),
['<Note: n1>', '<Note: n2>', '<Note: n3>']
)
self.assertQuerysetEqual(Note.objects.none() & Note.objects.all(), [])
self.assertQuerysetEqual(Note.objects.all() & Note.objects.none(), [])
def test_ticket9411(self):
# Make sure bump_prefix() (an internal Query method) doesn't (re-)break. It's
# sufficient that this query runs without error.
qs = Tag.objects.values_list('id', flat=True).order_by('id')
qs.query.bump_prefix(qs.query)
first = qs[0]
self.assertEqual(list(qs), list(range(first, first + 5)))
def test_ticket8439(self):
# Complex combinations of conjunctions, disjunctions and nullable
# relations.
self.assertQuerysetEqual(
Author.objects.filter(Q(item__note__extrainfo=self.e2) | Q(report=self.r1, name='xyz')),
['<Author: a2>']
)
self.assertQuerysetEqual(
Author.objects.filter(Q(report=self.r1, name='xyz') | Q(item__note__extrainfo=self.e2)),
['<Author: a2>']
)
self.assertQuerysetEqual(
Annotation.objects.filter(Q(tag__parent=self.t1) | Q(notes__note='n1', name='a1')),
['<Annotation: a1>']
)
xx = ExtraInfo.objects.create(info='xx', note=self.n3)
self.assertQuerysetEqual(
Note.objects.filter(Q(extrainfo__author=self.a1) | Q(extrainfo=xx)),
['<Note: n1>', '<Note: n3>']
)
q = Note.objects.filter(Q(extrainfo__author=self.a1) | Q(extrainfo=xx)).query
self.assertEqual(
len([x for x in q.alias_map.values() if x.join_type == LOUTER and q.alias_refcount[x.table_alias]]),
1
)
def test_ticket17429(self):
"""
Ensure that Meta.ordering=None works the same as Meta.ordering=[]
"""
original_ordering = Tag._meta.ordering
Tag._meta.ordering = None
try:
self.assertQuerysetEqual(
Tag.objects.all(),
['<Tag: t1>', '<Tag: t2>', '<Tag: t3>', '<Tag: t4>', '<Tag: t5>'],
ordered=False
)
finally:
Tag._meta.ordering = original_ordering
def test_exclude(self):
self.assertQuerysetEqual(
Item.objects.exclude(tags__name='t4'),
[repr(i) for i in Item.objects.filter(~Q(tags__name='t4'))])
self.assertQuerysetEqual(
Item.objects.exclude(Q(tags__name='t4') | Q(tags__name='t3')),
[repr(i) for i in Item.objects.filter(~(Q(tags__name='t4') | Q(tags__name='t3')))])
self.assertQuerysetEqual(
Item.objects.exclude(Q(tags__name='t4') | ~Q(tags__name='t3')),
[repr(i) for i in Item.objects.filter(~(Q(tags__name='t4') | ~Q(tags__name='t3')))])
def test_nested_exclude(self):
self.assertQuerysetEqual(
Item.objects.exclude(~Q(tags__name='t4')),
[repr(i) for i in Item.objects.filter(~~Q(tags__name='t4'))])
def test_double_exclude(self):
self.assertQuerysetEqual(
Item.objects.filter(Q(tags__name='t4')),
[repr(i) for i in Item.objects.filter(~~Q(tags__name='t4'))])
self.assertQuerysetEqual(
Item.objects.filter(Q(tags__name='t4')),
[repr(i) for i in Item.objects.filter(~Q(~Q(tags__name='t4')))])
def test_exclude_in(self):
self.assertQuerysetEqual(
Item.objects.exclude(Q(tags__name__in=['t4', 't3'])),
[repr(i) for i in Item.objects.filter(~Q(tags__name__in=['t4', 't3']))])
self.assertQuerysetEqual(
Item.objects.filter(Q(tags__name__in=['t4', 't3'])),
[repr(i) for i in Item.objects.filter(~~Q(tags__name__in=['t4', 't3']))])
def test_ticket_10790_1(self):
        # Querying direct fields with isnull should trim the left outer join.
        # It also should not create an INNER JOIN.
q = Tag.objects.filter(parent__isnull=True)
self.assertQuerysetEqual(q, ['<Tag: t1>'])
self.assertNotIn('JOIN', str(q.query))
q = Tag.objects.filter(parent__isnull=False)
self.assertQuerysetEqual(
q,
['<Tag: t2>', '<Tag: t3>', '<Tag: t4>', '<Tag: t5>'],
)
self.assertNotIn('JOIN', str(q.query))
q = Tag.objects.exclude(parent__isnull=True)
self.assertQuerysetEqual(
q,
['<Tag: t2>', '<Tag: t3>', '<Tag: t4>', '<Tag: t5>'],
)
self.assertNotIn('JOIN', str(q.query))
q = Tag.objects.exclude(parent__isnull=False)
self.assertQuerysetEqual(q, ['<Tag: t1>'])
self.assertNotIn('JOIN', str(q.query))
q = Tag.objects.exclude(parent__parent__isnull=False)
self.assertQuerysetEqual(
q,
['<Tag: t1>', '<Tag: t2>', '<Tag: t3>'],
)
self.assertEqual(str(q.query).count('LEFT OUTER JOIN'), 1)
self.assertNotIn('INNER JOIN', str(q.query))
def test_ticket_10790_2(self):
# Querying across several tables should strip only the last outer join,
# while preserving the preceding inner joins.
q = Tag.objects.filter(parent__parent__isnull=False)
self.assertQuerysetEqual(
q,
['<Tag: t4>', '<Tag: t5>'],
)
self.assertEqual(str(q.query).count('LEFT OUTER JOIN'), 0)
self.assertEqual(str(q.query).count('INNER JOIN'), 1)
# Querying without isnull should not convert anything to left outer join.
q = Tag.objects.filter(parent__parent=self.t1)
self.assertQuerysetEqual(
q,
['<Tag: t4>', '<Tag: t5>'],
)
self.assertEqual(str(q.query).count('LEFT OUTER JOIN'), 0)
self.assertEqual(str(q.query).count('INNER JOIN'), 1)
def test_ticket_10790_3(self):
# Querying via indirect fields should populate the left outer join
q = NamedCategory.objects.filter(tag__isnull=True)
self.assertEqual(str(q.query).count('LEFT OUTER JOIN'), 1)
# join to dumbcategory ptr_id
self.assertEqual(str(q.query).count('INNER JOIN'), 1)
self.assertQuerysetEqual(q, [])
# Querying across several tables should strip only the last join, while
# preserving the preceding left outer joins.
q = NamedCategory.objects.filter(tag__parent__isnull=True)
self.assertEqual(str(q.query).count('INNER JOIN'), 1)
self.assertEqual(str(q.query).count('LEFT OUTER JOIN'), 1)
self.assertQuerysetEqual(q, ['<NamedCategory: Generic>'])
def test_ticket_10790_4(self):
        # Querying across an m2m field should not strip the m2m table from the join.
q = Author.objects.filter(item__tags__isnull=True)
self.assertQuerysetEqual(
q,
['<Author: a2>', '<Author: a3>'],
)
self.assertEqual(str(q.query).count('LEFT OUTER JOIN'), 2)
self.assertNotIn('INNER JOIN', str(q.query))
q = Author.objects.filter(item__tags__parent__isnull=True)
self.assertQuerysetEqual(
q,
['<Author: a1>', '<Author: a2>', '<Author: a2>', '<Author: a3>'],
)
self.assertEqual(str(q.query).count('LEFT OUTER JOIN'), 3)
self.assertNotIn('INNER JOIN', str(q.query))
def test_ticket_10790_5(self):
        # Querying with isnull=False across an m2m field should not create outer joins
q = Author.objects.filter(item__tags__isnull=False)
self.assertQuerysetEqual(
q,
['<Author: a1>', '<Author: a1>', '<Author: a2>', '<Author: a2>', '<Author: a4>']
)
self.assertEqual(str(q.query).count('LEFT OUTER JOIN'), 0)
self.assertEqual(str(q.query).count('INNER JOIN'), 2)
q = Author.objects.filter(item__tags__parent__isnull=False)
self.assertQuerysetEqual(
q,
['<Author: a1>', '<Author: a2>', '<Author: a4>']
)
self.assertEqual(str(q.query).count('LEFT OUTER JOIN'), 0)
self.assertEqual(str(q.query).count('INNER JOIN'), 3)
q = Author.objects.filter(item__tags__parent__parent__isnull=False)
self.assertQuerysetEqual(
q,
['<Author: a4>']
)
self.assertEqual(str(q.query).count('LEFT OUTER JOIN'), 0)
self.assertEqual(str(q.query).count('INNER JOIN'), 4)
def test_ticket_10790_6(self):
        # Querying with isnull=True across an m2m field should not create
        # inner joins, and should strip the last outer join.
q = Author.objects.filter(item__tags__parent__parent__isnull=True)
self.assertQuerysetEqual(
q,
['<Author: a1>', '<Author: a1>', '<Author: a2>', '<Author: a2>',
'<Author: a2>', '<Author: a3>']
)
self.assertEqual(str(q.query).count('LEFT OUTER JOIN'), 4)
self.assertEqual(str(q.query).count('INNER JOIN'), 0)
q = Author.objects.filter(item__tags__parent__isnull=True)
self.assertQuerysetEqual(
q,
['<Author: a1>', '<Author: a2>', '<Author: a2>', '<Author: a3>']
)
self.assertEqual(str(q.query).count('LEFT OUTER JOIN'), 3)
self.assertEqual(str(q.query).count('INNER JOIN'), 0)
def test_ticket_10790_7(self):
# Reverse querying with isnull should not strip the join
q = Author.objects.filter(item__isnull=True)
self.assertQuerysetEqual(
q,
['<Author: a3>']
)
self.assertEqual(str(q.query).count('LEFT OUTER JOIN'), 1)
self.assertEqual(str(q.query).count('INNER JOIN'), 0)
q = Author.objects.filter(item__isnull=False)
self.assertQuerysetEqual(
q,
['<Author: a1>', '<Author: a2>', '<Author: a2>', '<Author: a4>']
)
self.assertEqual(str(q.query).count('LEFT OUTER JOIN'), 0)
self.assertEqual(str(q.query).count('INNER JOIN'), 1)
def test_ticket_10790_8(self):
# Querying with combined q-objects should also strip the left outer join
q = Tag.objects.filter(Q(parent__isnull=True) | Q(parent=self.t1))
self.assertQuerysetEqual(
q,
['<Tag: t1>', '<Tag: t2>', '<Tag: t3>']
)
self.assertEqual(str(q.query).count('LEFT OUTER JOIN'), 0)
self.assertEqual(str(q.query).count('INNER JOIN'), 0)
def test_ticket_10790_combine(self):
# Combining queries should not re-populate the left outer join
q1 = Tag.objects.filter(parent__isnull=True)
q2 = Tag.objects.filter(parent__isnull=False)
q3 = q1 | q2
self.assertQuerysetEqual(
q3,
['<Tag: t1>', '<Tag: t2>', '<Tag: t3>', '<Tag: t4>', '<Tag: t5>'],
)
self.assertEqual(str(q3.query).count('LEFT OUTER JOIN'), 0)
self.assertEqual(str(q3.query).count('INNER JOIN'), 0)
q3 = q1 & q2
self.assertQuerysetEqual(q3, [])
self.assertEqual(str(q3.query).count('LEFT OUTER JOIN'), 0)
self.assertEqual(str(q3.query).count('INNER JOIN'), 0)
q2 = Tag.objects.filter(parent=self.t1)
q3 = q1 | q2
self.assertQuerysetEqual(
q3,
['<Tag: t1>', '<Tag: t2>', '<Tag: t3>']
)
self.assertEqual(str(q3.query).count('LEFT OUTER JOIN'), 0)
self.assertEqual(str(q3.query).count('INNER JOIN'), 0)
q3 = q2 | q1
self.assertQuerysetEqual(
q3,
['<Tag: t1>', '<Tag: t2>', '<Tag: t3>']
)
self.assertEqual(str(q3.query).count('LEFT OUTER JOIN'), 0)
self.assertEqual(str(q3.query).count('INNER JOIN'), 0)
q1 = Tag.objects.filter(parent__isnull=True)
q2 = Tag.objects.filter(parent__parent__isnull=True)
q3 = q1 | q2
self.assertQuerysetEqual(
q3,
['<Tag: t1>', '<Tag: t2>', '<Tag: t3>']
)
self.assertEqual(str(q3.query).count('LEFT OUTER JOIN'), 1)
self.assertEqual(str(q3.query).count('INNER JOIN'), 0)
q3 = q2 | q1
self.assertQuerysetEqual(
q3,
['<Tag: t1>', '<Tag: t2>', '<Tag: t3>']
)
self.assertEqual(str(q3.query).count('LEFT OUTER JOIN'), 1)
self.assertEqual(str(q3.query).count('INNER JOIN'), 0)
def test_ticket19672(self):
self.assertQuerysetEqual(
Report.objects.filter(Q(creator__isnull=False) &
~Q(creator__extra__value=41)),
['<Report: r1>']
)
def test_ticket_20250(self):
# A negated Q along with an annotated queryset failed in Django 1.4
qs = Author.objects.annotate(Count('item'))
qs = qs.filter(~Q(extra__value=0))
self.assertIn('SELECT', str(qs.query))
self.assertQuerysetEqual(
qs,
['<Author: a1>', '<Author: a2>', '<Author: a3>', '<Author: a4>']
)
def test_callable_args(self):
with warnings.catch_warnings(record=True) as w:
warnings.simplefilter('always')
qs = Tag.objects.filter(name__startswith=lambda: 't')
self.assertQuerysetEqual(
qs,
['<Tag: t1>', '<Tag: t2>', '<Tag: t3>', '<Tag: t4>', '<Tag: t5>']
)
self.assertEqual(len(w), 1)
self.assertTrue(issubclass(w[0].category, RemovedInDjango19Warning))
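# Illustrative helper (our name, not a Django API): the join-trimming
# assertions in the ticket 10790 tests above all follow the same pattern --
# render the SQL with str(qs.query) and count the join keywords. A minimal
# reusable sketch of that pattern:
def count_joins(qs):
    """Return (inner, louter) join counts for a queryset's compiled SQL."""
    sql = str(qs.query)
    return sql.count('INNER JOIN'), sql.count('LEFT OUTER JOIN')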
class Queries2Tests(TestCase):
@classmethod
def setUpTestData(cls):
Number.objects.create(num=4)
Number.objects.create(num=8)
Number.objects.create(num=12)
def test_ticket4289(self):
        # A slight variation on restricting the filtering choices via the
        # lookup constraints.
self.assertQuerysetEqual(Number.objects.filter(num__lt=4), [])
self.assertQuerysetEqual(Number.objects.filter(num__gt=8, num__lt=12), [])
self.assertQuerysetEqual(
Number.objects.filter(num__gt=8, num__lt=13),
['<Number: 12>']
)
self.assertQuerysetEqual(
Number.objects.filter(Q(num__lt=4) | Q(num__gt=8, num__lt=12)),
[]
)
self.assertQuerysetEqual(
Number.objects.filter(Q(num__gt=8, num__lt=12) | Q(num__lt=4)),
[]
)
self.assertQuerysetEqual(
Number.objects.filter(Q(num__gt=8) & Q(num__lt=12) | Q(num__lt=4)),
[]
)
self.assertQuerysetEqual(
Number.objects.filter(Q(num__gt=7) & Q(num__lt=12) | Q(num__lt=4)),
['<Number: 8>']
)
def test_ticket12239(self):
        # Floats were being rounded to integers in gte queries on an integer
        # field. These tests show that gt, lt, gte, and lte work as desired.
        # Note that the fix changes get_prep_lookup for gte and lt queries only.
self.assertQuerysetEqual(
Number.objects.filter(num__gt=11.9),
['<Number: 12>']
)
self.assertQuerysetEqual(Number.objects.filter(num__gt=12), [])
self.assertQuerysetEqual(Number.objects.filter(num__gt=12.0), [])
self.assertQuerysetEqual(Number.objects.filter(num__gt=12.1), [])
self.assertQuerysetEqual(
Number.objects.filter(num__lt=12),
['<Number: 4>', '<Number: 8>'],
ordered=False
)
self.assertQuerysetEqual(
Number.objects.filter(num__lt=12.0),
['<Number: 4>', '<Number: 8>'],
ordered=False
)
self.assertQuerysetEqual(
Number.objects.filter(num__lt=12.1),
['<Number: 4>', '<Number: 8>', '<Number: 12>'],
ordered=False
)
self.assertQuerysetEqual(
Number.objects.filter(num__gte=11.9),
['<Number: 12>']
)
self.assertQuerysetEqual(
Number.objects.filter(num__gte=12),
['<Number: 12>']
)
self.assertQuerysetEqual(
Number.objects.filter(num__gte=12.0),
['<Number: 12>']
)
self.assertQuerysetEqual(Number.objects.filter(num__gte=12.1), [])
self.assertQuerysetEqual(Number.objects.filter(num__gte=12.9), [])
self.assertQuerysetEqual(
Number.objects.filter(num__lte=11.9),
['<Number: 4>', '<Number: 8>'],
ordered=False
)
self.assertQuerysetEqual(
Number.objects.filter(num__lte=12),
['<Number: 4>', '<Number: 8>', '<Number: 12>'],
ordered=False
)
self.assertQuerysetEqual(
Number.objects.filter(num__lte=12.0),
['<Number: 4>', '<Number: 8>', '<Number: 12>'],
ordered=False
)
self.assertQuerysetEqual(
Number.objects.filter(num__lte=12.1),
['<Number: 4>', '<Number: 8>', '<Number: 12>'],
ordered=False
)
self.assertQuerysetEqual(
Number.objects.filter(num__lte=12.9),
['<Number: 4>', '<Number: 8>', '<Number: 12>'],
ordered=False
)
def test_ticket7759(self):
# Count should work with a partially read result set.
count = Number.objects.count()
qs = Number.objects.all()
def run():
for obj in qs:
return qs.count() == count
self.assertTrue(run())
class Queries3Tests(BaseQuerysetTest):
def test_ticket7107(self):
# This shouldn't create an infinite loop.
self.assertQuerysetEqual(Valid.objects.all(), [])
def test_ticket8683(self):
        # Raise a proper error when a DateQuerySet is passed the wrong type of
        # field.
self.assertRaisesMessage(
AssertionError,
"'name' isn't a DateTimeField.",
Item.objects.datetimes, 'name', 'month'
)
def test_ticket22023(self):
# only() and defer() are not applicable for ValuesQuerySet
with self.assertRaisesMessage(NotImplementedError,
"ValuesQuerySet does not implement only()"):
Valid.objects.values().only()
with self.assertRaisesMessage(NotImplementedError,
"ValuesQuerySet does not implement defer()"):
Valid.objects.values().defer()
class Queries4Tests(BaseQuerysetTest):
@classmethod
def setUpTestData(cls):
generic = NamedCategory.objects.create(name="Generic")
cls.t1 = Tag.objects.create(name='t1', category=generic)
n1 = Note.objects.create(note='n1', misc='foo', id=1)
n2 = Note.objects.create(note='n2', misc='bar', id=2)
e1 = ExtraInfo.objects.create(info='e1', note=n1)
e2 = ExtraInfo.objects.create(info='e2', note=n2)
cls.a1 = Author.objects.create(name='a1', num=1001, extra=e1)
cls.a3 = Author.objects.create(name='a3', num=3003, extra=e2)
cls.r1 = Report.objects.create(name='r1', creator=cls.a1)
cls.r2 = Report.objects.create(name='r2', creator=cls.a3)
cls.r3 = Report.objects.create(name='r3')
Item.objects.create(name='i1', created=datetime.datetime.now(), note=n1, creator=cls.a1)
Item.objects.create(name='i2', created=datetime.datetime.now(), note=n1, creator=cls.a3)
def test_ticket11811(self):
unsaved_category = NamedCategory(name="Other")
with six.assertRaisesRegex(self, ValueError,
'Unsaved model instance <NamedCategory: Other> '
'cannot be used in an ORM query.'):
Tag.objects.filter(pk=self.t1.pk).update(category=unsaved_category)
def test_ticket14876(self):
# Note: when combining the query we need to have information available
# about the join type of the trimmed "creator__isnull" join. If we
# don't have that information, then the join is created as INNER JOIN
# and results will be incorrect.
q1 = Report.objects.filter(Q(creator__isnull=True) | Q(creator__extra__info='e1'))
q2 = Report.objects.filter(Q(creator__isnull=True)) | Report.objects.filter(Q(creator__extra__info='e1'))
self.assertQuerysetEqual(q1, ["<Report: r1>", "<Report: r3>"], ordered=False)
self.assertEqual(str(q1.query), str(q2.query))
q1 = Report.objects.filter(Q(creator__extra__info='e1') | Q(creator__isnull=True))
q2 = Report.objects.filter(Q(creator__extra__info='e1')) | Report.objects.filter(Q(creator__isnull=True))
self.assertQuerysetEqual(q1, ["<Report: r1>", "<Report: r3>"], ordered=False)
self.assertEqual(str(q1.query), str(q2.query))
q1 = Item.objects.filter(Q(creator=self.a1) | Q(creator__report__name='r1')).order_by()
q2 = Item.objects.filter(Q(creator=self.a1)).order_by() | Item.objects.filter(Q(creator__report__name='r1')).order_by()
self.assertQuerysetEqual(q1, ["<Item: i1>"])
self.assertEqual(str(q1.query), str(q2.query))
q1 = Item.objects.filter(Q(creator__report__name='e1') | Q(creator=self.a1)).order_by()
q2 = Item.objects.filter(Q(creator__report__name='e1')).order_by() | Item.objects.filter(Q(creator=self.a1)).order_by()
self.assertQuerysetEqual(q1, ["<Item: i1>"])
self.assertEqual(str(q1.query), str(q2.query))
def test_combine_join_reuse(self):
# Test that we correctly recreate joins having identical connections
# in the rhs query, in case the query is ORed together. Related to
# ticket #18748
Report.objects.create(name='r4', creator=self.a1)
q1 = Author.objects.filter(report__name='r5')
q2 = Author.objects.filter(report__name='r4').filter(report__name='r1')
combined = q1 | q2
self.assertEqual(str(combined.query).count('JOIN'), 2)
self.assertEqual(len(combined), 1)
self.assertEqual(combined[0].name, 'a1')
def test_ticket7095(self):
# Updates that are filtered on the model being updated are somewhat
# tricky in MySQL. This exercises that case.
ManagedModel.objects.create(data='mm1', tag=self.t1, public=True)
self.assertEqual(ManagedModel.objects.update(data='mm'), 1)
# A values() or values_list() query across joined models must use outer
# joins appropriately.
# Note: In Oracle, we expect a null CharField to return '' instead of
# None.
if connection.features.interprets_empty_strings_as_nulls:
expected_null_charfield_repr = ''
else:
expected_null_charfield_repr = None
self.assertValueQuerysetEqual(
Report.objects.values_list("creator__extra__info", flat=True).order_by("name"),
['e1', 'e2', expected_null_charfield_repr],
)
# Similarly for select_related(), joins beyond an initial nullable join
# must use outer joins so that all results are included.
self.assertQuerysetEqual(
Report.objects.select_related("creator", "creator__extra").order_by("name"),
['<Report: r1>', '<Report: r2>', '<Report: r3>']
)
# When there are multiple paths to a table from another table, we have
# to be careful not to accidentally reuse an inappropriate join when
# using select_related(). We used to return the parent's Detail record
# here by mistake.
d1 = Detail.objects.create(data="d1")
d2 = Detail.objects.create(data="d2")
m1 = Member.objects.create(name="m1", details=d1)
m2 = Member.objects.create(name="m2", details=d2)
Child.objects.create(person=m2, parent=m1)
obj = m1.children.select_related("person__details")[0]
self.assertEqual(obj.person.details.data, 'd2')
def test_order_by_resetting(self):
# Calling order_by() with no parameters removes any existing ordering on the
# model. But it should still be possible to add new ordering after that.
qs = Author.objects.order_by().order_by('name')
self.assertIn('ORDER BY', qs.query.get_compiler(qs.db).as_sql()[0])
def test_order_by_reverse_fk(self):
        # It is possible to order by the reverse of a foreign key, although
        # that can lead to duplicate results.
c1 = SimpleCategory.objects.create(name="category1")
c2 = SimpleCategory.objects.create(name="category2")
CategoryItem.objects.create(category=c1)
CategoryItem.objects.create(category=c2)
CategoryItem.objects.create(category=c1)
self.assertQuerysetEqual(
SimpleCategory.objects.order_by('categoryitem', 'pk'),
[c1, c2, c1], lambda x: x)
def test_ticket10181(self):
# Avoid raising an EmptyResultSet if an inner query is probably
# empty (and hence, not executed).
self.assertQuerysetEqual(
Tag.objects.filter(id__in=Tag.objects.filter(id__in=[])),
[]
)
def test_ticket15316_filter_false(self):
c1 = SimpleCategory.objects.create(name="category1")
c2 = SpecialCategory.objects.create(name="named category1",
special_name="special1")
c3 = SpecialCategory.objects.create(name="named category2",
special_name="special2")
CategoryItem.objects.create(category=c1)
ci2 = CategoryItem.objects.create(category=c2)
ci3 = CategoryItem.objects.create(category=c3)
qs = CategoryItem.objects.filter(category__specialcategory__isnull=False)
self.assertEqual(qs.count(), 2)
self.assertQuerysetEqual(qs, [ci2.pk, ci3.pk], lambda x: x.pk, False)
def test_ticket15316_exclude_false(self):
c1 = SimpleCategory.objects.create(name="category1")
c2 = SpecialCategory.objects.create(name="named category1",
special_name="special1")
c3 = SpecialCategory.objects.create(name="named category2",
special_name="special2")
ci1 = CategoryItem.objects.create(category=c1)
CategoryItem.objects.create(category=c2)
CategoryItem.objects.create(category=c3)
qs = CategoryItem.objects.exclude(category__specialcategory__isnull=False)
self.assertEqual(qs.count(), 1)
self.assertQuerysetEqual(qs, [ci1.pk], lambda x: x.pk)
def test_ticket15316_filter_true(self):
c1 = SimpleCategory.objects.create(name="category1")
c2 = SpecialCategory.objects.create(name="named category1",
special_name="special1")
c3 = SpecialCategory.objects.create(name="named category2",
special_name="special2")
ci1 = CategoryItem.objects.create(category=c1)
CategoryItem.objects.create(category=c2)
CategoryItem.objects.create(category=c3)
qs = CategoryItem.objects.filter(category__specialcategory__isnull=True)
self.assertEqual(qs.count(), 1)
self.assertQuerysetEqual(qs, [ci1.pk], lambda x: x.pk)
def test_ticket15316_exclude_true(self):
c1 = SimpleCategory.objects.create(name="category1")
c2 = SpecialCategory.objects.create(name="named category1",
special_name="special1")
c3 = SpecialCategory.objects.create(name="named category2",
special_name="special2")
CategoryItem.objects.create(category=c1)
ci2 = CategoryItem.objects.create(category=c2)
ci3 = CategoryItem.objects.create(category=c3)
qs = CategoryItem.objects.exclude(category__specialcategory__isnull=True)
self.assertEqual(qs.count(), 2)
self.assertQuerysetEqual(qs, [ci2.pk, ci3.pk], lambda x: x.pk, False)
def test_ticket15316_one2one_filter_false(self):
c = SimpleCategory.objects.create(name="cat")
c0 = SimpleCategory.objects.create(name="cat0")
c1 = SimpleCategory.objects.create(name="category1")
OneToOneCategory.objects.create(category=c1, new_name="new1")
OneToOneCategory.objects.create(category=c0, new_name="new2")
CategoryItem.objects.create(category=c)
ci2 = CategoryItem.objects.create(category=c0)
ci3 = CategoryItem.objects.create(category=c1)
qs = CategoryItem.objects.filter(category__onetoonecategory__isnull=False)
self.assertEqual(qs.count(), 2)
self.assertQuerysetEqual(qs, [ci2.pk, ci3.pk], lambda x: x.pk, False)
def test_ticket15316_one2one_exclude_false(self):
c = SimpleCategory.objects.create(name="cat")
c0 = SimpleCategory.objects.create(name="cat0")
c1 = SimpleCategory.objects.create(name="category1")
OneToOneCategory.objects.create(category=c1, new_name="new1")
OneToOneCategory.objects.create(category=c0, new_name="new2")
ci1 = CategoryItem.objects.create(category=c)
CategoryItem.objects.create(category=c0)
CategoryItem.objects.create(category=c1)
qs = CategoryItem.objects.exclude(category__onetoonecategory__isnull=False)
self.assertEqual(qs.count(), 1)
self.assertQuerysetEqual(qs, [ci1.pk], lambda x: x.pk)
def test_ticket15316_one2one_filter_true(self):
c = SimpleCategory.objects.create(name="cat")
c0 = SimpleCategory.objects.create(name="cat0")
c1 = SimpleCategory.objects.create(name="category1")
OneToOneCategory.objects.create(category=c1, new_name="new1")
OneToOneCategory.objects.create(category=c0, new_name="new2")
ci1 = CategoryItem.objects.create(category=c)
CategoryItem.objects.create(category=c0)
CategoryItem.objects.create(category=c1)
qs = CategoryItem.objects.filter(category__onetoonecategory__isnull=True)
self.assertEqual(qs.count(), 1)
self.assertQuerysetEqual(qs, [ci1.pk], lambda x: x.pk)
def test_ticket15316_one2one_exclude_true(self):
c = SimpleCategory.objects.create(name="cat")
c0 = SimpleCategory.objects.create(name="cat0")
c1 = SimpleCategory.objects.create(name="category1")
OneToOneCategory.objects.create(category=c1, new_name="new1")
OneToOneCategory.objects.create(category=c0, new_name="new2")
CategoryItem.objects.create(category=c)
ci2 = CategoryItem.objects.create(category=c0)
ci3 = CategoryItem.objects.create(category=c1)
qs = CategoryItem.objects.exclude(category__onetoonecategory__isnull=True)
self.assertEqual(qs.count(), 2)
self.assertQuerysetEqual(qs, [ci2.pk, ci3.pk], lambda x: x.pk, False)
class Queries5Tests(TestCase):
@classmethod
def setUpTestData(cls):
        # Ordering by 'rank' gives us rank2, rank1, rank3. Ordering by
        # Meta.ordering gives us rank3, rank2, rank1.
n1 = Note.objects.create(note='n1', misc='foo', id=1)
n2 = Note.objects.create(note='n2', misc='bar', id=2)
e1 = ExtraInfo.objects.create(info='e1', note=n1)
e2 = ExtraInfo.objects.create(info='e2', note=n2)
a1 = Author.objects.create(name='a1', num=1001, extra=e1)
a2 = Author.objects.create(name='a2', num=2002, extra=e1)
a3 = Author.objects.create(name='a3', num=3003, extra=e2)
cls.rank1 = Ranking.objects.create(rank=2, author=a2)
Ranking.objects.create(rank=1, author=a3)
Ranking.objects.create(rank=3, author=a1)
def test_ordering(self):
        # Cross-model ordering is possible in Meta, too.
self.assertQuerysetEqual(
Ranking.objects.all(),
['<Ranking: 3: a1>', '<Ranking: 2: a2>', '<Ranking: 1: a3>']
)
self.assertQuerysetEqual(
Ranking.objects.all().order_by('rank'),
['<Ranking: 1: a3>', '<Ranking: 2: a2>', '<Ranking: 3: a1>']
)
        # Ordering by extra() pieces is possible, too, and you can mix extra
        # fields and model fields in the ordering.
self.assertQuerysetEqual(
Ranking.objects.extra(tables=['django_site'], order_by=['-django_site.id', 'rank']),
['<Ranking: 1: a3>', '<Ranking: 2: a2>', '<Ranking: 3: a1>']
)
qs = Ranking.objects.extra(select={'good': 'case when rank > 2 then 1 else 0 end'})
self.assertEqual(
[o.good for o in qs.extra(order_by=('-good',))],
[True, False, False]
)
self.assertQuerysetEqual(
qs.extra(order_by=('-good', 'id')),
['<Ranking: 3: a1>', '<Ranking: 2: a2>', '<Ranking: 1: a3>']
)
# Despite having some extra aliases in the query, we can still omit
# them in a values() query.
dicts = qs.values('id', 'rank').order_by('id')
self.assertEqual(
[d['rank'] for d in dicts],
[2, 1, 3]
)
def test_ticket7256(self):
# An empty values() call includes all aliases, including those from an
# extra()
qs = Ranking.objects.extra(select={'good': 'case when rank > 2 then 1 else 0 end'})
dicts = qs.values().order_by('id')
for d in dicts:
del d['id']
del d['author_id']
self.assertEqual(
[sorted(d.items()) for d in dicts],
[[('good', 0), ('rank', 2)], [('good', 0), ('rank', 1)], [('good', 1), ('rank', 3)]]
)
def test_ticket7045(self):
# Extra tables used to crash SQL construction on the second use.
qs = Ranking.objects.extra(tables=['django_site'])
qs.query.get_compiler(qs.db).as_sql()
# test passes if this doesn't raise an exception.
qs.query.get_compiler(qs.db).as_sql()
def test_ticket9848(self):
# Make sure that updates which only filter on sub-tables don't
# inadvertently update the wrong records (bug #9848).
# Make sure that the IDs from different tables don't happen to match.
self.assertQuerysetEqual(
Ranking.objects.filter(author__name='a1'),
['<Ranking: 3: a1>']
)
self.assertEqual(
Ranking.objects.filter(author__name='a1').update(rank='4'),
1
)
r = Ranking.objects.filter(author__name='a1')[0]
self.assertNotEqual(r.id, r.author.id)
self.assertEqual(r.rank, 4)
r.rank = 3
r.save()
self.assertQuerysetEqual(
Ranking.objects.all(),
['<Ranking: 3: a1>', '<Ranking: 2: a2>', '<Ranking: 1: a3>']
)
def test_ticket5261(self):
# Test different empty excludes.
self.assertQuerysetEqual(
Note.objects.exclude(Q()),
['<Note: n1>', '<Note: n2>']
)
self.assertQuerysetEqual(
Note.objects.filter(~Q()),
['<Note: n1>', '<Note: n2>']
)
self.assertQuerysetEqual(
Note.objects.filter(~Q() | ~Q()),
['<Note: n1>', '<Note: n2>']
)
self.assertQuerysetEqual(
Note.objects.exclude(~Q() & ~Q()),
['<Note: n1>', '<Note: n2>']
)
def test_extra_select_literal_percent_s(self):
# Allow %%s to escape select clauses
self.assertEqual(
Note.objects.extra(select={'foo': "'%%s'"})[0].foo,
'%s'
)
self.assertEqual(
Note.objects.extra(select={'foo': "'%%s bar %%s'"})[0].foo,
'%s bar %s'
)
self.assertEqual(
Note.objects.extra(select={'foo': "'bar %%s'"})[0].foo,
'bar %s'
)
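        # Background: extra() select fragments pass through the backend's '%s'
        # placeholder handling, so a literal percent sign must be doubled;
        # '%%s' in the fragment reaches the database as the two characters
        # '%s', as the assertions above show.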
class SelectRelatedTests(TestCase):
def test_tickets_3045_3288(self):
# Once upon a time, select_related() with circular relations would loop
# infinitely if you forgot to specify "depth". Now we set an arbitrary
# default upper bound.
self.assertQuerysetEqual(X.objects.all(), [])
self.assertQuerysetEqual(X.objects.select_related(), [])
class SubclassFKTests(TestCase):
def test_ticket7778(self):
# Model subclasses could not be deleted if a nullable foreign key
# relates to a model that relates back.
num_celebs = Celebrity.objects.count()
tvc = TvChef.objects.create(name="Huey")
self.assertEqual(Celebrity.objects.count(), num_celebs + 1)
Fan.objects.create(fan_of=tvc)
Fan.objects.create(fan_of=tvc)
tvc.delete()
# The parent object should have been deleted as well.
self.assertEqual(Celebrity.objects.count(), num_celebs)
class CustomPkTests(TestCase):
def test_ticket7371(self):
self.assertQuerysetEqual(Related.objects.order_by('custom'), [])
class NullableRelOrderingTests(TestCase):
def test_ticket10028(self):
        # Ordering by a model related through nullable relations(!) should use
        # outer joins, so that all results are included.
Plaything.objects.create(name="p1")
self.assertQuerysetEqual(
Plaything.objects.all(),
['<Plaything: p1>']
)
def test_join_already_in_query(self):
        # Ordering by a model related through nullable relations should not
        # change the join type of already existing joins.
Plaything.objects.create(name="p1")
s = SingleObject.objects.create(name='s')
r = RelatedObject.objects.create(single=s, f=1)
Plaything.objects.create(name="p2", others=r)
qs = Plaything.objects.all().filter(others__isnull=False).order_by('pk')
self.assertNotIn('JOIN', str(qs.query))
qs = Plaything.objects.all().filter(others__f__isnull=False).order_by('pk')
self.assertIn('INNER', str(qs.query))
qs = qs.order_by('others__single__name')
        # The ordering by others__single__name will add one new join (to
        # single) and that join must be a LEFT join. The already existing join
        # to related objects must be kept INNER. So, we have both an INNER and
        # a LEFT join in the query.
self.assertEqual(str(qs.query).count('LEFT'), 1)
self.assertEqual(str(qs.query).count('INNER'), 1)
self.assertQuerysetEqual(
qs,
['<Plaything: p2>']
)
class DisjunctiveFilterTests(TestCase):
@classmethod
def setUpTestData(cls):
cls.n1 = Note.objects.create(note='n1', misc='foo', id=1)
ExtraInfo.objects.create(info='e1', note=cls.n1)
def test_ticket7872(self):
# Another variation on the disjunctive filtering theme.
        # For the purposes of this regression test, it's important that there
        # is no Join object related to the LeafA we create.
LeafA.objects.create(data='first')
self.assertQuerysetEqual(LeafA.objects.all(), ['<LeafA: first>'])
self.assertQuerysetEqual(
LeafA.objects.filter(Q(data='first') | Q(join__b__data='second')),
['<LeafA: first>']
)
def test_ticket8283(self):
# Checking that applying filters after a disjunction works correctly.
self.assertQuerysetEqual(
(ExtraInfo.objects.filter(note=self.n1) | ExtraInfo.objects.filter(info='e2')).filter(note=self.n1),
['<ExtraInfo: e1>']
)
self.assertQuerysetEqual(
(ExtraInfo.objects.filter(info='e2') | ExtraInfo.objects.filter(note=self.n1)).filter(note=self.n1),
['<ExtraInfo: e1>']
)
class Queries6Tests(TestCase):
@classmethod
def setUpTestData(cls):
generic = NamedCategory.objects.create(name="Generic")
t1 = Tag.objects.create(name='t1', category=generic)
Tag.objects.create(name='t2', parent=t1, category=generic)
t3 = Tag.objects.create(name='t3', parent=t1)
t4 = Tag.objects.create(name='t4', parent=t3)
Tag.objects.create(name='t5', parent=t3)
n1 = Note.objects.create(note='n1', misc='foo', id=1)
ann1 = Annotation.objects.create(name='a1', tag=t1)
ann1.notes.add(n1)
Annotation.objects.create(name='a2', tag=t4)
def test_parallel_iterators(self):
# Test that parallel iterators work.
qs = Tag.objects.all()
i1, i2 = iter(qs), iter(qs)
self.assertEqual(repr(next(i1)), '<Tag: t1>')
self.assertEqual(repr(next(i1)), '<Tag: t2>')
self.assertEqual(repr(next(i2)), '<Tag: t1>')
self.assertEqual(repr(next(i2)), '<Tag: t2>')
self.assertEqual(repr(next(i2)), '<Tag: t3>')
self.assertEqual(repr(next(i1)), '<Tag: t3>')
qs = X.objects.all()
self.assertEqual(bool(qs), False)
self.assertEqual(bool(qs), False)
def test_nested_queries_sql(self):
# Nested queries should not evaluate the inner query as part of constructing the
# SQL (so we should see a nested query here, indicated by two "SELECT" calls).
qs = Annotation.objects.filter(notes__in=Note.objects.filter(note="xyzzy"))
self.assertEqual(
qs.query.get_compiler(qs.db).as_sql()[0].count('SELECT'),
2
)
def test_tickets_8921_9188(self):
# Incorrect SQL was being generated for certain types of exclude()
# queries that crossed multi-valued relations (#8921, #9188 and some
# pre-emptively discovered cases).
self.assertQuerysetEqual(
PointerA.objects.filter(connection__pointerb__id=1),
[]
)
self.assertQuerysetEqual(
PointerA.objects.exclude(connection__pointerb__id=1),
[]
)
self.assertQuerysetEqual(
Tag.objects.exclude(children=None),
['<Tag: t1>', '<Tag: t3>']
)
# This example is tricky because the parent could be NULL, so only checking
# parents with annotations omits some results (tag t1, in this case).
self.assertQuerysetEqual(
Tag.objects.exclude(parent__annotation__name="a1"),
['<Tag: t1>', '<Tag: t4>', '<Tag: t5>']
)
        # The annotation->tag link is single-valued and the tag->children link
        # is multi-valued, so we have to split the exclude filter in the
        # middle and then optimize the inner query without losing results.
self.assertQuerysetEqual(
Annotation.objects.exclude(tag__children__name="t2"),
['<Annotation: a2>']
)
        # Nested queries are possible (although they should be used with care,
        # since they can have performance problems on backends like MySQL).
self.assertQuerysetEqual(
Annotation.objects.filter(notes__in=Note.objects.filter(note="n1")),
['<Annotation: a1>']
)
def test_ticket3739(self):
# The all() method on querysets returns a copy of the queryset.
q1 = Tag.objects.order_by('name')
self.assertIsNot(q1, q1.all())
def test_ticket_11320(self):
qs = Tag.objects.exclude(category=None).exclude(category__name='foo')
self.assertEqual(str(qs.query).count(' INNER JOIN '), 1)
class RawQueriesTests(TestCase):
def setUp(self):
Note.objects.create(note='n1', misc='foo', id=1)
def test_ticket14729(self):
        # Test the representation of a raw query with parameters passed as a list.
query = "SELECT * FROM queries_note WHERE note = %s"
params = ['n1']
qs = Note.objects.raw(query, params=params)
self.assertEqual(repr(qs), "<RawQuerySet: SELECT * FROM queries_note WHERE note = n1>")
query = "SELECT * FROM queries_note WHERE note = %s and misc = %s"
params = ['n1', 'foo']
qs = Note.objects.raw(query, params=params)
self.assertEqual(repr(qs), "<RawQuerySet: SELECT * FROM queries_note WHERE note = n1 and misc = foo>")
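        # Background: repr() interpolates the params purely for display; when
        # the raw queryset is executed, the SQL and params are still sent to
        # the database separately as a parameterized statement.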
class GeneratorExpressionTests(TestCase):
def test_ticket10432(self):
# Using an empty generator expression as the rvalue for an "__in"
# lookup is legal.
self.assertQuerysetEqual(
Note.objects.filter(pk__in=(x for x in ())),
[]
)
class ComparisonTests(TestCase):
def setUp(self):
self.n1 = Note.objects.create(note='n1', misc='foo', id=1)
e1 = ExtraInfo.objects.create(info='e1', note=self.n1)
self.a2 = Author.objects.create(name='a2', num=2002, extra=e1)
def test_ticket8597(self):
# Regression tests for case-insensitive comparisons
Item.objects.create(name="a_b", created=datetime.datetime.now(), creator=self.a2, note=self.n1)
Item.objects.create(name="x%y", created=datetime.datetime.now(), creator=self.a2, note=self.n1)
self.assertQuerysetEqual(
Item.objects.filter(name__iexact="A_b"),
['<Item: a_b>']
)
self.assertQuerysetEqual(
Item.objects.filter(name__iexact="x%Y"),
['<Item: x%y>']
)
self.assertQuerysetEqual(
Item.objects.filter(name__istartswith="A_b"),
['<Item: a_b>']
)
self.assertQuerysetEqual(
Item.objects.filter(name__iendswith="A_b"),
['<Item: a_b>']
)
class ExistsSql(TestCase):
def test_exists(self):
with CaptureQueriesContext(connection) as captured_queries:
self.assertFalse(Tag.objects.exists())
        # OK - so the exists query worked - but did it include too many columns?
self.assertEqual(len(captured_queries), 1)
qstr = captured_queries[0]
id, name = connection.ops.quote_name('id'), connection.ops.quote_name('name')
self.assertNotIn(id, qstr)
self.assertNotIn(name, qstr)
def test_ticket_18414(self):
Article.objects.create(name='one', created=datetime.datetime.now())
Article.objects.create(name='one', created=datetime.datetime.now())
Article.objects.create(name='two', created=datetime.datetime.now())
self.assertTrue(Article.objects.exists())
self.assertTrue(Article.objects.distinct().exists())
self.assertTrue(Article.objects.distinct()[1:3].exists())
self.assertFalse(Article.objects.distinct()[1:1].exists())
@unittest.skipUnless(connection.features.can_distinct_on_fields,
'Uses distinct(fields)')
def test_ticket_18414_distinct_on(self):
Article.objects.create(name='one', created=datetime.datetime.now())
Article.objects.create(name='one', created=datetime.datetime.now())
Article.objects.create(name='two', created=datetime.datetime.now())
self.assertTrue(Article.objects.distinct('name').exists())
self.assertTrue(Article.objects.distinct('name')[1:2].exists())
self.assertFalse(Article.objects.distinct('name')[2:3].exists())
class QuerysetOrderedTests(unittest.TestCase):
"""
    Tests for the QuerySet.ordered attribute.
"""
def test_no_default_or_explicit_ordering(self):
self.assertEqual(Annotation.objects.all().ordered, False)
def test_cleared_default_ordering(self):
self.assertEqual(Tag.objects.all().ordered, True)
self.assertEqual(Tag.objects.all().order_by().ordered, False)
def test_explicit_ordering(self):
self.assertEqual(Annotation.objects.all().order_by('id').ordered, True)
def test_order_by_extra(self):
self.assertEqual(Annotation.objects.all().extra(order_by=['id']).ordered, True)
def test_annotated_ordering(self):
qs = Annotation.objects.annotate(num_notes=Count('notes'))
self.assertEqual(qs.ordered, False)
self.assertEqual(qs.order_by('num_notes').ordered, True)
@skipUnlessDBFeature('allow_sliced_subqueries')
class SubqueryTests(TestCase):
@classmethod
def setUpTestData(cls):
DumbCategory.objects.create(id=1)
DumbCategory.objects.create(id=2)
DumbCategory.objects.create(id=3)
DumbCategory.objects.create(id=4)
def test_ordered_subselect(self):
"Subselects honor any manual ordering"
query = DumbCategory.objects.filter(id__in=DumbCategory.objects.order_by('-id')[0:2])
self.assertEqual(set(query.values_list('id', flat=True)), {3, 4})
query = DumbCategory.objects.filter(id__in=DumbCategory.objects.order_by('-id')[:2])
self.assertEqual(set(query.values_list('id', flat=True)), {3, 4})
query = DumbCategory.objects.filter(id__in=DumbCategory.objects.order_by('-id')[1:2])
self.assertEqual(set(query.values_list('id', flat=True)), {3})
query = DumbCategory.objects.filter(id__in=DumbCategory.objects.order_by('-id')[2:])
self.assertEqual(set(query.values_list('id', flat=True)), {1, 2})
def test_slice_subquery_and_query(self):
"""
Slice a query that has a sliced subquery
"""
query = DumbCategory.objects.filter(id__in=DumbCategory.objects.order_by('-id')[0:2])[0:2]
self.assertEqual({x.id for x in query}, {3, 4})
query = DumbCategory.objects.filter(id__in=DumbCategory.objects.order_by('-id')[1:3])[1:3]
self.assertEqual({x.id for x in query}, {3})
query = DumbCategory.objects.filter(id__in=DumbCategory.objects.order_by('-id')[2:])[1:]
self.assertEqual({x.id for x in query}, {2})
def test_related_sliced_subquery(self):
"""
Related objects constraints can safely contain sliced subqueries.
refs #22434
"""
generic = NamedCategory.objects.create(id=5, name="Generic")
t1 = Tag.objects.create(name='t1', category=generic)
t2 = Tag.objects.create(name='t2', category=generic)
ManagedModel.objects.create(data='mm1', tag=t1, public=True)
mm2 = ManagedModel.objects.create(data='mm2', tag=t2, public=True)
query = ManagedModel.normal_manager.filter(
tag__in=Tag.objects.order_by('-id')[:1]
)
self.assertEqual({x.id for x in query}, {mm2.id})
def test_sliced_delete(self):
"Delete queries can safely contain sliced subqueries"
DumbCategory.objects.filter(id__in=DumbCategory.objects.order_by('-id')[0:1]).delete()
self.assertEqual(set(DumbCategory.objects.values_list('id', flat=True)), {1, 2, 3})
DumbCategory.objects.filter(id__in=DumbCategory.objects.order_by('-id')[1:2]).delete()
self.assertEqual(set(DumbCategory.objects.values_list('id', flat=True)), {1, 3})
DumbCategory.objects.filter(id__in=DumbCategory.objects.order_by('-id')[1:]).delete()
self.assertEqual(set(DumbCategory.objects.values_list('id', flat=True)), {3})
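        # Background: the slice on the inner queryset compiles to a
        # LIMIT/OFFSET clause inside the nested SELECT of the DELETE, which is
        # why this test class is guarded by the 'allow_sliced_subqueries'
        # database feature flag.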
class CloneTests(TestCase):
def test_evaluated_queryset_as_argument(self):
"#13227 -- If a queryset is already evaluated, it can still be used as a query arg"
n = Note(note='Test1', misc='misc')
n.save()
e = ExtraInfo(info='good', note=n)
e.save()
n_list = Note.objects.all()
# Evaluate the Note queryset, populating the query cache
list(n_list)
# Use the note queryset in a query, and evaluate
# that query in a way that involves cloning.
self.assertEqual(ExtraInfo.objects.filter(note__in=n_list)[0].info, 'good')
def test_no_model_options_cloning(self):
"""
Test that cloning a queryset does not get out of hand. While complete
testing is impossible, this is a sanity check against invalid use of
deepcopy. refs #16759.
"""
opts_class = type(Note._meta)
note_deepcopy = getattr(opts_class, "__deepcopy__", None)
opts_class.__deepcopy__ = lambda obj, memo: self.fail("Model options shouldn't be cloned.")
try:
Note.objects.filter(pk__lte=F('pk') + 1).all()
finally:
if note_deepcopy is None:
delattr(opts_class, "__deepcopy__")
else:
opts_class.__deepcopy__ = note_deepcopy
def test_no_fields_cloning(self):
"""
Test that cloning a queryset does not get out of hand. While complete
testing is impossible, this is a sanity check against invalid use of
deepcopy. refs #16759.
"""
opts_class = type(Note._meta.get_field("misc"))
note_deepcopy = getattr(opts_class, "__deepcopy__", None)
opts_class.__deepcopy__ = lambda obj, memo: self.fail("Model fields shouldn't be cloned")
try:
Note.objects.filter(note=F('misc')).all()
finally:
if note_deepcopy is None:
delattr(opts_class, "__deepcopy__")
else:
opts_class.__deepcopy__ = note_deepcopy
class EmptyQuerySetTests(TestCase):
def test_emptyqueryset_values(self):
# #14366 -- Calling .values() on an empty QuerySet and then cloning
# that should not cause an error
self.assertQuerysetEqual(
Number.objects.none().values('num').order_by('num'), []
)
def test_values_subquery(self):
self.assertQuerysetEqual(
Number.objects.filter(pk__in=Number.objects.none().values("pk")),
[]
)
self.assertQuerysetEqual(
Number.objects.filter(pk__in=Number.objects.none().values_list("pk")),
[]
)
def test_ticket_19151(self):
# #19151 -- Calling .values() or .values_list() on an empty QuerySet
# should return an empty QuerySet and not cause an error.
q = Author.objects.none()
self.assertQuerysetEqual(q.values(), [])
self.assertQuerysetEqual(q.values_list(), [])
class ValuesQuerysetTests(BaseQuerysetTest):
@classmethod
def setUpTestData(cls):
Number.objects.create(num=72)
cls.identity = staticmethod(lambda x: x)
def test_flat_values_list(self):
qs = Number.objects.values_list("num")
qs = qs.values_list("num", flat=True)
self.assertValueQuerysetEqual(qs, [72])
def test_extra_values(self):
# testing for ticket 14930 issues
qs = Number.objects.extra(select=OrderedDict([('value_plus_x', 'num+%s'),
('value_minus_x', 'num-%s')]),
select_params=(1, 2))
qs = qs.order_by('value_minus_x')
qs = qs.values('num')
self.assertQuerysetEqual(qs, [{'num': 72}], self.identity)
def test_extra_values_order_twice(self):
# testing for ticket 14930 issues
qs = Number.objects.extra(select={'value_plus_one': 'num+1', 'value_minus_one': 'num-1'})
qs = qs.order_by('value_minus_one').order_by('value_plus_one')
qs = qs.values('num')
self.assertQuerysetEqual(qs, [{'num': 72}], self.identity)
def test_extra_values_order_multiple(self):
# Postgres doesn't allow constants in order by, so check for that.
qs = Number.objects.extra(select={
'value_plus_one': 'num+1',
'value_minus_one': 'num-1',
'constant_value': '1'
})
qs = qs.order_by('value_plus_one', 'value_minus_one', 'constant_value')
qs = qs.values('num')
self.assertQuerysetEqual(qs, [{'num': 72}], self.identity)
    def test_extra_values_order_in_extra(self):
        # testing for ticket 14930 issues
        qs = Number.objects.extra(
            select={'value_plus_one': 'num+1', 'value_minus_one': 'num-1'},
            order_by=['value_minus_one'])
        qs = qs.values('num')
        self.assertQuerysetEqual(qs, [{'num': 72}], self.identity)
def test_extra_select_params_values_order_in_extra(self):
# testing for 23259 issue
qs = Number.objects.extra(
select={'value_plus_x': 'num+%s'},
select_params=[1],
order_by=['value_plus_x'])
qs = qs.filter(num=72)
qs = qs.values('num')
self.assertQuerysetEqual(qs, [{'num': 72}], self.identity)
def test_extra_multiple_select_params_values_order_by(self):
# testing for 23259 issue
qs = Number.objects.extra(select=OrderedDict([('value_plus_x', 'num+%s'),
('value_minus_x', 'num-%s')]),
select_params=(72, 72))
qs = qs.order_by('value_minus_x')
qs = qs.filter(num=1)
qs = qs.values('num')
self.assertQuerysetEqual(qs, [], self.identity)
def test_extra_values_list(self):
# testing for ticket 14930 issues
qs = Number.objects.extra(select={'value_plus_one': 'num+1'})
qs = qs.order_by('value_plus_one')
qs = qs.values_list('num')
self.assertQuerysetEqual(qs, [(72,)], self.identity)
def test_flat_extra_values_list(self):
# testing for ticket 14930 issues
qs = Number.objects.extra(select={'value_plus_one': 'num+1'})
qs = qs.order_by('value_plus_one')
qs = qs.values_list('num', flat=True)
self.assertQuerysetEqual(qs, [72], self.identity)
def test_field_error_values_list(self):
# see #23443
with self.assertRaisesMessage(FieldError,
"Cannot resolve keyword %r into field."
" Join on 'name' not permitted." % 'foo'):
Tag.objects.values_list('name__foo')
class QuerySetSupportsPythonIdioms(TestCase):
@classmethod
def setUpTestData(cls):
some_date = datetime.datetime(2014, 5, 16, 12, 1)
for i in range(1, 8):
Article.objects.create(
name="Article {}".format(i), created=some_date)
def get_ordered_articles(self):
return Article.objects.all().order_by('name')
def test_can_get_items_using_index_and_slice_notation(self):
self.assertEqual(self.get_ordered_articles()[0].name, 'Article 1')
self.assertQuerysetEqual(self.get_ordered_articles()[1:3],
["<Article: Article 2>", "<Article: Article 3>"])
def test_slicing_with_steps_can_be_used(self):
self.assertQuerysetEqual(self.get_ordered_articles()[::2],
["<Article: Article 1>",
"<Article: Article 3>",
"<Article: Article 5>",
"<Article: Article 7>"])
@unittest.skipUnless(six.PY2, "Python 2 only -- Python 3 doesn't have longs.")
def test_slicing_works_with_longs(self):
self.assertEqual(self.get_ordered_articles()[long(0)].name, 'Article 1')
self.assertQuerysetEqual(self.get_ordered_articles()[long(1):long(3)],
["<Article: Article 2>", "<Article: Article 3>"])
self.assertQuerysetEqual(self.get_ordered_articles()[::long(2)],
["<Article: Article 1>",
"<Article: Article 3>",
"<Article: Article 5>",
"<Article: Article 7>"])
# And can be mixed with ints.
self.assertQuerysetEqual(self.get_ordered_articles()[1:long(3)],
["<Article: Article 2>", "<Article: Article 3>"])
def test_slicing_without_step_is_lazy(self):
with self.assertNumQueries(0):
self.get_ordered_articles()[0:5]
    def test_slicing_with_steps_is_not_lazy(self):
with self.assertNumQueries(1):
self.get_ordered_articles()[0:5:3]
def test_slicing_can_slice_again_after_slicing(self):
self.assertQuerysetEqual(self.get_ordered_articles()[0:5][0:2],
["<Article: Article 1>",
"<Article: Article 2>"])
self.assertQuerysetEqual(self.get_ordered_articles()[0:5][4:],
["<Article: Article 5>"])
self.assertQuerysetEqual(self.get_ordered_articles()[0:5][5:], [])
# Some more tests!
self.assertQuerysetEqual(self.get_ordered_articles()[2:][0:2],
["<Article: Article 3>", "<Article: Article 4>"])
self.assertQuerysetEqual(self.get_ordered_articles()[2:][:2],
["<Article: Article 3>", "<Article: Article 4>"])
self.assertQuerysetEqual(self.get_ordered_articles()[2:][2:3],
["<Article: Article 5>"])
# Using an offset without a limit is also possible.
self.assertQuerysetEqual(self.get_ordered_articles()[5:],
["<Article: Article 6>",
"<Article: Article 7>"])
def test_slicing_cannot_filter_queryset_once_sliced(self):
six.assertRaisesRegex(
self,
AssertionError,
"Cannot filter a query once a slice has been taken.",
Article.objects.all()[0:5].filter,
id=1,
)
def test_slicing_cannot_reorder_queryset_once_sliced(self):
six.assertRaisesRegex(
self,
AssertionError,
"Cannot reorder a query once a slice has been taken.",
Article.objects.all()[0:5].order_by,
'id',
)
def test_slicing_cannot_combine_queries_once_sliced(self):
six.assertRaisesRegex(
self,
AssertionError,
"Cannot combine queries once a slice has been taken.",
lambda: Article.objects.all()[0:1] & Article.objects.all()[4:5]
)
def test_slicing_negative_indexing_not_supported_for_single_element(self):
"""hint: inverting your ordering might do what you need"""
six.assertRaisesRegex(
self,
AssertionError,
"Negative indexing is not supported.",
lambda: Article.objects.all()[-1]
)
def test_slicing_negative_indexing_not_supported_for_range(self):
"""hint: inverting your ordering might do what you need"""
six.assertRaisesRegex(
self,
AssertionError,
"Negative indexing is not supported.",
lambda: Article.objects.all()[0:-5]
)
def test_can_get_number_of_items_in_queryset_using_standard_len(self):
self.assertEqual(len(Article.objects.filter(name__exact='Article 1')), 1)
def test_can_combine_queries_using_and_and_or_operators(self):
s1 = Article.objects.filter(name__exact='Article 1')
s2 = Article.objects.filter(name__exact='Article 2')
self.assertQuerysetEqual((s1 | s2).order_by('name'),
["<Article: Article 1>",
"<Article: Article 2>"])
self.assertQuerysetEqual(s1 & s2, [])
class WeirdQuerysetSlicingTests(BaseQuerysetTest):
@classmethod
def setUpTestData(cls):
Number.objects.create(num=1)
Number.objects.create(num=2)
Article.objects.create(name='one', created=datetime.datetime.now())
Article.objects.create(name='two', created=datetime.datetime.now())
Article.objects.create(name='three', created=datetime.datetime.now())
Article.objects.create(name='four', created=datetime.datetime.now())
def test_tickets_7698_10202(self):
# People like to slice with '0' as the high-water mark.
self.assertQuerysetEqual(Article.objects.all()[0:0], [])
self.assertQuerysetEqual(Article.objects.all()[0:0][:10], [])
self.assertEqual(Article.objects.all()[:0].count(), 0)
self.assertRaisesMessage(
AssertionError,
'Cannot change a query once a slice has been taken.',
Article.objects.all()[:0].latest, 'created'
)
def test_empty_resultset_sql(self):
# ticket #12192
self.assertNumQueries(0, lambda: list(Number.objects.all()[1:1]))
class EscapingTests(TestCase):
def test_ticket_7302(self):
# Reserved names are appropriately escaped
ReservedName.objects.create(name='a', order=42)
ReservedName.objects.create(name='b', order=37)
self.assertQuerysetEqual(
ReservedName.objects.all().order_by('order'),
['<ReservedName: b>', '<ReservedName: a>']
)
self.assertQuerysetEqual(
ReservedName.objects.extra(select={'stuff': 'name'}, order_by=('order', 'stuff')),
['<ReservedName: b>', '<ReservedName: a>']
)
class ToFieldTests(TestCase):
def test_in_query(self):
apple = Food.objects.create(name="apple")
pear = Food.objects.create(name="pear")
lunch = Eaten.objects.create(food=apple, meal="lunch")
dinner = Eaten.objects.create(food=pear, meal="dinner")
self.assertEqual(
set(Eaten.objects.filter(food__in=[apple, pear])),
{lunch, dinner},
)
def test_reverse_in(self):
apple = Food.objects.create(name="apple")
pear = Food.objects.create(name="pear")
lunch_apple = Eaten.objects.create(food=apple, meal="lunch")
lunch_pear = Eaten.objects.create(food=pear, meal="dinner")
self.assertEqual(
set(Food.objects.filter(eaten__in=[lunch_apple, lunch_pear])),
{apple, pear}
)
def test_single_object(self):
apple = Food.objects.create(name="apple")
lunch = Eaten.objects.create(food=apple, meal="lunch")
dinner = Eaten.objects.create(food=apple, meal="dinner")
self.assertEqual(
set(Eaten.objects.filter(food=apple)),
{lunch, dinner}
)
def test_single_object_reverse(self):
apple = Food.objects.create(name="apple")
lunch = Eaten.objects.create(food=apple, meal="lunch")
self.assertEqual(
set(Food.objects.filter(eaten=lunch)),
{apple}
)
def test_recursive_fk(self):
node1 = Node.objects.create(num=42)
node2 = Node.objects.create(num=1, parent=node1)
self.assertEqual(
list(Node.objects.filter(parent=node1)),
[node2]
)
def test_recursive_fk_reverse(self):
node1 = Node.objects.create(num=42)
node2 = Node.objects.create(num=1, parent=node1)
self.assertEqual(
list(Node.objects.filter(node=node2)),
[node1]
)
class ConditionalTests(BaseQuerysetTest):
"""Tests whose execution depend on different environment conditions like
Python version or DB backend features"""
@classmethod
def setUpTestData(cls):
generic = NamedCategory.objects.create(name="Generic")
t1 = Tag.objects.create(name='t1', category=generic)
Tag.objects.create(name='t2', parent=t1, category=generic)
t3 = Tag.objects.create(name='t3', parent=t1)
Tag.objects.create(name='t4', parent=t3)
Tag.objects.create(name='t5', parent=t3)
def test_infinite_loop(self):
# If you're not careful, it's possible to introduce infinite loops via
# default ordering on foreign keys in a cycle. We detect that.
self.assertRaisesMessage(
FieldError,
'Infinite loop caused by ordering.',
lambda: list(LoopX.objects.all()) # Force queryset evaluation with list()
)
self.assertRaisesMessage(
FieldError,
'Infinite loop caused by ordering.',
lambda: list(LoopZ.objects.all()) # Force queryset evaluation with list()
)
# Note that this doesn't cause an infinite loop, since the default
# ordering on the Tag model is empty (and thus defaults to using "id"
# for the related field).
self.assertEqual(len(Tag.objects.order_by('parent')), 5)
# ... but you can still order in a non-recursive fashion among linked
# fields (the previous test failed because the default ordering was
# recursive).
self.assertQuerysetEqual(
LoopX.objects.all().order_by('y__x__y__x__id'),
[]
)
# When grouping without specifying ordering, we add an explicit "ORDER BY NULL"
# portion in MySQL to prevent unnecessary sorting.
@skipUnlessDBFeature('requires_explicit_null_ordering_when_grouping')
def test_null_ordering_added(self):
query = Tag.objects.values_list('parent_id', flat=True).order_by().query
query.group_by = ['parent_id']
sql = query.get_compiler(DEFAULT_DB_ALIAS).as_sql()[0]
fragment = "ORDER BY "
pos = sql.find(fragment)
self.assertEqual(sql.find(fragment, pos + 1), -1)
self.assertEqual(sql.find("NULL", pos + len(fragment)), pos + len(fragment))
    # SQLite 3 does not support passing in more than 1000 parameters except by
# changing a parameter at compilation time.
@skipUnlessDBFeature('supports_1000_query_parameters')
def test_ticket14244(self):
# Test that the "in" lookup works with lists of 1000 items or more.
        # The number of items is chosen to force three different IN batches
        # for Oracle, yet stay below MSSQL's 2100-parameter limit.
numbers = list(range(2050))
Number.objects.all().delete()
Number.objects.bulk_create(Number(num=num) for num in numbers)
self.assertEqual(
Number.objects.filter(num__in=numbers[:1000]).count(),
1000
)
self.assertEqual(
Number.objects.filter(num__in=numbers[:1001]).count(),
1001
)
self.assertEqual(
Number.objects.filter(num__in=numbers[:2000]).count(),
2000
)
self.assertEqual(
Number.objects.filter(num__in=numbers).count(),
len(numbers)
)
class UnionTests(unittest.TestCase):
"""
Tests for the union of two querysets. Bug #12252.
"""
@classmethod
def setUpTestData(cls):
objectas = []
objectbs = []
objectcs = []
a_info = ['one', 'two', 'three']
for name in a_info:
o = ObjectA(name=name)
o.save()
objectas.append(o)
b_info = [('un', 1, objectas[0]), ('deux', 2, objectas[0]), ('trois', 3, objectas[2])]
for name, number, objecta in b_info:
o = ObjectB(name=name, num=number, objecta=objecta)
o.save()
objectbs.append(o)
c_info = [('ein', objectas[2], objectbs[2]), ('zwei', objectas[1], objectbs[1])]
for name, objecta, objectb in c_info:
o = ObjectC(name=name, objecta=objecta, objectb=objectb)
o.save()
objectcs.append(o)
def check_union(self, model, Q1, Q2):
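        # Property under test: filter(Q1) | filter(Q2) must equal
        # filter(Q1 | Q2), regardless of operand order.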
filter = model.objects.filter
self.assertEqual(set(filter(Q1) | filter(Q2)), set(filter(Q1 | Q2)))
self.assertEqual(set(filter(Q2) | filter(Q1)), set(filter(Q1 | Q2)))
def test_A_AB(self):
Q1 = Q(name='two')
Q2 = Q(objectb__name='deux')
self.check_union(ObjectA, Q1, Q2)
def test_A_AB2(self):
Q1 = Q(name='two')
Q2 = Q(objectb__name='deux', objectb__num=2)
self.check_union(ObjectA, Q1, Q2)
def test_AB_ACB(self):
Q1 = Q(objectb__name='deux')
Q2 = Q(objectc__objectb__name='deux')
self.check_union(ObjectA, Q1, Q2)
def test_BAB_BAC(self):
Q1 = Q(objecta__objectb__name='deux')
Q2 = Q(objecta__objectc__name='ein')
self.check_union(ObjectB, Q1, Q2)
def test_BAB_BACB(self):
Q1 = Q(objecta__objectb__name='deux')
Q2 = Q(objecta__objectc__objectb__name='trois')
self.check_union(ObjectB, Q1, Q2)
def test_BA_BCA__BAB_BAC_BCA(self):
Q1 = Q(objecta__name='one', objectc__objecta__name='two')
Q2 = Q(objecta__objectc__name='ein', objectc__objecta__name='three', objecta__objectb__name='trois')
self.check_union(ObjectB, Q1, Q2)
class DefaultValuesInsertTest(TestCase):
def test_no_extra_params(self):
# Ticket #17056 -- affects Oracle
try:
DumbCategory.objects.create()
except TypeError:
self.fail("Creation of an instance of a model with only the PK field shouldn't error out after bulk insert refactoring (#17056)")
class ExcludeTests(TestCase):
@classmethod
def setUpTestData(cls):
f1 = Food.objects.create(name='apples')
Food.objects.create(name='oranges')
Eaten.objects.create(food=f1, meal='dinner')
j1 = Job.objects.create(name='Manager')
r1 = Responsibility.objects.create(description='Playing golf')
j2 = Job.objects.create(name='Programmer')
r2 = Responsibility.objects.create(description='Programming')
JobResponsibilities.objects.create(job=j1, responsibility=r1)
JobResponsibilities.objects.create(job=j2, responsibility=r2)
def test_to_field(self):
self.assertQuerysetEqual(
Food.objects.exclude(eaten__meal='dinner'),
['<Food: oranges>'])
self.assertQuerysetEqual(
Job.objects.exclude(responsibilities__description='Playing golf'),
['<Job: Programmer>'])
self.assertQuerysetEqual(
Responsibility.objects.exclude(jobs__name='Manager'),
['<Responsibility: Programming>'])
def test_ticket14511(self):
alex = Person.objects.get_or_create(name='Alex')[0]
jane = Person.objects.get_or_create(name='Jane')[0]
oracle = Company.objects.get_or_create(name='Oracle')[0]
google = Company.objects.get_or_create(name='Google')[0]
microsoft = Company.objects.get_or_create(name='Microsoft')[0]
intel = Company.objects.get_or_create(name='Intel')[0]
def employ(employer, employee, title):
Employment.objects.get_or_create(employee=employee, employer=employer, title=title)
employ(oracle, alex, 'Engineer')
employ(oracle, alex, 'Developer')
employ(google, alex, 'Engineer')
employ(google, alex, 'Manager')
employ(microsoft, alex, 'Manager')
employ(intel, alex, 'Manager')
employ(microsoft, jane, 'Developer')
employ(intel, jane, 'Manager')
alex_tech_employers = alex.employers.filter(
employment__title__in=('Engineer', 'Developer')).distinct().order_by('name')
self.assertQuerysetEqual(alex_tech_employers, [google, oracle], lambda x: x)
alex_nontech_employers = alex.employers.exclude(
employment__title__in=('Engineer', 'Developer')).distinct().order_by('name')
self.assertQuerysetEqual(alex_nontech_employers, [google, intel, microsoft], lambda x: x)
class ExcludeTest17600(TestCase):
"""
    Some regression tests for ticket #17600. Some of these likely duplicate
other existing tests.
"""
@classmethod
def setUpTestData(cls):
# Create a few Orders.
cls.o1 = Order.objects.create(pk=1)
cls.o2 = Order.objects.create(pk=2)
cls.o3 = Order.objects.create(pk=3)
# Create some OrderItems for the first order with homogeneous
# status_id values
cls.oi1 = OrderItem.objects.create(order=cls.o1, status=1)
cls.oi2 = OrderItem.objects.create(order=cls.o1, status=1)
cls.oi3 = OrderItem.objects.create(order=cls.o1, status=1)
# Create some OrderItems for the second order with heterogeneous
# status_id values
cls.oi4 = OrderItem.objects.create(order=cls.o2, status=1)
cls.oi5 = OrderItem.objects.create(order=cls.o2, status=2)
cls.oi6 = OrderItem.objects.create(order=cls.o2, status=3)
        # Create some OrderItems for the third order with heterogeneous
# status_id values
cls.oi7 = OrderItem.objects.create(order=cls.o3, status=2)
cls.oi8 = OrderItem.objects.create(order=cls.o3, status=3)
cls.oi9 = OrderItem.objects.create(order=cls.o3, status=4)
def test_exclude_plain(self):
"""
This should exclude Orders which have some items with status 1
"""
self.assertQuerysetEqual(
Order.objects.exclude(items__status=1),
['<Order: 3>'])
def test_exclude_plain_distinct(self):
"""
This should exclude Orders which have some items with status 1
"""
self.assertQuerysetEqual(
Order.objects.exclude(items__status=1).distinct(),
['<Order: 3>'])
def test_exclude_with_q_object_distinct(self):
"""
This should exclude Orders which have some items with status 1
"""
self.assertQuerysetEqual(
Order.objects.exclude(Q(items__status=1)).distinct(),
['<Order: 3>'])
def test_exclude_with_q_object_no_distinct(self):
"""
This should exclude Orders which have some items with status 1
"""
self.assertQuerysetEqual(
Order.objects.exclude(Q(items__status=1)),
['<Order: 3>'])
def test_exclude_with_q_is_equal_to_plain_exclude(self):
"""
Using exclude(condition) and exclude(Q(condition)) should
yield the same QuerySet
"""
self.assertEqual(
list(Order.objects.exclude(items__status=1).distinct()),
list(Order.objects.exclude(Q(items__status=1)).distinct()))
def test_exclude_with_q_is_equal_to_plain_exclude_variation(self):
"""
Using exclude(condition) and exclude(Q(condition)) should
yield the same QuerySet
"""
self.assertEqual(
list(Order.objects.exclude(items__status=1)),
list(Order.objects.exclude(Q(items__status=1)).distinct()))
@unittest.expectedFailure
def test_only_orders_with_all_items_having_status_1(self):
"""
This should only return orders having ALL items set to status 1, or
        those orders not having any items at all. The correct way to write
this query in SQL seems to be using two nested subqueries.
"""
self.assertQuerysetEqual(
Order.objects.exclude(~Q(items__status=1)).distinct(),
['<Order: 1>'])
class Exclude15786(TestCase):
"""Regression test for #15786"""
def test_ticket15786(self):
c1 = SimpleCategory.objects.create(name='c1')
c2 = SimpleCategory.objects.create(name='c2')
OneToOneCategory.objects.create(category=c1)
OneToOneCategory.objects.create(category=c2)
rel = CategoryRelationship.objects.create(first=c1, second=c2)
self.assertEqual(
CategoryRelationship.objects.exclude(
first__onetoonecategory=F('second__onetoonecategory')
).get(), rel
)
class NullInExcludeTest(TestCase):
@classmethod
def setUpTestData(cls):
NullableName.objects.create(name='i1')
NullableName.objects.create()
def test_null_in_exclude_qs(self):
none_val = '' if connection.features.interprets_empty_strings_as_nulls else None
self.assertQuerysetEqual(
NullableName.objects.exclude(name__in=[]),
['i1', none_val], attrgetter('name'))
self.assertQuerysetEqual(
NullableName.objects.exclude(name__in=['i1']),
[none_val], attrgetter('name'))
self.assertQuerysetEqual(
NullableName.objects.exclude(name__in=['i3']),
['i1', none_val], attrgetter('name'))
inner_qs = NullableName.objects.filter(name='i1').values_list('name')
self.assertQuerysetEqual(
NullableName.objects.exclude(name__in=inner_qs),
[none_val], attrgetter('name'))
# Check that the inner queryset wasn't executed - it should be turned
        # into a subquery above
self.assertIs(inner_qs._result_cache, None)
@unittest.expectedFailure
def test_col_not_in_list_containing_null(self):
"""
The following case is not handled properly because
SQL's COL NOT IN (list containing null) handling is too weird to
abstract away.
"""
self.assertQuerysetEqual(
NullableName.objects.exclude(name__in=[None]),
['i1'], attrgetter('name'))
def test_double_exclude(self):
self.assertEqual(
list(NullableName.objects.filter(~~Q(name='i1'))),
list(NullableName.objects.filter(Q(name='i1'))))
self.assertNotIn(
'IS NOT NULL',
str(NullableName.objects.filter(~~Q(name='i1')).query))
class EmptyStringsAsNullTest(TestCase):
"""
Test that filtering on non-null character fields works as expected.
The reason for these tests is that Oracle treats '' as NULL, and this
can cause problems in query construction. Refs #17957.
"""
@classmethod
def setUpTestData(cls):
cls.nc = NamedCategory.objects.create(name='')
def test_direct_exclude(self):
self.assertQuerysetEqual(
NamedCategory.objects.exclude(name__in=['nonexisting']),
[self.nc.pk], attrgetter('pk')
)
def test_joined_exclude(self):
self.assertQuerysetEqual(
DumbCategory.objects.exclude(namedcategory__name__in=['nonexisting']),
[self.nc.pk], attrgetter('pk')
)
def test_21001(self):
foo = NamedCategory.objects.create(name='foo')
self.assertQuerysetEqual(
NamedCategory.objects.exclude(name=''),
[foo.pk], attrgetter('pk')
)
class ProxyQueryCleanupTest(TestCase):
def test_evaluated_proxy_count(self):
"""
Test that generating the query string doesn't alter the query's state
in irreversible ways. Refs #18248.
"""
ProxyCategory.objects.create()
qs = ProxyCategory.objects.all()
self.assertEqual(qs.count(), 1)
str(qs.query)
self.assertEqual(qs.count(), 1)
class WhereNodeTest(TestCase):
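    # Minimal test doubles: DummyNode always compiles to the fixed SQL
    # fragment 'dummy'; MockCompiler quotes names and delegates compilation
    # back to the node, mimicking the real compiler interface.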
class DummyNode(object):
def as_sql(self, compiler, connection):
return 'dummy', []
class MockCompiler(object):
def compile(self, node):
return node.as_sql(self, connection)
def __call__(self, name):
return connection.ops.quote_name(name)
def test_empty_full_handling_conjunction(self):
compiler = WhereNodeTest.MockCompiler()
w = WhereNode(children=[EverythingNode()])
self.assertEqual(w.as_sql(compiler, connection), ('', []))
w.negate()
self.assertRaises(EmptyResultSet, w.as_sql, compiler, connection)
w = WhereNode(children=[NothingNode()])
self.assertRaises(EmptyResultSet, w.as_sql, compiler, connection)
w.negate()
self.assertEqual(w.as_sql(compiler, connection), ('', []))
w = WhereNode(children=[EverythingNode(), EverythingNode()])
self.assertEqual(w.as_sql(compiler, connection), ('', []))
w.negate()
self.assertRaises(EmptyResultSet, w.as_sql, compiler, connection)
w = WhereNode(children=[EverythingNode(), self.DummyNode()])
self.assertEqual(w.as_sql(compiler, connection), ('dummy', []))
w = WhereNode(children=[self.DummyNode(), self.DummyNode()])
self.assertEqual(w.as_sql(compiler, connection), ('(dummy AND dummy)', []))
w.negate()
self.assertEqual(w.as_sql(compiler, connection), ('NOT (dummy AND dummy)', []))
w = WhereNode(children=[NothingNode(), self.DummyNode()])
self.assertRaises(EmptyResultSet, w.as_sql, compiler, connection)
w.negate()
self.assertEqual(w.as_sql(compiler, connection), ('', []))
def test_empty_full_handling_disjunction(self):
compiler = WhereNodeTest.MockCompiler()
w = WhereNode(children=[EverythingNode()], connector='OR')
self.assertEqual(w.as_sql(compiler, connection), ('', []))
w.negate()
self.assertRaises(EmptyResultSet, w.as_sql, compiler, connection)
w = WhereNode(children=[NothingNode()], connector='OR')
self.assertRaises(EmptyResultSet, w.as_sql, compiler, connection)
w.negate()
self.assertEqual(w.as_sql(compiler, connection), ('', []))
w = WhereNode(children=[EverythingNode(), EverythingNode()], connector='OR')
self.assertEqual(w.as_sql(compiler, connection), ('', []))
w.negate()
self.assertRaises(EmptyResultSet, w.as_sql, compiler, connection)
w = WhereNode(children=[EverythingNode(), self.DummyNode()], connector='OR')
self.assertEqual(w.as_sql(compiler, connection), ('', []))
w.negate()
self.assertRaises(EmptyResultSet, w.as_sql, compiler, connection)
w = WhereNode(children=[self.DummyNode(), self.DummyNode()], connector='OR')
self.assertEqual(w.as_sql(compiler, connection), ('(dummy OR dummy)', []))
w.negate()
self.assertEqual(w.as_sql(compiler, connection), ('NOT (dummy OR dummy)', []))
w = WhereNode(children=[NothingNode(), self.DummyNode()], connector='OR')
self.assertEqual(w.as_sql(compiler, connection), ('dummy', []))
w.negate()
self.assertEqual(w.as_sql(compiler, connection), ('NOT (dummy)', []))
def test_empty_nodes(self):
compiler = WhereNodeTest.MockCompiler()
empty_w = WhereNode()
w = WhereNode(children=[empty_w, empty_w])
self.assertEqual(w.as_sql(compiler, connection), (None, []))
w.negate()
self.assertEqual(w.as_sql(compiler, connection), (None, []))
w.connector = 'OR'
self.assertEqual(w.as_sql(compiler, connection), (None, []))
w.negate()
self.assertEqual(w.as_sql(compiler, connection), (None, []))
w = WhereNode(children=[empty_w, NothingNode()], connector='OR')
self.assertRaises(EmptyResultSet, w.as_sql, compiler, connection)
class IteratorExceptionsTest(TestCase):
def test_iter_exceptions(self):
qs = ExtraInfo.objects.only('author')
with self.assertRaises(AttributeError):
list(qs)
def test_invalid_qs_list(self):
# Test for #19895 - second iteration over invalid queryset
# raises errors.
qs = Article.objects.order_by('invalid_column')
self.assertRaises(FieldError, list, qs)
self.assertRaises(FieldError, list, qs)
class NullJoinPromotionOrTest(TestCase):
@classmethod
def setUpTestData(cls):
cls.d1 = ModelD.objects.create(name='foo')
d2 = ModelD.objects.create(name='bar')
cls.a1 = ModelA.objects.create(name='a1', d=cls.d1)
c = ModelC.objects.create(name='c')
b = ModelB.objects.create(name='b', c=c)
cls.a2 = ModelA.objects.create(name='a2', b=b, d=d2)
def test_ticket_17886(self):
# The first Q-object is generating the match, the rest of the filters
# should not remove the match even if they do not match anything. The
# problem here was that b__name generates a LOUTER JOIN, then
        # b__c__name generates a join to c, which the ORM tried to promote but
# failed as that join isn't nullable.
q_obj = (
Q(d__name='foo') |
Q(b__name='foo') |
Q(b__c__name='foo')
)
qset = ModelA.objects.filter(q_obj)
self.assertEqual(list(qset), [self.a1])
# We generate one INNER JOIN to D. The join is direct and not nullable
# so we can use INNER JOIN for it. However, we can NOT use INNER JOIN
# for the b->c join, as a->b is nullable.
self.assertEqual(str(qset.query).count('INNER JOIN'), 1)
def test_isnull_filter_promotion(self):
qs = ModelA.objects.filter(Q(b__name__isnull=True))
self.assertEqual(str(qs.query).count('LEFT OUTER'), 1)
self.assertEqual(list(qs), [self.a1])
qs = ModelA.objects.filter(~Q(b__name__isnull=True))
self.assertEqual(str(qs.query).count('INNER JOIN'), 1)
self.assertEqual(list(qs), [self.a2])
qs = ModelA.objects.filter(~~Q(b__name__isnull=True))
self.assertEqual(str(qs.query).count('LEFT OUTER'), 1)
self.assertEqual(list(qs), [self.a1])
qs = ModelA.objects.filter(Q(b__name__isnull=False))
self.assertEqual(str(qs.query).count('INNER JOIN'), 1)
self.assertEqual(list(qs), [self.a2])
qs = ModelA.objects.filter(~Q(b__name__isnull=False))
self.assertEqual(str(qs.query).count('LEFT OUTER'), 1)
self.assertEqual(list(qs), [self.a1])
qs = ModelA.objects.filter(~~Q(b__name__isnull=False))
self.assertEqual(str(qs.query).count('INNER JOIN'), 1)
self.assertEqual(list(qs), [self.a2])
def test_null_join_demotion(self):
qs = ModelA.objects.filter(Q(b__name__isnull=False) & Q(b__name__isnull=True))
self.assertIn(' INNER JOIN ', str(qs.query))
qs = ModelA.objects.filter(Q(b__name__isnull=True) & Q(b__name__isnull=False))
self.assertIn(' INNER JOIN ', str(qs.query))
qs = ModelA.objects.filter(Q(b__name__isnull=False) | Q(b__name__isnull=True))
self.assertIn(' LEFT OUTER JOIN ', str(qs.query))
qs = ModelA.objects.filter(Q(b__name__isnull=True) | Q(b__name__isnull=False))
self.assertIn(' LEFT OUTER JOIN ', str(qs.query))
def test_ticket_21366(self):
n = Note.objects.create(note='n', misc='m')
e = ExtraInfo.objects.create(info='info', note=n)
a = Author.objects.create(name='Author1', num=1, extra=e)
Ranking.objects.create(rank=1, author=a)
r1 = Report.objects.create(name='Foo', creator=a)
r2 = Report.objects.create(name='Bar')
Report.objects.create(name='Bar', creator=a)
qs = Report.objects.filter(
Q(creator__ranking__isnull=True) |
Q(creator__ranking__rank=1, name='Foo')
)
self.assertEqual(str(qs.query).count('LEFT OUTER JOIN'), 2)
self.assertEqual(str(qs.query).count(' JOIN '), 2)
self.assertQuerysetEqual(
qs.order_by('name'), [r2, r1], lambda x: x)
def test_ticket_21748(self):
i1 = Identifier.objects.create(name='i1')
i2 = Identifier.objects.create(name='i2')
i3 = Identifier.objects.create(name='i3')
Program.objects.create(identifier=i1)
Channel.objects.create(identifier=i1)
Program.objects.create(identifier=i2)
self.assertQuerysetEqual(
Identifier.objects.filter(program=None, channel=None),
[i3], lambda x: x)
self.assertQuerysetEqual(
Identifier.objects.exclude(program=None, channel=None).order_by('name'),
[i1, i2], lambda x: x)
def test_ticket_21748_double_negated_and(self):
i1 = Identifier.objects.create(name='i1')
i2 = Identifier.objects.create(name='i2')
Identifier.objects.create(name='i3')
p1 = Program.objects.create(identifier=i1)
c1 = Channel.objects.create(identifier=i1)
Program.objects.create(identifier=i2)
# Check the ~~Q() (or equivalently .exclude(~Q)) works like Q() for
# join promotion.
qs1_doubleneg = Identifier.objects.exclude(~Q(program__id=p1.id, channel__id=c1.id)).order_by('pk')
qs1_filter = Identifier.objects.filter(program__id=p1.id, channel__id=c1.id).order_by('pk')
self.assertQuerysetEqual(qs1_doubleneg, qs1_filter, lambda x: x)
self.assertEqual(str(qs1_filter.query).count('JOIN'),
str(qs1_doubleneg.query).count('JOIN'))
self.assertEqual(2, str(qs1_doubleneg.query).count('INNER JOIN'))
self.assertEqual(str(qs1_filter.query).count('INNER JOIN'),
str(qs1_doubleneg.query).count('INNER JOIN'))
def test_ticket_21748_double_negated_or(self):
i1 = Identifier.objects.create(name='i1')
i2 = Identifier.objects.create(name='i2')
Identifier.objects.create(name='i3')
p1 = Program.objects.create(identifier=i1)
c1 = Channel.objects.create(identifier=i1)
p2 = Program.objects.create(identifier=i2)
        # Test OR + doubleneg. The expected result is that channel is LOUTER
# joined, program INNER joined
qs1_filter = Identifier.objects.filter(
Q(program__id=p2.id, channel__id=c1.id)
| Q(program__id=p1.id)
).order_by('pk')
qs1_doubleneg = Identifier.objects.exclude(
~Q(Q(program__id=p2.id, channel__id=c1.id)
| Q(program__id=p1.id))
).order_by('pk')
self.assertQuerysetEqual(qs1_doubleneg, qs1_filter, lambda x: x)
self.assertEqual(str(qs1_filter.query).count('JOIN'),
str(qs1_doubleneg.query).count('JOIN'))
self.assertEqual(1, str(qs1_doubleneg.query).count('INNER JOIN'))
self.assertEqual(str(qs1_filter.query).count('INNER JOIN'),
str(qs1_doubleneg.query).count('INNER JOIN'))
def test_ticket_21748_complex_filter(self):
i1 = Identifier.objects.create(name='i1')
i2 = Identifier.objects.create(name='i2')
Identifier.objects.create(name='i3')
p1 = Program.objects.create(identifier=i1)
c1 = Channel.objects.create(identifier=i1)
p2 = Program.objects.create(identifier=i2)
        # Finally, a more complex case: one query written so that each
        # NOT is pushed to the lowest level of the boolean tree, and
        # another query where this isn't done.
qs1 = Identifier.objects.filter(
~Q(~Q(program__id=p2.id, channel__id=c1.id)
& Q(program__id=p1.id))).order_by('pk')
qs2 = Identifier.objects.filter(
Q(Q(program__id=p2.id, channel__id=c1.id)
| ~Q(program__id=p1.id))).order_by('pk')
self.assertQuerysetEqual(qs1, qs2, lambda x: x)
self.assertEqual(str(qs1.query).count('JOIN'),
str(qs2.query).count('JOIN'))
self.assertEqual(0, str(qs1.query).count('INNER JOIN'))
self.assertEqual(str(qs1.query).count('INNER JOIN'),
str(qs2.query).count('INNER JOIN'))
class ReverseJoinTrimmingTest(TestCase):
def test_reverse_trimming(self):
# Check that we don't accidentally trim reverse joins - we can't know
# if there is anything on the other side of the join, so trimming
# reverse joins can't be done, ever.
t = Tag.objects.create()
qs = Tag.objects.filter(annotation__tag=t.pk)
self.assertIn('INNER JOIN', str(qs.query))
self.assertEqual(list(qs), [])
class JoinReuseTest(TestCase):
"""
Test that the queries reuse joins sensibly (for example, direct joins
are always reused).
"""
def test_fk_reuse(self):
qs = Annotation.objects.filter(tag__name='foo').filter(tag__name='bar')
self.assertEqual(str(qs.query).count('JOIN'), 1)
def test_fk_reuse_select_related(self):
qs = Annotation.objects.filter(tag__name='foo').select_related('tag')
self.assertEqual(str(qs.query).count('JOIN'), 1)
def test_fk_reuse_annotation(self):
qs = Annotation.objects.filter(tag__name='foo').annotate(cnt=Count('tag__name'))
self.assertEqual(str(qs.query).count('JOIN'), 1)
def test_fk_reuse_disjunction(self):
qs = Annotation.objects.filter(Q(tag__name='foo') | Q(tag__name='bar'))
self.assertEqual(str(qs.query).count('JOIN'), 1)
def test_fk_reuse_order_by(self):
qs = Annotation.objects.filter(tag__name='foo').order_by('tag__name')
self.assertEqual(str(qs.query).count('JOIN'), 1)
def test_revo2o_reuse(self):
qs = Detail.objects.filter(member__name='foo').filter(member__name='foo')
self.assertEqual(str(qs.query).count('JOIN'), 1)
def test_revfk_noreuse(self):
qs = Author.objects.filter(report__name='r4').filter(report__name='r1')
self.assertEqual(str(qs.query).count('JOIN'), 2)
class DisjunctionPromotionTests(TestCase):
    def test_disjunction_promotion_select_related(self):
fk1 = FK1.objects.create(f1='f1', f2='f2')
basea = BaseA.objects.create(a=fk1)
qs = BaseA.objects.filter(Q(a=fk1) | Q(b=2))
self.assertEqual(str(qs.query).count(' JOIN '), 0)
qs = qs.select_related('a', 'b')
self.assertEqual(str(qs.query).count(' INNER JOIN '), 0)
self.assertEqual(str(qs.query).count(' LEFT OUTER JOIN '), 2)
with self.assertNumQueries(1):
self.assertQuerysetEqual(qs, [basea], lambda x: x)
self.assertEqual(qs[0].a, fk1)
self.assertIs(qs[0].b, None)
def test_disjunction_promotion1(self):
# Pre-existing join, add two ORed filters to the same join,
# all joins can be INNER JOINS.
qs = BaseA.objects.filter(a__f1='foo')
self.assertEqual(str(qs.query).count('INNER JOIN'), 1)
qs = qs.filter(Q(b__f1='foo') | Q(b__f2='foo'))
self.assertEqual(str(qs.query).count('INNER JOIN'), 2)
# Reverse the order of AND and OR filters.
qs = BaseA.objects.filter(Q(b__f1='foo') | Q(b__f2='foo'))
self.assertEqual(str(qs.query).count('INNER JOIN'), 1)
qs = qs.filter(a__f1='foo')
self.assertEqual(str(qs.query).count('INNER JOIN'), 2)
def test_disjunction_promotion2(self):
qs = BaseA.objects.filter(a__f1='foo')
self.assertEqual(str(qs.query).count('INNER JOIN'), 1)
# Now we have two different joins in an ORed condition, these
# must be OUTER joins. The pre-existing join should remain INNER.
qs = qs.filter(Q(b__f1='foo') | Q(c__f2='foo'))
self.assertEqual(str(qs.query).count('INNER JOIN'), 1)
self.assertEqual(str(qs.query).count('LEFT OUTER JOIN'), 2)
# Reverse case.
qs = BaseA.objects.filter(Q(b__f1='foo') | Q(c__f2='foo'))
self.assertEqual(str(qs.query).count('LEFT OUTER JOIN'), 2)
qs = qs.filter(a__f1='foo')
self.assertEqual(str(qs.query).count('INNER JOIN'), 1)
self.assertEqual(str(qs.query).count('LEFT OUTER JOIN'), 2)
def test_disjunction_promotion3(self):
qs = BaseA.objects.filter(a__f2='bar')
self.assertEqual(str(qs.query).count('INNER JOIN'), 1)
        # The ANDed a__f2 filter allows us to keep using INNER JOIN
# even inside the ORed case. If the join to a__ returns nothing,
# the ANDed filter for a__f2 can't be true.
qs = qs.filter(Q(a__f1='foo') | Q(b__f2='foo'))
self.assertEqual(str(qs.query).count('INNER JOIN'), 1)
self.assertEqual(str(qs.query).count('LEFT OUTER JOIN'), 1)
def test_disjunction_promotion3_demote(self):
# This one needs demotion logic: the first filter causes a to be
# outer joined, the second filter makes it inner join again.
qs = BaseA.objects.filter(
Q(a__f1='foo') | Q(b__f2='foo')).filter(a__f2='bar')
self.assertEqual(str(qs.query).count('INNER JOIN'), 1)
self.assertEqual(str(qs.query).count('LEFT OUTER JOIN'), 1)
def test_disjunction_promotion4_demote(self):
qs = BaseA.objects.filter(Q(a=1) | Q(a=2))
self.assertEqual(str(qs.query).count('JOIN'), 0)
        # Demotion is needed for the "a" join. It is marked as an outer join
        # by the above filter (even if it is trimmed away).
qs = qs.filter(a__f1='foo')
self.assertEqual(str(qs.query).count('INNER JOIN'), 1)
def test_disjunction_promotion4(self):
qs = BaseA.objects.filter(a__f1='foo')
self.assertEqual(str(qs.query).count('INNER JOIN'), 1)
qs = qs.filter(Q(a=1) | Q(a=2))
self.assertEqual(str(qs.query).count('INNER JOIN'), 1)
def test_disjunction_promotion5_demote(self):
qs = BaseA.objects.filter(Q(a=1) | Q(a=2))
        # Note that the above filters on "a" force the join to be an
        # inner join even if it is trimmed.
self.assertEqual(str(qs.query).count('JOIN'), 0)
qs = qs.filter(Q(a__f1='foo') | Q(b__f1='foo'))
# So, now the a__f1 join doesn't need promotion.
self.assertEqual(str(qs.query).count('INNER JOIN'), 1)
# But b__f1 does.
self.assertEqual(str(qs.query).count('LEFT OUTER JOIN'), 1)
qs = BaseA.objects.filter(Q(a__f1='foo') | Q(b__f1='foo'))
# Now the join to a is created as LOUTER
self.assertEqual(str(qs.query).count('LEFT OUTER JOIN'), 2)
qs = qs.filter(Q(a=1) | Q(a=2))
self.assertEqual(str(qs.query).count('INNER JOIN'), 1)
self.assertEqual(str(qs.query).count('LEFT OUTER JOIN'), 1)
def test_disjunction_promotion6(self):
qs = BaseA.objects.filter(Q(a=1) | Q(a=2))
self.assertEqual(str(qs.query).count('JOIN'), 0)
qs = BaseA.objects.filter(Q(a__f1='foo') & Q(b__f1='foo'))
self.assertEqual(str(qs.query).count('INNER JOIN'), 2)
self.assertEqual(str(qs.query).count('LEFT OUTER JOIN'), 0)
qs = BaseA.objects.filter(Q(a__f1='foo') & Q(b__f1='foo'))
self.assertEqual(str(qs.query).count('LEFT OUTER JOIN'), 0)
self.assertEqual(str(qs.query).count('INNER JOIN'), 2)
qs = qs.filter(Q(a=1) | Q(a=2))
self.assertEqual(str(qs.query).count('INNER JOIN'), 2)
self.assertEqual(str(qs.query).count('LEFT OUTER JOIN'), 0)
def test_disjunction_promotion7(self):
qs = BaseA.objects.filter(Q(a=1) | Q(a=2))
self.assertEqual(str(qs.query).count('JOIN'), 0)
qs = BaseA.objects.filter(Q(a__f1='foo') | (Q(b__f1='foo') & Q(a__f1='bar')))
self.assertEqual(str(qs.query).count('INNER JOIN'), 1)
self.assertEqual(str(qs.query).count('LEFT OUTER JOIN'), 1)
qs = BaseA.objects.filter(
(Q(a__f1='foo') | Q(b__f1='foo')) & (Q(a__f1='bar') | Q(c__f1='foo'))
)
self.assertEqual(str(qs.query).count('LEFT OUTER JOIN'), 3)
self.assertEqual(str(qs.query).count('INNER JOIN'), 0)
qs = BaseA.objects.filter(
(Q(a__f1='foo') | (Q(a__f1='bar')) & (Q(b__f1='bar') | Q(c__f1='foo')))
)
self.assertEqual(str(qs.query).count('LEFT OUTER JOIN'), 2)
self.assertEqual(str(qs.query).count('INNER JOIN'), 1)
def test_disjunction_promotion_fexpression(self):
qs = BaseA.objects.filter(Q(a__f1=F('b__f1')) | Q(b__f1='foo'))
self.assertEqual(str(qs.query).count('LEFT OUTER JOIN'), 1)
self.assertEqual(str(qs.query).count('INNER JOIN'), 1)
qs = BaseA.objects.filter(Q(a__f1=F('c__f1')) | Q(b__f1='foo'))
self.assertEqual(str(qs.query).count('LEFT OUTER JOIN'), 3)
qs = BaseA.objects.filter(Q(a__f1=F('b__f1')) | Q(a__f2=F('b__f2')) | Q(c__f1='foo'))
self.assertEqual(str(qs.query).count('LEFT OUTER JOIN'), 3)
qs = BaseA.objects.filter(Q(a__f1=F('c__f1')) | (Q(pk=1) & Q(pk=2)))
self.assertEqual(str(qs.query).count('LEFT OUTER JOIN'), 2)
self.assertEqual(str(qs.query).count('INNER JOIN'), 0)
class ManyToManyExcludeTest(TestCase):
def test_exclude_many_to_many(self):
Identifier.objects.create(name='extra')
program = Program.objects.create(identifier=Identifier.objects.create(name='program'))
channel = Channel.objects.create(identifier=Identifier.objects.create(name='channel'))
channel.programs.add(program)
        # channel contains 'program', so all Identifiers except that one
        # should be returned.
self.assertQuerysetEqual(
Identifier.objects.exclude(program__channel=channel).order_by('name'),
['<Identifier: channel>', '<Identifier: extra>']
)
self.assertQuerysetEqual(
Identifier.objects.exclude(program__channel=None).order_by('name'),
['<Identifier: program>']
)
def test_ticket_12823(self):
pg3 = Page.objects.create(text='pg3')
pg2 = Page.objects.create(text='pg2')
pg1 = Page.objects.create(text='pg1')
pa1 = Paragraph.objects.create(text='pa1')
pa1.page = [pg1, pg2]
pa2 = Paragraph.objects.create(text='pa2')
pa2.page = [pg2, pg3]
pa3 = Paragraph.objects.create(text='pa3')
ch1 = Chapter.objects.create(title='ch1', paragraph=pa1)
ch2 = Chapter.objects.create(title='ch2', paragraph=pa2)
ch3 = Chapter.objects.create(title='ch3', paragraph=pa3)
b1 = Book.objects.create(title='b1', chapter=ch1)
b2 = Book.objects.create(title='b2', chapter=ch2)
b3 = Book.objects.create(title='b3', chapter=ch3)
q = Book.objects.exclude(chapter__paragraph__page__text='pg1')
self.assertNotIn('IS NOT NULL', str(q.query))
self.assertEqual(len(q), 2)
self.assertNotIn(b1, q)
self.assertIn(b2, q)
self.assertIn(b3, q)
class RelabelCloneTest(TestCase):
def test_ticket_19964(self):
my1 = MyObject.objects.create(data='foo')
my1.parent = my1
my1.save()
my2 = MyObject.objects.create(data='bar', parent=my1)
parents = MyObject.objects.filter(parent=F('id'))
children = MyObject.objects.filter(parent__in=parents).exclude(parent=F('id'))
self.assertEqual(list(parents), [my1])
# Evaluating the children query (which has parents as part of it) does
# not change results for the parents query.
self.assertEqual(list(children), [my2])
self.assertEqual(list(parents), [my1])
class Ticket20101Tests(TestCase):
def test_ticket_20101(self):
"""
        Tests combining QuerySets with OR in the exclude-subquery case.
"""
t = Tag.objects.create(name='foo')
a1 = Annotation.objects.create(tag=t, name='a1')
a2 = Annotation.objects.create(tag=t, name='a2')
a3 = Annotation.objects.create(tag=t, name='a3')
n = Note.objects.create(note='foo', misc='bar')
qs1 = Note.objects.exclude(annotation__in=[a1, a2])
qs2 = Note.objects.filter(annotation__in=[a3])
self.assertIn(n, qs1)
self.assertNotIn(n, qs2)
self.assertIn(n, (qs1 | qs2))
class EmptyStringPromotionTests(TestCase):
def test_empty_string_promotion(self):
qs = RelatedObject.objects.filter(single__name='')
if connection.features.interprets_empty_strings_as_nulls:
self.assertIn('LEFT OUTER JOIN', str(qs.query))
else:
self.assertNotIn('LEFT OUTER JOIN', str(qs.query))
class ValuesSubqueryTests(TestCase):
def test_values_in_subquery(self):
# Check that if a values() queryset is used, then the given values
# will be used instead of forcing use of the relation's field.
o1 = Order.objects.create(id=-2)
o2 = Order.objects.create(id=-1)
oi1 = OrderItem.objects.create(order=o1, status=0)
oi1.status = oi1.pk
oi1.save()
OrderItem.objects.create(order=o2, status=0)
# The query below should match o1 as it has related order_item
# with id == status.
self.assertQuerysetEqual(
Order.objects.filter(items__in=OrderItem.objects.values_list('status')),
[o1.pk], lambda x: x.pk)
class DoubleInSubqueryTests(TestCase):
def test_double_subquery_in(self):
lfa1 = LeafA.objects.create(data='foo')
lfa2 = LeafA.objects.create(data='bar')
lfb1 = LeafB.objects.create(data='lfb1')
lfb2 = LeafB.objects.create(data='lfb2')
Join.objects.create(a=lfa1, b=lfb1)
Join.objects.create(a=lfa2, b=lfb2)
leaf_as = LeafA.objects.filter(data='foo').values_list('pk', flat=True)
joins = Join.objects.filter(a__in=leaf_as).values_list('b__id', flat=True)
qs = LeafB.objects.filter(pk__in=joins)
self.assertQuerysetEqual(
qs, [lfb1], lambda x: x)
class Ticket18785Tests(TestCase):
def test_ticket_18785(self):
        # Test join trimming from ticket #18785
qs = Item.objects.exclude(
note__isnull=False
).filter(
name='something', creator__extra__isnull=True
).order_by()
self.assertEqual(1, str(qs.query).count('INNER JOIN'))
self.assertEqual(0, str(qs.query).count('OUTER JOIN'))
class Ticket20788Tests(TestCase):
def test_ticket_20788(self):
Paragraph.objects.create()
paragraph = Paragraph.objects.create()
page = paragraph.page.create()
chapter = Chapter.objects.create(paragraph=paragraph)
Book.objects.create(chapter=chapter)
paragraph2 = Paragraph.objects.create()
Page.objects.create()
chapter2 = Chapter.objects.create(paragraph=paragraph2)
book2 = Book.objects.create(chapter=chapter2)
sentences_not_in_pub = Book.objects.exclude(
chapter__paragraph__page=page)
self.assertQuerysetEqual(
sentences_not_in_pub, [book2], lambda x: x)
class Ticket12807Tests(TestCase):
def test_ticket_12807(self):
p1 = Paragraph.objects.create()
p2 = Paragraph.objects.create()
# The ORed condition below should have no effect on the query - the
# ~Q(pk__in=[]) will always be True.
qs = Paragraph.objects.filter((Q(pk=p2.pk) | ~Q(pk__in=[])) & Q(pk=p1.pk))
self.assertQuerysetEqual(qs, [p1], lambda x: x)
class RelatedLookupTypeTests(TestCase):
error = 'Cannot query "%s": Must be "%s" instance.'
@classmethod
def setUpTestData(cls):
cls.oa = ObjectA.objects.create(name="oa")
cls.poa = ProxyObjectA.objects.get(name="oa")
cls.coa = ChildObjectA.objects.create(name="coa")
cls.wrong_type = Order.objects.create(id=cls.oa.pk)
cls.ob = ObjectB.objects.create(name="ob", objecta=cls.oa, num=1)
ProxyObjectB.objects.create(name="pob", objecta=cls.oa, num=2)
cls.pob = ProxyObjectB.objects.all()
ObjectC.objects.create(childobjecta=cls.coa)
def test_wrong_type_lookup(self):
"""
A ValueError is raised when the incorrect object type is passed to a
query lookup.
"""
# Passing incorrect object type
with self.assertRaisesMessage(ValueError,
self.error % (self.wrong_type, ObjectA._meta.object_name)):
ObjectB.objects.get(objecta=self.wrong_type)
with self.assertRaisesMessage(ValueError,
self.error % (self.wrong_type, ObjectA._meta.object_name)):
ObjectB.objects.filter(objecta__in=[self.wrong_type])
with self.assertRaisesMessage(ValueError,
self.error % (self.wrong_type, ObjectA._meta.object_name)):
ObjectB.objects.filter(objecta=self.wrong_type)
with self.assertRaisesMessage(ValueError,
self.error % (self.wrong_type, ObjectB._meta.object_name)):
ObjectA.objects.filter(objectb__in=[self.wrong_type, self.ob])
        # Passing an object of the class on which the query is done.
with self.assertRaisesMessage(ValueError,
self.error % (self.ob, ObjectA._meta.object_name)):
ObjectB.objects.filter(objecta__in=[self.poa, self.ob])
with self.assertRaisesMessage(ValueError,
self.error % (self.ob, ChildObjectA._meta.object_name)):
ObjectC.objects.exclude(childobjecta__in=[self.coa, self.ob])
def test_wrong_backward_lookup(self):
"""
A ValueError is raised when the incorrect object type is passed to a
query lookup for backward relations.
"""
with self.assertRaisesMessage(ValueError,
self.error % (self.oa, ObjectB._meta.object_name)):
ObjectA.objects.filter(objectb__in=[self.oa, self.ob])
with self.assertRaisesMessage(ValueError,
self.error % (self.oa, ObjectB._meta.object_name)):
ObjectA.objects.exclude(objectb=self.oa)
with self.assertRaisesMessage(ValueError,
self.error % (self.wrong_type, ObjectB._meta.object_name)):
ObjectA.objects.get(objectb=self.wrong_type)
def test_correct_lookup(self):
"""
When passing proxy model objects, child objects, or parent objects,
lookups work fine.
"""
out_a = ['<ObjectA: oa>', ]
out_b = ['<ObjectB: ob>', '<ObjectB: pob>']
out_c = ['<ObjectC: >']
# proxy model objects
self.assertQuerysetEqual(ObjectB.objects.filter(objecta=self.poa).order_by('name'), out_b)
self.assertQuerysetEqual(ObjectA.objects.filter(objectb__in=self.pob).order_by('pk'), out_a * 2)
# child objects
self.assertQuerysetEqual(ObjectB.objects.filter(objecta__in=[self.coa]), [])
self.assertQuerysetEqual(ObjectB.objects.filter(objecta__in=[self.poa, self.coa]).order_by('name'), out_b)
# parent objects
self.assertQuerysetEqual(ObjectC.objects.exclude(childobjecta=self.oa), out_c)
# Test for #23226
with self.assertNumQueries(0):
ObjectB.objects.filter(objecta__in=ObjectA.objects.all())
def test_values_queryset_lookup(self):
"""
#23396 - Ensure ValueQuerySets are not checked for compatibility with the lookup field
"""
self.assertQuerysetEqual(ObjectB.objects.filter(
objecta__in=ObjectB.objects.all().values_list('pk')
).order_by('pk'), ['<ObjectB: ob>', '<ObjectB: pob>'])
class Ticket14056Tests(TestCase):
def test_ticket_14056(self):
s1 = SharedConnection.objects.create(data='s1')
s2 = SharedConnection.objects.create(data='s2')
s3 = SharedConnection.objects.create(data='s3')
PointerA.objects.create(connection=s2)
expected_ordering = (
[s1, s3, s2] if connection.features.nulls_order_largest
else [s2, s1, s3]
)
self.assertQuerysetEqual(
SharedConnection.objects.order_by('-pointera__connection', 'pk'),
expected_ordering, lambda x: x
)
class Ticket20955Tests(TestCase):
def test_ticket_20955(self):
jack = Staff.objects.create(name='jackstaff')
jackstaff = StaffUser.objects.create(staff=jack)
jill = Staff.objects.create(name='jillstaff')
jillstaff = StaffUser.objects.create(staff=jill)
task = Task.objects.create(creator=jackstaff, owner=jillstaff, title="task")
task_get = Task.objects.get(pk=task.pk)
# Load data so that assertNumQueries doesn't complain about the get
# version's queries.
task_get.creator.staffuser.staff
task_get.owner.staffuser.staff
qs = Task.objects.select_related(
'creator__staffuser__staff', 'owner__staffuser__staff')
self.assertEqual(str(qs.query).count(' JOIN '), 6)
task_select_related = qs.get(pk=task.pk)
with self.assertNumQueries(0):
self.assertEqual(task_select_related.creator.staffuser.staff,
task_get.creator.staffuser.staff)
self.assertEqual(task_select_related.owner.staffuser.staff,
task_get.owner.staffuser.staff)
class Ticket21203Tests(TestCase):
def test_ticket_21203(self):
p = Ticket21203Parent.objects.create(parent_bool=True)
c = Ticket21203Child.objects.create(parent=p)
qs = Ticket21203Child.objects.select_related('parent').defer('parent__created')
self.assertQuerysetEqual(qs, [c], lambda x: x)
self.assertIs(qs[0].parent.parent_bool, True)
class ValuesJoinPromotionTests(TestCase):
def test_values_no_promotion_for_existing(self):
qs = Node.objects.filter(parent__parent__isnull=False)
self.assertIn(' INNER JOIN ', str(qs.query))
qs = qs.values('parent__parent__id')
self.assertIn(' INNER JOIN ', str(qs.query))
# Make sure there is a left outer join without the filter.
qs = Node.objects.values('parent__parent__id')
self.assertIn(' LEFT OUTER JOIN ', str(qs.query))
def test_non_nullable_fk_not_promoted(self):
qs = ObjectB.objects.values('objecta__name')
self.assertIn(' INNER JOIN ', str(qs.query))
def test_ticket_21376(self):
a = ObjectA.objects.create()
ObjectC.objects.create(objecta=a)
qs = ObjectC.objects.filter(
Q(objecta=a) | Q(objectb__objecta=a),
)
qs = qs.filter(
Q(objectb=1) | Q(objecta=a),
)
self.assertEqual(qs.count(), 1)
tblname = connection.ops.quote_name(ObjectB._meta.db_table)
self.assertIn(' LEFT OUTER JOIN %s' % tblname, str(qs.query))
class ForeignKeyToBaseExcludeTests(TestCase):
def test_ticket_21787(self):
sc1 = SpecialCategory.objects.create(special_name='sc1', name='sc1')
sc2 = SpecialCategory.objects.create(special_name='sc2', name='sc2')
sc3 = SpecialCategory.objects.create(special_name='sc3', name='sc3')
c1 = CategoryItem.objects.create(category=sc1)
CategoryItem.objects.create(category=sc2)
self.assertQuerysetEqual(
SpecialCategory.objects.exclude(
categoryitem__id=c1.pk).order_by('name'),
[sc2, sc3], lambda x: x
)
self.assertQuerysetEqual(
SpecialCategory.objects.filter(categoryitem__id=c1.pk),
[sc1], lambda x: x
)
class ReverseM2MCustomPkTests(TestCase):
def test_ticket_21879(self):
cpt1 = CustomPkTag.objects.create(id='cpt1', tag='cpt1')
cp1 = CustomPk.objects.create(name='cp1', extra='extra')
cp1.custompktag_set.add(cpt1)
self.assertQuerysetEqual(
CustomPk.objects.filter(custompktag=cpt1), [cp1],
lambda x: x)
self.assertQuerysetEqual(
CustomPkTag.objects.filter(custom_pk=cp1), [cpt1],
lambda x: x)
class Ticket22429Tests(TestCase):
def test_ticket_22429(self):
sc1 = School.objects.create()
st1 = Student.objects.create(school=sc1)
sc2 = School.objects.create()
st2 = Student.objects.create(school=sc2)
cr = Classroom.objects.create(school=sc1)
cr.students.add(st1)
queryset = Student.objects.filter(~Q(classroom__school=F('school')))
self.assertQuerysetEqual(queryset, [st2], lambda x: x)
class Ticket23605Tests(TestCase):
def test_ticket_23605(self):
        # Test filtering on a complicated q-object from the ticket's report.
# The query structure is such that we have multiple nested subqueries.
# The original problem was that the inner queries weren't relabeled
# correctly.
a1 = Ticket23605A.objects.create()
a2 = Ticket23605A.objects.create()
c1 = Ticket23605C.objects.create(field_c0=10000.0)
Ticket23605B.objects.create(
field_b0=10000.0, field_b1=True,
modelc_fk=c1, modela_fk=a1)
complex_q = Q(pk__in=Ticket23605A.objects.filter(
Q(
# True for a1 as field_b0 = 10000, field_c0=10000
# False for a2 as no ticket23605b found
ticket23605b__field_b0__gte=1000000 /
F("ticket23605b__modelc_fk__field_c0")
) &
# True for a1 (field_b1=True)
Q(ticket23605b__field_b1=True) &
~Q(ticket23605b__pk__in=Ticket23605B.objects.filter(
~(
                    # Same filters as the commented ones above, but
                    # double-negated (once for the ~Q() above, once for the
                    # parentheses). So, again a1 matches, a2 does not.
Q(field_b1=True) &
Q(field_b0__gte=1000000 / F("modelc_fk__field_c0"))
)
))).filter(ticket23605b__field_b1=True))
qs1 = Ticket23605A.objects.filter(complex_q)
self.assertQuerysetEqual(qs1, [a1], lambda x: x)
qs2 = Ticket23605A.objects.exclude(complex_q)
self.assertQuerysetEqual(qs2, [a2], lambda x: x)
| runekaagaard/django-contrib-locking | tests/queries/tests.py | Python | bsd-3-clause | 154,394 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('events', '0003_event_is_published'),
]
operations = [
migrations.AlterModelOptions(
name='attendee',
options={'verbose_name': 'Asistentes', 'verbose_name_plural': 'Asistentes'},
),
]
| PythonMid/pymidweb | pythonmid/apps/events/migrations/0004_auto_20150527_0123.py | Python | gpl-2.0 | 415 |
from jtapi import *
import os
import sys
import re
mfilename = re.search(r'(.*)\.py', os.path.basename(__file__)).group(1)
#########
# input #
#########
print('jt - %s:' % mfilename)
handles_stream = sys.stdin
handles = gethandles(handles_stream)
input_args = readinputargs(handles)
input_args = checkinputargs(input_args)
##############
# processing #
##############
# here comes your code
data = dict()
output_args = dict()
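# Illustrative sketch only (hypothetical keys -- they depend on your pipeline):
#   data['my_measurement'] = some_computation(input_args)
#   output_args['my_output'] = data['my_measurement']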
##########
# output #
##########
writedata(handles, data)
writeoutputargs(handles, output_args)
| brainy-minds/Jterator | skeleton/modules/myPythonModule.py | Python | mit | 532 |
import os
from setuptools import setup
README = open(os.path.join(os.path.dirname(__file__), 'README.md')).read()
LICENSE = open(os.path.join(os.path.dirname(__file__), 'LICENSE.txt')).read()
# allow setup.py to be run from any path
os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))
setup(
name='django-spreadsheetresponsemixin',
version='0.1.10',
packages=['spreadsheetresponsemixin'],
include_package_data=True,
license=LICENSE,
description='A mixin for views with a queryset that provides a CSV/Excel export.',
long_description=README,
url='https://github.com/birdsarah/django-spreadsheetresponsemixin',
author='Sarah Bird',
author_email='sarah@bonvaya.com',
install_requires=['django>=1.5', 'openpyxl>=2.0.3'],
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: GNU General Public License v3 (GPLv3)',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content'],
)
| aptivate/django-spreadsheetresponsemixin | setup.py | Python | gpl-3.0 | 1,365 |
# -*- coding: utf-8 -*-
import os
import pytz
import time
import click
import signal
import threading
import speedtest_cli as stc
from collections import OrderedDict
from datetime import datetime as dt
stc.shutdown_event = threading.Event()
class SpeedTest(object):
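    """
    Thin wrapper around speedtest_cli: locate the closest servers, pick the
    best one (optionally pinned to a specific server id), and run one test.
    """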
def __init__(self, server_id=None):
self.server_id = server_id
self.config = stc.getConfig()
self.servers = stc.closestServers(self.config.get('client'))
self.best = stc.getBestServer(filter(self.valid_server, self.servers))
def valid_server(self, server):
if self.server_id is None:
return True
return server['id'] == self.server_id
def run(self):
print("Using server: {} in {} ({})".format(
self.best['sponsor'], self.best['name'], self.best['id']))
sizes = [350, 500, 750, 1000, 1500, 2000, 2500, 3000, 3500, 4000]
urls = []
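        # speedtest.net mirrors serve random JPEGs named randomNxN.jpg beside
        # the upload endpoint; request each size four times for a steadier mean.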
for size in sizes:
for i in xrange(0, 4):
urls.append('{}/random{}x{}.jpg'.format(
os.path.dirname(self.best['url']), size, size))
dlspeed = stc.downloadSpeed(urls, True)
ulspeed = stc.uploadSpeed(self.best['url'], sizes, True)
        now = pytz.utc.localize(dt.utcnow())  # dt.now() would mislabel local time as UTC
dlspeed = (dlspeed / 1000. / 1000.) * 8.
ulspeed = (ulspeed / 1000. / 1000.) * 8.
print("Download: {:.4f} Mbits/s | upload: {:.4f} Mbits/s".format(
dlspeed, ulspeed))
return OrderedDict([('date', now.isoformat()),
('download', dlspeed),
('upload', ulspeed),
('server', self.best['id'])])
@click.command()
@click.option('--server', default=None)
@click.option('--interval', type=int, default=None)
@click.option('--output')
def bandviz(server, interval, output):
signal.signal(signal.SIGINT, stc.ctrl_c)
st = SpeedTest(server)
while True:
result = st.run()
if output:
with open(output, 'a') as out:
out.write('"{}",{},{},{}\n'.format(*result.values()))
if interval:
print "INTERVAL", interval
time.sleep(interval * 60) # interval in minutes
else:
break
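# Illustrative invocation (assuming this module is exposed as a `bandviz`
# console entry point):
#   bandviz --server 1234 --interval 5 --output bandwidth.csv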
| elbaschid/bandviz | bandviz/cli.py | Python | mit | 2,253 |
# Generated by Django 2.0.8 on 2018-10-03 17:24
from django.db import migrations, models
import django.utils.timezone
class Migration(migrations.Migration):
dependencies = [
('adesao', '0011_sistemacultura_diligencia_simples'),
]
operations = [
migrations.AlterModelOptions(
name='sistemacultura',
options={'ordering': ['ente_federado', '-alterado_em']},
),
migrations.AddField(
model_name='sistemacultura',
name='alterado_em',
field=models.DateTimeField(default=django.utils.timezone.now, verbose_name='Alterado em'),
),
]
| culturagovbr/sistema-nacional-cultura | adesao/migrations/0014_auto_20181003_1424.py | Python | agpl-3.0 | 645 |
#!/usr/bin/python
"""
Software package management library.
This is an abstraction layer on top of the existing distributions high level
package managers. It supports package operations useful for testing purposes,
and multiple high level package managers (here called backends). If you want
to make this lib to support your particular package manager/distro, please
implement the given backend class.
@author: Higor Vieira Alves (halves@br.ibm.com)
@author: Lucas Meneghel Rodrigues (lmr@redhat.com)
@author: Ramon de Carvalho Valle (rcvalle@br.ibm.com)
@copyright: IBM 2008-2009
@copyright: Red Hat 2009-2010
"""
import os, re, logging, ConfigParser, optparse, random, string
try:
import yum
except Exception:
pass
try:
import autotest.common as common
except ImportError:
import common
from autotest_lib.client.bin import os_dep, utils
from autotest_lib.client.common_lib import error
from autotest_lib.client.common_lib import logging_config, logging_manager
def generate_random_string(length):
"""
Return a random string using alphanumeric characters.
    @param length: Length of the string that will be generated.
    """
    r = random.SystemRandom()
    result = ""
    chars = string.letters + string.digits
    while length > 0:
        result += r.choice(chars)
        length -= 1
    return result
class SoftwareManagerLoggingConfig(logging_config.LoggingConfig):
"""
Used with the sole purpose of providing convenient logging setup
for the KVM test auxiliary programs.
"""
def configure_logging(self, results_dir=None, verbose=False):
super(SoftwareManagerLoggingConfig, self).configure_logging(
use_console=True,
verbose=verbose)
class SystemInspector(object):
"""
System inspector class.
    This may grow to include more complete reports of operating system and
machine properties.
"""
def __init__(self):
"""
Probe system, and save information for future reference.
"""
self.distro = utils.get_os_vendor()
self.high_level_pms = ['apt-get', 'yum', 'zypper']
def get_package_management(self):
"""
Determine the supported package management systems present on the
        system. If more than one package management system is installed, try
to find the best supported system.
"""
list_supported = []
for high_level_pm in self.high_level_pms:
try:
os_dep.command(high_level_pm)
list_supported.append(high_level_pm)
except Exception:
pass
pm_supported = None
if len(list_supported) == 0:
pm_supported = None
if len(list_supported) == 1:
pm_supported = list_supported[0]
elif len(list_supported) > 1:
if 'apt-get' in list_supported and self.distro in ['Debian', 'Ubuntu']:
pm_supported = 'apt-get'
elif 'yum' in list_supported and self.distro == 'Fedora':
pm_supported = 'yum'
else:
pm_supported = list_supported[0]
logging.debug('Package Manager backend: %s' % pm_supported)
return pm_supported
class SoftwareManager(object):
"""
Package management abstraction layer.
It supports a set of common package operations for testing purposes, and it
uses the concept of a backend, a helper class that implements the set of
operations of a given package management tool.
"""
def __init__(self):
"""
Class constructor.
        Determines the best supported package management system for the
        operating system currently running and initializes the appropriate backend.
"""
inspector = SystemInspector()
backend_type = inspector.get_package_management()
if backend_type == 'yum':
self.backend = YumBackend()
elif backend_type == 'zypper':
self.backend = ZypperBackend()
elif backend_type == 'apt-get':
self.backend = AptBackend()
else:
raise NotImplementedError('Unimplemented package management '
'system: %s.' % backend_type)
def check_installed(self, name, version=None, arch=None):
"""
Check whether a package is installed on this system.
@param name: Package name.
@param version: Package version.
@param arch: Package architecture.
"""
return self.backend.check_installed(name, version, arch)
def list_all(self):
"""
List all installed packages.
"""
return self.backend.list_all()
def list_files(self, name):
"""
Get a list of all files installed by package [name].
@param name: Package name.
"""
return self.backend.list_files(name)
def install(self, name):
"""
Install package [name].
@param name: Package name.
"""
return self.backend.install(name)
def remove(self, name):
"""
Remove package [name].
@param name: Package name.
"""
return self.backend.remove(name)
def add_repo(self, url):
"""
Add package repo described by [url].
@param name: URL of the package repo.
"""
return self.backend.add_repo(url)
def remove_repo(self, url):
"""
Remove package repo described by [url].
@param url: URL of the package repo.
"""
return self.backend.remove_repo(url)
def upgrade(self):
"""
Upgrade all packages available.
"""
return self.backend.upgrade()
def provides(self, file):
"""
Returns a list of packages that provides a given capability to the
system (be it a binary, a library).
@param file: Path to the file.
"""
return self.backend.provides(file)
def install_what_provides(self, file):
"""
Installs package that provides [file].
@param file: Path to file.
"""
provides = self.provides(file)
if provides is not None:
self.install(provides)
else:
logging.warning('No package seems to provide %s', file)
class RpmBackend(object):
"""
This class implements operations executed with the rpm package manager.
rpm is a lower level package manager, used by higher level managers such
as yum and zypper.
"""
def __init__(self):
self.lowlevel_base_cmd = os_dep.command('rpm')
def _check_installed_version(self, name, version):
"""
Helper for the check_installed public method.
@param name: Package name.
@param version: Package version.
"""
cmd = (self.lowlevel_base_cmd + ' -q --qf %{VERSION} ' + name +
' 2> /dev/null')
inst_version = utils.system_output(cmd)
if inst_version >= version:
return True
else:
return False
def check_installed(self, name, version=None, arch=None):
"""
Check if package [name] is installed.
@param name: Package name.
@param version: Package version.
@param arch: Package architecture.
"""
if arch:
cmd = (self.lowlevel_base_cmd + ' -q --qf %{ARCH} ' + name +
' 2> /dev/null')
inst_archs = utils.system_output(cmd)
inst_archs = inst_archs.split('\n')
for inst_arch in inst_archs:
if inst_arch == arch:
return self._check_installed_version(name, version)
return False
elif version:
return self._check_installed_version(name, version)
else:
cmd = 'rpm -q ' + name + ' 2> /dev/null'
return (os.system(cmd) == 0)
def list_all(self):
"""
List all installed packages.
"""
installed_packages = utils.system_output('rpm -qa').splitlines()
return installed_packages
def list_files(self, name):
"""
List files installed on the system by package [name].
@param name: Package name.
"""
path = os.path.abspath(name)
if os.path.isfile(path):
option = '-qlp'
name = path
else:
option = '-ql'
l_cmd = 'rpm' + ' ' + option + ' ' + name + ' 2> /dev/null'
try:
result = utils.system_output(l_cmd)
list_files = result.split('\n')
return list_files
except error.CmdError:
return []
class DpkgBackend(object):
"""
This class implements operations executed with the dpkg package manager.
dpkg is a lower level package manager, used by higher level managers such
as apt and aptitude.
"""
def __init__(self):
self.lowlevel_base_cmd = os_dep.command('dpkg')
def check_installed(self, name):
if os.path.isfile(name):
n_cmd = (self.lowlevel_base_cmd + ' -f ' + name +
' Package 2>/dev/null')
name = utils.system_output(n_cmd)
i_cmd = self.lowlevel_base_cmd + ' -s ' + name + ' 2>/dev/null'
# Checking if package is installed
package_status = utils.system_output(i_cmd, ignore_status=True)
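        # 'dpkg -s' exits non-zero for unknown packages; ignore_status lets
        # us inspect the textual status below instead of raising.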
not_inst_pattern = re.compile('not-installed', re.IGNORECASE)
dpkg_not_installed = re.search(not_inst_pattern, package_status)
if dpkg_not_installed:
return False
return True
def list_all(self):
"""
List all packages available in the system.
"""
installed_packages = []
raw_list = utils.system_output('dpkg -l').splitlines()[5:]
for line in raw_list:
parts = line.split()
if parts[0] == "ii": # only grab "installed" packages
installed_packages.append("%s-%s" % (parts[1], parts[2]))
def list_files(self, package):
"""
List files installed by package [package].
@param package: Package name.
@return: List of paths installed by package.
"""
        if os.path.isfile(package):
            l_cmd = self.lowlevel_base_cmd + ' -c ' + package
        else:
            # 'dpkg -L' lists the files a package installed;
            # '-l' would print the package status table instead
            l_cmd = self.lowlevel_base_cmd + ' -L ' + package
return utils.system_output(l_cmd).split('\n')
class YumBackend(RpmBackend):
"""
Implements the yum backend for software manager.
Set of operations for the yum package manager, commonly found on Yellow Dog
Linux and Red Hat based distributions, such as Fedora and Red Hat
Enterprise Linux.
"""
def __init__(self):
"""
Initializes the base command and the yum package repository.
"""
super(YumBackend, self).__init__()
executable = os_dep.command('yum')
base_arguments = '-y'
self.base_command = executable + ' ' + base_arguments
self.repo_file_path = '/etc/yum.repos.d/autotest.repo'
self.cfgparser = ConfigParser.ConfigParser()
self.cfgparser.read(self.repo_file_path)
y_cmd = executable + ' --version | head -1'
self.yum_version = utils.system_output(y_cmd, ignore_status=True)
logging.debug('Yum backend initialized')
logging.debug('Yum version: %s' % self.yum_version)
self.yum_base = yum.YumBase()
def _cleanup(self):
"""
Clean up the yum cache so new package information can be downloaded.
"""
utils.system("yum clean all")
def install(self, name):
"""
Installs package [name]. Handles local installs.
"""
if os.path.isfile(name):
name = os.path.abspath(name)
command = 'localinstall'
else:
command = 'install'
i_cmd = self.base_command + ' ' + command + ' ' + name
try:
utils.system(i_cmd)
return True
except Exception:
return False
def remove(self, name):
"""
Removes package [name].
@param name: Package name (eg. 'ipython').
"""
r_cmd = self.base_command + ' ' + 'erase' + ' ' + name
try:
utils.system(r_cmd)
return True
except Exception:
return False
def add_repo(self, url):
"""
Adds package repository located on [url].
@param url: Universal Resource Locator of the repository.
"""
        # Check if the URL is already set
for section in self.cfgparser.sections():
for option, value in self.cfgparser.items(section):
if option == 'url' and value == url:
return True
# Didn't find it, let's set it up
while True:
section_name = 'software_manager' + '_' + generate_random_string(4)
if not self.cfgparser.has_section(section_name):
break
        self.cfgparser.add_section(section_name)
        self.cfgparser.set(section_name, 'name',
                           'Repository added by the autotest software manager.')
        self.cfgparser.set(section_name, 'url', url)
        self.cfgparser.set(section_name, 'enabled', 1)
        self.cfgparser.set(section_name, 'gpgcheck', 0)
        # ConfigParser.write() expects a file object, not a path
        with open(self.repo_file_path, 'w') as repo_file:
            self.cfgparser.write(repo_file)
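        # The resulting repo file section looks roughly like (illustrative):
        #   [software_manager_ab12]
        #   name = Repository added by the autotest software manager.
        #   url = http://example.com/repo
        #   enabled = 1
        #   gpgcheck = 0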
def remove_repo(self, url):
"""
Removes package repository located on [url].
@param url: Universal Resource Locator of the repository.
"""
        for section in self.cfgparser.sections():
            for option, value in self.cfgparser.items(section):
                if option == 'url' and value == url:
                    self.cfgparser.remove_section(section)
        # Write once, after all matching sections have been removed
        with open(self.repo_file_path, 'w') as repo_file:
            self.cfgparser.write(repo_file)
def upgrade(self):
"""
Upgrade all available packages.
"""
r_cmd = self.base_command + ' ' + 'update'
try:
utils.system(r_cmd)
return True
except Exception:
return False
def provides(self, name):
"""
        Returns the first package found that provides a given capability.
@param name: Capability name (eg, 'foo').
"""
d_provides = self.yum_base.searchPackageProvides(args=[name])
provides_list = [key for key in d_provides]
if provides_list:
logging.info("Package %s provides %s", provides_list[0], name)
return str(provides_list[0])
else:
return None
class ZypperBackend(RpmBackend):
"""
Implements the zypper backend for software manager.
Set of operations for the zypper package manager, found on SUSE Linux.
"""
def __init__(self):
"""
Initializes the base command and the yum package repository.
"""
super(ZypperBackend, self).__init__()
self.base_command = os_dep.command('zypper') + ' -n'
z_cmd = self.base_command + ' --version'
self.zypper_version = utils.system_output(z_cmd, ignore_status=True)
logging.debug('Zypper backend initialized')
logging.debug('Zypper version: %s' % self.zypper_version)
def install(self, name):
"""
Installs package [name]. Handles local installs.
@param name: Package Name.
"""
        if os.path.isfile(name):
            name = os.path.abspath(name)
        i_cmd = self.base_command + ' install -l ' + name
try:
utils.system(i_cmd)
return True
except Exception:
return False
def add_repo(self, url):
"""
Adds repository [url].
@param url: URL for the package repository.
"""
ar_cmd = self.base_command + ' addrepo ' + url
try:
utils.system(ar_cmd)
return True
except Exception:
return False
def remove_repo(self, url):
"""
Removes repository [url].
@param url: URL for the package repository.
"""
rr_cmd = self.base_command + ' removerepo ' + url
try:
utils.system(rr_cmd)
return True
except Exception:
return False
def remove(self, name):
"""
Removes package [name].
"""
r_cmd = self.base_command + ' ' + 'erase' + ' ' + name
try:
utils.system(r_cmd)
return True
except Exception:
return False
def upgrade(self):
"""
Upgrades all packages of the system.
"""
u_cmd = self.base_command + ' update -l'
try:
utils.system(u_cmd)
return True
except Exception:
return False
def provides(self, name):
"""
Searches for what provides a given file.
@param name: File path.
"""
p_cmd = self.base_command + ' what-provides ' + name
list_provides = []
try:
p_output = utils.system_output(p_cmd).split('\n')[4:]
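            # zypper prints a table; the slice above drops the four header
            # lines, and each remaining row holds pipe-separated columns.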
for line in p_output:
line = [a.strip() for a in line.split('|')]
try:
                    state, pname, ptype, version, arch, repository = line
                    if pname not in list_provides:
                        list_provides.append(pname)
                except ValueError:
                    # row did not have the expected six columns
                    pass
if len(list_provides) > 1:
logging.warning('More than one package found, '
                                'using the first query result')
if list_provides:
logging.info("Package %s provides %s", list_provides[0], name)
return list_provides[0]
return None
except Exception:
return None
class AptBackend(DpkgBackend):
"""
Implements the apt backend for software manager.
Set of operations for the apt package manager, commonly found on Debian and
Debian based distributions, such as Ubuntu Linux.
"""
def __init__(self):
"""
Initializes the base command and the debian package repository.
"""
super(AptBackend, self).__init__()
executable = os_dep.command('apt-get')
self.base_command = executable + ' -y'
self.repo_file_path = '/etc/apt/sources.list.d/autotest'
self.apt_version = utils.system_output('apt-get -v | head -1',
ignore_status=True)
logging.debug('Apt backend initialized')
logging.debug('apt version: %s' % self.apt_version)
def install(self, name):
"""
Installs package [name].
@param name: Package name.
"""
command = 'install'
i_cmd = self.base_command + ' ' + command + ' ' + name
try:
utils.system(i_cmd)
return True
except Exception:
return False
def remove(self, name):
"""
Remove package [name].
@param name: Package name.
"""
command = 'remove'
flag = '--purge'
r_cmd = self.base_command + ' ' + command + ' ' + flag + ' ' + name
try:
utils.system(r_cmd)
return True
except Exception:
return False
def add_repo(self, repo):
"""
Add an apt repository.
@param repo: Repository string. Example:
'deb http://archive.ubuntu.com/ubuntu/ maverick universe'
"""
        # A file opened in append mode cannot be read; read existing contents first
        repo_file_contents = ''
        if os.path.isfile(self.repo_file_path):
            repo_file_contents = open(self.repo_file_path).read()
        if repo not in repo_file_contents:
            repo_file = open(self.repo_file_path, 'a')
            repo_file.write(repo + '\n')
            repo_file.close()
def remove_repo(self, repo):
"""
Remove an apt repository.
@param repo: Repository string. Example:
'deb http://archive.ubuntu.com/ubuntu/ maverick universe'
"""
        repo_file = open(self.repo_file_path, 'r')
        new_file_contents = []
        for line in repo_file.readlines():
            # readlines() keeps trailing newlines; strip for the comparison
            if line.rstrip('\n') != repo:
                new_file_contents.append(line.rstrip('\n'))
        repo_file.close()
        new_file_contents = "\n".join(new_file_contents)
        repo_file = open(self.repo_file_path, 'w')
        repo_file.write(new_file_contents)
        repo_file.close()
def upgrade(self):
"""
Upgrade all packages of the system with eventual new versions.
"""
ud_command = 'update'
ud_cmd = self.base_command + ' ' + ud_command
try:
utils.system(ud_cmd)
except Exception:
logging.error("Apt package update failed")
up_command = 'upgrade'
up_cmd = self.base_command + ' ' + up_command
try:
utils.system(up_cmd)
return True
except Exception:
return False
def provides(self, file):
"""
Return a list of packages that provide [file].
@param file: File path.
"""
if not self.check_installed('apt-file'):
self.install('apt-file')
command = os_dep.command('apt-file')
cache_update_cmd = command + ' update'
try:
utils.system(cache_update_cmd, ignore_status=True)
except Exception:
logging.error("Apt file cache update failed")
fu_cmd = command + ' search ' + file
try:
provides = utils.system_output(fu_cmd).split('\n')
list_provides = []
for line in provides:
if line:
try:
line = line.split(':')
package = line[0].strip()
path = line[1].strip()
if path == file and package not in list_provides:
list_provides.append(package)
except IndexError:
pass
if len(list_provides) > 1:
logging.warning('More than one package found, '
                                'using the first query result')
if list_provides:
logging.info("Package %s provides %s", list_provides[0], file)
return list_provides[0]
return None
except Exception:
return None
if __name__ == '__main__':
parser = optparse.OptionParser(
"usage: %prog [install|remove|list-all|list-files|add-repo|remove-repo|"
"upgrade|what-provides|install-what-provides] arguments")
parser.add_option('--verbose', dest="debug", action='store_true',
help='include debug messages in console output')
options, args = parser.parse_args()
debug = options.debug
logging_manager.configure_logging(SoftwareManagerLoggingConfig(),
verbose=debug)
software_manager = SoftwareManager()
if args:
action = args[0]
args = " ".join(args[1:])
else:
action = 'show-help'
if action == 'install':
software_manager.install(args)
elif action == 'remove':
software_manager.remove(args)
    elif action == 'list-all':
software_manager.list_all()
elif action == 'list-files':
software_manager.list_files(args)
elif action == 'add-repo':
software_manager.add_repo(args)
elif action == 'remove-repo':
software_manager.remove_repo(args)
elif action == 'upgrade':
software_manager.upgrade()
elif action == 'what-provides':
software_manager.provides(args)
elif action == 'install-what-provides':
software_manager.install_what_provides(args)
elif action == 'show-help':
parser.print_help()
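# Example invocations (illustrative):
#   software_manager.py install ipython
#   software_manager.py what-provides /usr/bin/ls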
| libvirt/autotest | client/common_lib/software_manager.py | Python | gpl-2.0 | 23,931 |
import os
import numpy as np
np.random.seed(1337)  # for reproducibility; seed with os.getpid() for per-run randomness
from keras.datasets import mnist
from keras.models import Sequential, Model
from keras.layers.convolutional import Conv2D
from keras.layers.pooling import MaxPooling2D
from keras.layers.normalization import BatchNormalization
from keras.layers import Flatten, Dense, Activation, Input
from keras.layers.merge import Add
from keras.utils import np_utils
def create_res_basicblock(input_shape, k):
x = Input(shape=(input_shape))
# residual path
residual = BatchNormalization(axis=1)(x)
residual = Activation('relu')(residual)
residual = Conv2D(k, (3,3), padding='same', use_bias=False, data_format='channels_first')(residual)
residual = BatchNormalization(axis=1)(residual)
residual = Activation('relu')(residual)
residual = Conv2D(k, (3,3), padding='same', use_bias=False, data_format='channels_first')(residual)
y = Add()([x, residual])
block = Model(inputs=[x], outputs=[y])
return block
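# Illustrative use of the residual block (assumes channels-first input):
#   block = create_res_basicblock((16, 28, 28), 16)
#   y = block(x)  # x shaped (batch, 16, 28, 28)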
# mnist input =28x28, 10 classes
def create_mnist_cnn():
model = Sequential()
    # The original source elides the convolutional stack here ("......");
    # a minimal assumed stack (illustrative) so the model builds end to end:
    model.add(Conv2D(32, (3, 3), activation='relu',
                     input_shape=(1, 28, 28), data_format='channels_first'))
    model.add(MaxPooling2D(pool_size=(2, 2), data_format='channels_first'))
model.add(Flatten())
model.add(Dense(100, activation='relu'))
model.add(Dense(10, activation='softmax'))
model.compile(loss='categorical_crossentropy', optimizer='adam', metrics=['accuracy'])
return model
# load data and reshape the Tensors
(X_train, y_train), (X_test, y_test) = mnist.load_data()
X_train = X_train.reshape(X_train.shape[0], 1, 28, 28)
X_test = X_test.reshape(X_test.shape[0], 1, 28, 28)
X_train = X_train.astype('float32')
X_test = X_test.astype('float32')
X_train /= 255
X_test /= 255
# convert class vectors to binary class matrices
Y_train = np_utils.to_categorical(y_train, 10)
Y_test = np_utils.to_categorical(y_test, 10)
model = create_mnist_cnn()
model.summary()  # summary() prints the layer table itself; no print() needed
model.fit(X_train, Y_train, batch_size=64, epochs=5, verbose=1)
score = model.evaluate(X_test, Y_test, verbose=0)
print('Test score:', score[0])
print('Test accuracy:', score[1])
| kornjas/data-science | create_res_basicblock.py | Python | bsd-3-clause | 2,043 |
import click
from infra_buddy.aws.cloudformation import CloudFormationBuddy
from infra_buddy.commandline import cli
from infra_buddy.context.deploy_ctx import DeployContext
from infra_buddy.utility import print_utility
@cli.command(name='introspect', short_help="Search infra-buddy managed services for a resource.")
@click.option("--type-filter", help="Constrain search to a AWS resource type.")
@click.pass_obj
def deploy_cloudformation(deploy_ctx,type_filter):
# type: (DeployContext,str) -> None
do_command(deploy_ctx,type_filter)
def do_command(deploy_ctx,type_filter):
# type: (DeployContext,str) -> None
cf_buddy = CloudFormationBuddy(deploy_ctx=deploy_ctx)
stacks = cf_buddy.list_stacks(deploy_ctx.stack_name)
resources = cf_buddy.load_resources_for_stack_list(stacks)
    for stack_name, stack_resources in resources.items():
        print_utility.banner("Stack: {}".format(stack_name))
        for resource in stack_resources:
if not type_filter or type_filter in resource['ResourceType']:
print_utility.info_banner("\tName: {}".format(resource['LogicalResourceId']))
print_utility.info_banner("\tType: {}".format(resource['ResourceType']))
| AlienVault-Engineering/infra-buddy | src/main/python/infra_buddy/commands/introspect/command.py | Python | apache-2.0 | 1,430 |
"""Nutanix Integration for Cortex XSOAR - Unit Tests file"""
import io
import json
from datetime import datetime
from typing import Callable, Dict
import pytest
from CommonServerPython import DemistoException, CommandResults
from NutanixHypervisor import Client
from NutanixHypervisor import USECS_ENTRIES_MAPPING
from NutanixHypervisor import nutanix_hypervisor_hosts_list_command, \
nutanix_hypervisor_vms_list_command, nutanix_hypervisor_vm_power_status_change_command, \
nutanix_hypervisor_task_results_get_command, nutanix_hpyervisor_alerts_list_command, \
nutanix_hypervisor_alert_acknowledge_command, nutanix_hypervisor_alert_resolve_command, \
nutanix_hypervisor_alerts_acknowledge_by_filter_command, \
nutanix_hypervisor_alerts_resolve_by_filter_command, get_alert_status_filter, \
get_optional_boolean_arg, convert_epoch_time_to_datetime, \
get_optional_time_parameter_as_epoch, add_iso_entries_to_dict, \
get_human_readable_headers, task_exists
MOCKED_BASE_URL = 'https://prefix:11111/PrismGateway/services/rest/v2.0'
client = Client(base_url=MOCKED_BASE_URL, verify=False, proxy=False, auth=('fake_username', 'fake_password'))
def util_load_json(path):
with io.open(path, mode='r', encoding='utf-8') as f:
return json.loads(f.read())
command_tests_data = util_load_json('test_data/test_command_data.json')
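# Each entry of command_tests_data holds the Demisto args, the mocked URL
# suffix, the mocked Nutanix response and the expected outputs for one
# command (see the parametrize blocks below).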
@pytest.mark.parametrize('args, argument_name, expected',
[({'resolved': 'true'}, 'resolved', True),
({'resolved': 'false'}, 'resolved', False),
({}, 'resolved', None),
])
def test_get_optional_boolean_arg_valid(args, argument_name, expected):
"""
Given:
- Demisto arguments.
- Argument name to extract from Demisto arguments as boolean.
When:
- Case a: Argument exists, and is true.
- Case b: Argument exists, and is false.
    - Case c: Argument does not exist.
Then:
- Case a: Ensure that True is returned.
- Case b: Ensure that False is returned.
- Case c: Ensure that None is returned.
"""
assert (get_optional_boolean_arg(args, argument_name)) == expected
@pytest.mark.parametrize('args, argument_name, expected_error_message',
[({'resolved': 'unknown_boolean_value'}, 'resolved',
'Argument does not contain a valid boolean-like value'),
({'resolved': 123}, 'resolved',
'Argument is neither a string nor a boolean'),
])
def test_get_optional_boolean_arg_invalid_argument(args, argument_name, expected_error_message):
"""
Given:
- Demisto arguments.
- Argument name to extract from Demisto arguments as boolean.
When:
    - Case a: Argument is a non-boolean string.
- Case b: Argument is a number.
Then:
- Case a: Ensure that DemistoException is thrown with error message which indicates that string cannot be
parsed to boolean.
- Case b: Ensure that DemistoException is thrown with error message which indicates that type of the argument
is not bool or string that can be parsed.
"""
with pytest.raises(ValueError, match=expected_error_message):
get_optional_boolean_arg(args, argument_name)
@pytest.mark.parametrize('arg, expected',
[('2020-11-22T16:31:14', 1606062674000000),
(None, None),
])
def test_get_optional_time_parameter_valid_time_argument(arg, expected):
"""
Given:
- Demisto arguments.
- Argument of type time to extract from Demisto arguments as epoch time.
When:
- Case a: Argument exists, and has the expected date format.
- Case b: Argument does not exist.
Then:
- Case a: Ensure that the corresponding epoch time is returned.
- Case b: Ensure that None is returned.
"""
assert (get_optional_time_parameter_as_epoch(arg)) == expected
@pytest.mark.parametrize('command_function, args, url_suffix, response, expected',
[(nutanix_hypervisor_hosts_list_command,
command_tests_data['nutanix-hypervisor-hosts-list']['args'],
command_tests_data['nutanix-hypervisor-hosts-list']['suffix'],
command_tests_data['nutanix-hypervisor-hosts-list']['response'],
command_tests_data['nutanix-hypervisor-hosts-list']['expected']),
(nutanix_hypervisor_vms_list_command,
command_tests_data['nutanix-hypervisor-vms-list']['args'],
command_tests_data['nutanix-hypervisor-vms-list']['suffix'],
command_tests_data['nutanix-hypervisor-vms-list']['response'],
command_tests_data['nutanix-hypervisor-vms-list']['expected']),
(nutanix_hpyervisor_alerts_list_command,
command_tests_data['nutanix-hypervisor-alerts-list']['args'],
command_tests_data['nutanix-hypervisor-alerts-list']['suffix'],
command_tests_data['nutanix-hypervisor-alerts-list']['response'],
command_tests_data['nutanix-hypervisor-alerts-list']['expected'])
])
def test_commands_get_methods(requests_mock, command_function: Callable[[Client, Dict], CommandResults], args: Dict,
url_suffix: str, response: Dict, expected: Dict):
"""
Given:
- command function.
- Demisto arguments.
- url suffix of the Nutanix service endpoint that the command function will use (needed to mock the request).
- response returned from Nutanix.
- expected CommandResults object to be returned from the command function.
When:
- Executing a command
Then:
- Ensure that the expected CommandResults object is returned by the command function.
"""
requests_mock.get(
f'{MOCKED_BASE_URL}/{url_suffix}',
json=response
)
expected_command_results = CommandResults(
outputs_prefix=expected.get('outputs_prefix'),
outputs_key_field=expected.get('outputs_key_field'),
outputs=expected.get('outputs')
)
returned_command_results = command_function(client, args)
assert returned_command_results.outputs_prefix == expected_command_results.outputs_prefix
assert returned_command_results.outputs_key_field == expected_command_results.outputs_key_field
assert returned_command_results.outputs == expected_command_results.outputs
@pytest.mark.parametrize('command_function, args, url_suffix, response, expected',
[(nutanix_hypervisor_vm_power_status_change_command,
command_tests_data['nutanix-hypervisor-vm-powerstatus-change']['args'],
command_tests_data['nutanix-hypervisor-vm-powerstatus-change']['suffix'],
command_tests_data['nutanix-hypervisor-vm-powerstatus-change']['response'],
command_tests_data['nutanix-hypervisor-vm-powerstatus-change']['expected']),
(nutanix_hypervisor_task_results_get_command,
command_tests_data['nutanix-hypervisor-task-results-get']['args'],
command_tests_data['nutanix-hypervisor-task-results-get']['suffix'],
command_tests_data['nutanix-hypervisor-task-results-get']['response'],
command_tests_data['nutanix-hypervisor-task-results-get']['expected']),
(nutanix_hypervisor_alert_acknowledge_command,
command_tests_data['nutanix-hypervisor-alert-acknowledge']['args'],
command_tests_data['nutanix-hypervisor-alert-acknowledge']['suffix'],
command_tests_data['nutanix-hypervisor-alert-acknowledge']['response'],
command_tests_data['nutanix-hypervisor-alert-acknowledge']['expected']),
(nutanix_hypervisor_alert_resolve_command,
command_tests_data['nutanix-hypervisor-alert-resolve']['args'],
command_tests_data['nutanix-hypervisor-alert-resolve']['suffix'],
command_tests_data['nutanix-hypervisor-alert-resolve']['response'],
command_tests_data['nutanix-hypervisor-alert-resolve']['expected']),
(nutanix_hypervisor_alerts_acknowledge_by_filter_command,
command_tests_data['nutanix-hypervisor-alerts-acknowledge-by-filter']['args'],
command_tests_data['nutanix-hypervisor-alerts-acknowledge-by-filter']['suffix'],
command_tests_data['nutanix-hypervisor-alerts-acknowledge-by-filter']['response'],
command_tests_data['nutanix-hypervisor-alerts-acknowledge-by-filter']['expected']),
(nutanix_hypervisor_alerts_resolve_by_filter_command,
command_tests_data['nutanix-hypervisor-alerts-resolve-by-filter']['args'],
command_tests_data['nutanix-hypervisor-alerts-resolve-by-filter']['suffix'],
command_tests_data['nutanix-hypervisor-alerts-resolve-by-filter']['response'],
command_tests_data['nutanix-hypervisor-alerts-resolve-by-filter']['expected']),
])
def test_commands_post_methods(requests_mock, command_function: Callable[[Client, Dict], CommandResults], args: Dict,
url_suffix: str, response: Dict, expected: Dict):
"""
Given:
- command function.
- Demisto arguments.
- url suffix of the Nutanix service endpoint that the command function will use (needed to mock the request).
- response returned from Nutanix.
- expected CommandResults object to be returned from the command function.
When:
- Executing a command
Then:
- Ensure that the expected CommandResults object is returned by the command function.
"""
requests_mock.post(
f'{MOCKED_BASE_URL}/{url_suffix}',
json=response
)
expected_command_results = CommandResults(
outputs_prefix=expected.get('outputs_prefix'),
outputs_key_field=expected.get('outputs_key_field'),
outputs=expected.get('outputs')
)
returned_command_results = command_function(client, args)
assert returned_command_results.outputs_prefix == expected_command_results.outputs_prefix
assert returned_command_results.outputs_key_field == expected_command_results.outputs_key_field
assert returned_command_results.outputs == expected_command_results.outputs
def test_fetch_incidents(requests_mock):
"""
Given:
- Demisto parameters.
- Demisto arguments.
- Last run of fetch-incidents
When:
- Fetching incidents, not first run. last run fetch time is before both alerts.
Then:
Ensure that alerts are returned as incidents.
Ensure that last run is set with latest alert time stamp.
"""
last_run = {'last_fetch_epoch_time': 1610360118147914}
requests_mock.get(
f'{MOCKED_BASE_URL}/alerts?start_time_in_usecs=1610360118147914',
json=command_tests_data['nutanix-fetch-incidents']['response']
)
current_time = int(datetime.utcnow().timestamp() * 1000000)
incidents, next_run = client.fetch_incidents(
params={},
last_run=last_run
)
incidents_raw_json = [json.loads(incident['rawJSON']) for incident in incidents]
assert next_run.get('last_fetch_epoch_time') >= current_time
assert incidents_raw_json == command_tests_data['nutanix-fetch-incidents']['expected']['outputs']
@pytest.mark.parametrize('true_value, false_value, alert_status_filters, expected',
[('Resolved', 'Unresolved', ['Resolved', 'Acknowledged'], True),
('Resolved', 'Unresolved', ['Unresolved', 'Acknowledged'], False),
('Resolved', 'Unresolved', ['Acknowledged'], None),
('Resolved', 'Unresolved', None, None)
])
def test_get_alert_status_filter_valid_cases(true_value, false_value, alert_status_filters, expected):
"""
Given:
- The argument name which corresponds for True value inside 'alert_status_filters' list.
- The argument name which corresponds for False value inside 'alert_status_filters' list.
- Alert status filters, contains all the selects for filters done by user.
When:
- Case a: User selected argument that corresponds for True value.
- Case b: User selected argument that corresponds for False value.
- Case c: User did not select argument that corresponds to true or false value.
Then:
- Case a: Ensure True is returned.
- Case b: Ensure False is returned.
- Case c: Ensure None is returned.
"""
assert get_alert_status_filter(true_value, false_value, alert_status_filters) == expected
@pytest.mark.parametrize('true_value, false_value, alert_status_filters',
[('Resolved', 'Unresolved', ['Resolved', 'Unresolved']),
('Acknowledged', 'Unacknowledged', ['Acknowledged', 'Unacknowledged']),
('Auto Resolved', 'Not Auto Resolved', ['Auto Resolved', 'Not Auto Resolved'])
])
def test_get_alert_status_filter_invalid_case(true_value, false_value, alert_status_filters):
"""
Given:
- The argument name which corresponds for True value inside 'alert_status_filters' list.
- The argument name which corresponds for False value inside 'alert_status_filters' list.
- Alert status filters, contains all the selects for filters done by user.
When:
- Case a: User selected argument that corresponds for both True and False values.
- Case b: User selected argument that corresponds for both True and False values.
- Case c: User selected argument that corresponds for both True and False values.
Then:
- Case a: Ensure DemistoException is thrown with the expected message error.
- Case b: Ensure DemistoException is thrown with the expected message error.
- Case c: Ensure DemistoException is thrown with the expected message error.
"""
with pytest.raises(DemistoException,
match=f'Invalid alert status filters configurations, only one of {true_value},{false_value} '
'can be chosen.'):
get_alert_status_filter(true_value, false_value, alert_status_filters)
@pytest.mark.parametrize('epoch_time, expected',
[(0, None),
(None, None),
(1600000000000000, '2020-09-13T12:26:40+00:00')
])
def test_convert_epoch_time_to_datetime_valid_cases(epoch_time, expected):
"""
Given:
- Time to be converted to date time in UTC timezone.
When:
- Case a: Epoch time is 0.
- Case b: Epoch time is not given.
- Case c: Valid epoch time is given.
Then:
- Case a: Ensure None is returned.
- Case b: Ensure None is returned.
- Case c: Ensure the corresponding date time string is returned.
"""
assert convert_epoch_time_to_datetime(epoch_time) == expected
def test_add_iso_entries_to_dict():
"""
Given:
- Dict containing entries with epoch time.
When:
- Adding to entries with epoch time entries with iso time.
Then:
- All 'usecs' keys in the dict are replaced with 'iso time' entries with correct iso values.
"""
tested_dict = {usec_entry: 1600000000000000 for usec_entry in USECS_ENTRIES_MAPPING.keys()}
tested_dict['host_name'] = 'Nutanix Host'
add_iso_entries_to_dict([tested_dict])
assert tested_dict['host_name'] == 'Nutanix Host'
assert all(
tested_dict.get(iso_entry) == '2020-09-13T12:26:40+00:00' for iso_entry in USECS_ENTRIES_MAPPING.values())
assert len(tested_dict) == (1 + (len(USECS_ENTRIES_MAPPING) * 2))
@pytest.mark.parametrize('outputs, expected_outputs',
[([{1: 2, 3: 4, 'a': 'b'}], [1, 3, 'a']),
([{'a': {2: 3}}], []),
([{1: 2, 3: 4, 'a': {1: 2}}, {1: 2, 'abc': 'def', 'lst': [1, {2: 3}, 3, [4, 5, 6]]}], [1]),
([{'a': [[[[[[{1: 2}]]]]]]}], []),
([], [])
])
def test_get_human_readable_headers(outputs, expected_outputs):
"""
Given:
- List of outputs.
When:
- Creating human readable keys by given outputs
Then:
- All keys that don't contains inner dicts are returned.
"""
readable_headers = get_human_readable_headers(outputs)
assert all(readable_header in expected_outputs for readable_header in readable_headers)
assert len(readable_headers) == len(expected_outputs)
def test_task_id_exists_task_exists(requests_mock):
"""
Given:
- Task Id.
- Nutanix client.
When:
Task to be polled exists in Nutanix.
Then:
True is returned
"""
task_id = 'abcd1234-ab12-cd34-1a2s3d5f7hh4'
requests_mock.get(
f'{MOCKED_BASE_URL}/tasks/{task_id}',
json={}
)
assert task_exists(client, task_id)
def test_task_id_exists_task_does_not_exist(requests_mock):
"""
Given:
- Task Id.
- Nutanix client.
When:
Task to be polled does not exist in Nutanix.
Then:
False is returned
"""
task_id = 'abcd1234-ab12-cd34-1a2s3d5f7hh4'
requests_mock.get(
f'{MOCKED_BASE_URL}/tasks/{task_id}',
exc=DemistoException(f'Task with id {task_id} is not found')
)
assert not task_exists(client, task_id)
def test_task_id_exists_unexpected_exception(requests_mock):
"""
Given:
- Task Id.
- Nutanix client.
When:
Unexpected exception is thrown during call to Nutanix service.
Then:
The unexpected exception is raised and not passed silently
"""
task_id = 'abcd1234-ab12-cd34-1a2s3d5f7hh4'
requests_mock.get(
f'{MOCKED_BASE_URL}/tasks/{task_id}',
exc=DemistoException('Unexpected exception')
)
with pytest.raises(DemistoException, match='Unexpected exception'):
task_exists(client, task_id)
| VirusTotal/content | Packs/NutanixHypervisor/Integrations/NutanixHypervisor/NutanixHypervisor_test.py | Python | mit | 18,687 |
import sys
def query_yes_no(question, default="yes"):
"""Ask a yes/no question via raw_input() and return their answer.
"question" is a string that is presented to the user.
"default" is the presumed answer if the user just hits <Enter>.
It must be "yes" (the default), "no" or None (meaning
an answer is required of the user).
The "answer" return value is one of "yes" or "no".
"""
valid = {
"yes": True,
"y": True,
"no": False,
"n": False,
}
    if default is None or default in valid:
prompt = " [y/n] "
else:
raise ValueError("Invalid default answer: '%s'" % default)
while True:
sys.stdout.write(question + prompt)
choice = input().lower()
if default is not None and choice == '':
return valid[default]
if choice in valid:
return valid[choice]
sys.stdout.write("Please respond with one of the following ({}).\n"
.format(', '.join(sorted(list(valid.keys())))))
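# Example (illustrative):
#   if query_yes_no("Overwrite the existing file?", default="no"):
#       ...  # proceed with the overwrite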
| eduNEXT/edunext-ecommerce | ecommerce/extensions/order/management/commands/prompt.py | Python | agpl-3.0 | 1,074 |
import bobo
@bobo.query('/')
def hello(person):
return 'Hello %s!' % person
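# Run with bobo's development server (illustrative):
#   bobo -f hello.py
# then visit http://localhost:8080/?person=World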
| YuxuanLing/trunk | trunk/code/study/python/Fluent-Python-example-code/attic/functions/hello.py | Python | gpl-3.0 | 86 |
# Copyright 2007 Google Inc.
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
"""Unit tests for group.py.
We only test what is overridden in the group subclasses, most
functionality is in base.py and tested in passwd_test.py since a
subclass is required to test the abstract class functionality.
"""
__author__ = 'vasilios@google.com (Vasilios Hoffman)'
import unittest
from nss_cache.maps import group
from nss_cache.maps import passwd
class TestGroupMap(unittest.TestCase):
"""Tests for the GroupMap class."""
def __init__(self, obj):
"""Set some default avalible data for testing."""
super(TestGroupMap, self).__init__(obj)
self._good_entry = group.GroupMapEntry()
self._good_entry.name = 'foo'
self._good_entry.passwd = 'x'
self._good_entry.gid = 10
self._good_entry.members = ['foo', 'bar']
def testInit(self):
"""Construct an empty or seeded GroupMap."""
self.assertEquals(group.GroupMap, type(group.GroupMap()),
msg='failed to create an empty GroupMap')
gmap = group.GroupMap([self._good_entry])
self.assertEquals(self._good_entry, gmap.PopItem(),
msg='failed to seed GroupMap with list')
self.assertRaises(TypeError, group.GroupMap, ['string'])
def testAdd(self):
"""Add throws an error for objects it can't verify."""
gmap = group.GroupMap()
entry = self._good_entry
self.assert_(gmap.Add(entry), msg='failed to append new entry.')
self.assertEquals(1, len(gmap), msg='unexpected size for Map.')
ret_entry = gmap.PopItem()
self.assertEquals(ret_entry, entry, msg='failed to pop correct entry.')
pentry = passwd.PasswdMapEntry()
pentry.name = 'foo'
pentry.uid = 10
pentry.gid = 10
self.assertRaises(TypeError, gmap.Add, pentry)
class TestGroupMapEntry(unittest.TestCase):
"""Tests for the GroupMapEntry class."""
def testInit(self):
"""Construct an empty and seeded GroupMapEntry."""
self.assert_(group.GroupMapEntry(),
msg='Could not create empty GroupMapEntry')
seed = {'name': 'foo', 'gid': 10}
entry = group.GroupMapEntry(seed)
self.assert_(entry.Verify(),
msg='Could not verify seeded PasswdMapEntry')
self.assertEquals(entry.name, 'foo',
msg='Entry returned wrong value for name')
self.assertEquals(entry.passwd, 'x',
msg='Entry returned wrong value for passwd')
self.assertEquals(entry.gid, 10,
msg='Entry returned wrong value for gid')
self.assertEquals(entry.members, [],
msg='Entry returned wrong value for members')
def testAttributes(self):
"""Test that we can get and set all expected attributes."""
entry = group.GroupMapEntry()
entry.name = 'foo'
self.assertEquals(entry.name, 'foo',
msg='Could not set attribute: name')
entry.passwd = 'x'
self.assertEquals(entry.passwd, 'x',
msg='Could not set attribute: passwd')
entry.gid = 10
self.assertEquals(entry.gid, 10,
msg='Could not set attribute: gid')
members = ['foo', 'bar']
entry.members = members
self.assertEquals(entry.members, members,
msg='Could not set attribute: members')
def testVerify(self):
"""Test that the object can verify it's attributes and itself."""
entry = group.GroupMapEntry()
# Empty object should bomb
self.failIf(entry.Verify())
def testKey(self):
"""Key() should return the value of the 'name' attribute."""
entry = group.GroupMapEntry()
entry.name = 'foo'
self.assertEquals(entry.Key(), entry.name)
if __name__ == '__main__':
unittest.main()
| UPPMAX/nsscache | nss_cache/maps/group_test.py | Python | gpl-2.0 | 4,427 |
#!/usr/bin/env python
from __future__ import division, print_function, absolute_import
def configuration(parent_package='',top_path=None):
import numpy
from numpy.distutils.misc_util import Configuration
config = Configuration('sparse',parent_package,top_path,
setup_name = 'setupscons.py')
config.add_data_dir('tests')
config.add_subpackage('linalg')
config.add_subpackage('sparsetools')
config.add_subpackage('csgraph')
return config
if __name__ == '__main__':
from numpy.distutils.core import setup
setup(**configuration(top_path='').todict())
| sargas/scipy | scipy/sparse/setupscons.py | Python | bsd-3-clause | 656 |
# -*- coding: utf8 -*-
import logging
logger = logging.getLogger(__name__)
print(f"zeeguu_core initialized logger with name: {logger.name}")
logging.basicConfig(format="%(asctime)s %(levelname)s %(name)s %(message)s")
def info(msg):
logger.info(msg)
def debug(msg):
logger.debug(msg)
def log(msg):
info(msg)
def warning(msg):
logger.warning(msg)
def critical(msg):
logger.critical(msg)
def logp(msg):
log(msg)
print(msg)
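# Typical use elsewhere in the package (illustrative):
#   from zeeguu_core import log
#   log("session started")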
| mircealungu/Zeeguu-Core | zeeguu_core/__init__.py | Python | mit | 462 |