_id (string, 2-7 chars) | title (string, 1-88 chars) | partition (3 classes) | text (string, 31-13.1k chars) | language (1 class) | meta_information (dict)
---|---|---|---|---|---
q300 | RGB_to_HSL | train | def RGB_to_HSL(cobj, *args, **kwargs):
"""
Converts from RGB to HSL.
H values are in degrees and are 0 to 360.
S values are a percentage, 0.0 to 1.0.
L values are a percentage, 0.0 to 1.0.
"""
var_R = cobj.rgb_r
var_G = cobj.rgb_g
var_B = cobj.rgb_b
var_max = max(var_R, var_G, var_B)
var_min = min(var_R, var_G, var_B)
var_H = __RGB_to_Hue(var_R, var_G, var_B, var_min, var_max)
var_L = 0.5 * (var_max + var_min)
if var_max == var_min:
var_S = 0.0
elif var_L <= 0.5:
var_S = (var_max - var_min) / (var_max + var_min)
else:
var_S = (var_max - var_min) / (2.0 - (var_max + var_min))
return HSLColor(var_H, var_S, var_L) | python | {
"resource": ""
} |
q301 | __Calc_HSL_to_RGB_Components | train | def __Calc_HSL_to_RGB_Components(var_q, var_p, C):
"""
This is used in HSL_to_RGB conversions on R, G, and B.
"""
if C < 0:
C += 1.0
if C > 1:
C -= 1.0
# Computing C of vector (Color R, Color G, Color B)
if C < (1.0 / 6.0):
return var_p + ((var_q - var_p) * 6.0 * C)
elif C < 0.5:
return var_q
elif C < (2.0 / 3.0):
return var_p + ((var_q - var_p) * 6.0 * ((2.0 / 3.0) - C))
else:
return var_p | python | {
"resource": ""
} |
q302 | HSV_to_RGB | train | def HSV_to_RGB(cobj, target_rgb, *args, **kwargs):
"""
HSV to RGB conversion.
H values are in degrees and are 0 to 360.
S values are a percentage, 0.0 to 1.0.
V values are a percentage, 0.0 to 1.0.
"""
H = cobj.hsv_h
S = cobj.hsv_s
V = cobj.hsv_v
h_floored = int(math.floor(H))
h_sub_i = int(h_floored / 60) % 6
var_f = (H / 60.0) - (h_floored // 60)
var_p = V * (1.0 - S)
var_q = V * (1.0 - var_f * S)
var_t = V * (1.0 - (1.0 - var_f) * S)
if h_sub_i == 0:
rgb_r = V
rgb_g = var_t
rgb_b = var_p
elif h_sub_i == 1:
rgb_r = var_q
rgb_g = V
rgb_b = var_p
elif h_sub_i == 2:
rgb_r = var_p
rgb_g = V
rgb_b = var_t
elif h_sub_i == 3:
rgb_r = var_p
rgb_g = var_q
rgb_b = V
elif h_sub_i == 4:
rgb_r = var_t
rgb_g = var_p
rgb_b = V
elif h_sub_i == 5:
rgb_r = V
rgb_g = var_p
rgb_b = var_q
return target_rgb(rgb_r, rgb_g, rgb_b) | python | {
"resource": ""
} |
q303 | HSL_to_RGB | train | def HSL_to_RGB(cobj, target_rgb, *args, **kwargs):
"""
HSL to RGB conversion.
"""
H = cobj.hsl_h
S = cobj.hsl_s
L = cobj.hsl_l
if L < 0.5:
var_q = L * (1.0 + S)
else:
var_q = L + S - (L * S)
var_p = 2.0 * L - var_q
# H normalized to range [0,1]
h_sub_k = (H / 360.0)
t_sub_R = h_sub_k + (1.0 / 3.0)
t_sub_G = h_sub_k
t_sub_B = h_sub_k - (1.0 / 3.0)
rgb_r = __Calc_HSL_to_RGB_Components(var_q, var_p, t_sub_R)
rgb_g = __Calc_HSL_to_RGB_Components(var_q, var_p, t_sub_G)
rgb_b = __Calc_HSL_to_RGB_Components(var_q, var_p, t_sub_B)
return target_rgb(rgb_r, rgb_g, rgb_b) | python | {
"resource": ""
} |
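As a quick sanity check of the HSL formulas used above, the standard library's `colorsys` module implements the same math with hue in [0, 1] instead of degrees; a hedged sketch:

```python
# Hedged cross-check of the HSL math against the standard library.
# colorsys uses HLS argument order and a 0-1 hue instead of degrees.
import colorsys

r, g, b = 0.2, 0.4, 0.6
h, l, s = colorsys.rgb_to_hls(r, g, b)
back = colorsys.hls_to_rgb(h, l, s)
assert all(abs(x - y) < 1e-9 for x, y in zip(back, (r, g, b)))
print(h * 360.0, s, l)  # the degrees/percentage form used by the converters above
```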
q304 | RGB_to_CMY | train | def RGB_to_CMY(cobj, *args, **kwargs):
"""
RGB to CMY conversion.
NOTE: CMYK and CMY values range from 0.0 to 1.0
"""
cmy_c = 1.0 - cobj.rgb_r
cmy_m = 1.0 - cobj.rgb_g
cmy_y = 1.0 - cobj.rgb_b
return CMYColor(cmy_c, cmy_m, cmy_y) | python | {
"resource": ""
} |
q305 | CMY_to_RGB | train | def CMY_to_RGB(cobj, target_rgb, *args, **kwargs):
"""
Converts CMY to RGB via simple subtraction.
NOTE: Returned values are in the range of 0.0 to 1.0, like the CMY values.
"""
rgb_r = 1.0 - cobj.cmy_c
rgb_g = 1.0 - cobj.cmy_m
rgb_b = 1.0 - cobj.cmy_y
return target_rgb(rgb_r, rgb_g, rgb_b) | python | {
"resource": ""
} |
q306 | CMY_to_CMYK | train | def CMY_to_CMYK(cobj, *args, **kwargs):
"""
Converts from CMY to CMYK.
NOTE: CMYK and CMY values range from 0.0 to 1.0
"""
var_k = 1.0
if cobj.cmy_c < var_k:
var_k = cobj.cmy_c
if cobj.cmy_m < var_k:
var_k = cobj.cmy_m
if cobj.cmy_y < var_k:
var_k = cobj.cmy_y
if var_k == 1:
cmyk_c = 0.0
cmyk_m = 0.0
cmyk_y = 0.0
else:
cmyk_c = (cobj.cmy_c - var_k) / (1.0 - var_k)
cmyk_m = (cobj.cmy_m - var_k) / (1.0 - var_k)
cmyk_y = (cobj.cmy_y - var_k) / (1.0 - var_k)
cmyk_k = var_k
return CMYKColor(cmyk_c, cmyk_m, cmyk_y, cmyk_k) | python | {
"resource": ""
} |
q307 | CMYK_to_CMY | train | def CMYK_to_CMY(cobj, *args, **kwargs):
"""
Converts CMYK to CMY.
NOTE: CMYK and CMY values range from 0.0 to 1.0
"""
cmy_c = cobj.cmyk_c * (1.0 - cobj.cmyk_k) + cobj.cmyk_k
cmy_m = cobj.cmyk_m * (1.0 - cobj.cmyk_k) + cobj.cmyk_k
cmy_y = cobj.cmyk_y * (1.0 - cobj.cmyk_k) + cobj.cmyk_k
return CMYColor(cmy_c, cmy_m, cmy_y) | python | {
"resource": ""
} |
q308 | XYZ_to_IPT | train | def XYZ_to_IPT(cobj, *args, **kwargs):
"""
Converts XYZ to IPT.
NOTE: XYZ values need to be adapted to 2 degree D65
Reference:
Fairchild, M. D. (2013). Color appearance models, 3rd Ed. (pp. 271-272). John Wiley & Sons.
"""
if cobj.illuminant != 'd65' or cobj.observer != '2':
raise ValueError('XYZColor for XYZ->IPT conversion needs to be D65 adapted.')
xyz_values = numpy.array(cobj.get_value_tuple())
lms_values = numpy.dot(
IPTColor.conversion_matrices['xyz_to_lms'], xyz_values)
lms_prime = numpy.sign(lms_values) * numpy.abs(lms_values) ** 0.43
ipt_values = numpy.dot(IPTColor.conversion_matrices['lms_to_ipt'], lms_prime)
return IPTColor(*ipt_values) | python | {
"resource": ""
} |
q309 | IPT_to_XYZ | train | def IPT_to_XYZ(cobj, *args, **kwargs):
"""
Converts IPT to XYZ.
"""
ipt_values = numpy.array(cobj.get_value_tuple())
lms_values = numpy.dot(
numpy.linalg.inv(IPTColor.conversion_matrices['lms_to_ipt']),
ipt_values)
lms_prime = numpy.sign(lms_values) * numpy.abs(lms_values) ** (1.0 / 0.43)
xyz_values = numpy.dot(numpy.linalg.inv(IPTColor.conversion_matrices['xyz_to_lms']), lms_prime)
return XYZColor(*xyz_values, observer='2', illuminant='d65') | python | {
"resource": ""
} |
q310 | convert_color | train | def convert_color(color, target_cs, through_rgb_type=sRGBColor,
target_illuminant=None, *args, **kwargs):
"""
Converts the color to the designated color space.
:param color: A Color instance to convert.
:param target_cs: The Color class to convert to. Note that this is not
an instance, but a class.
:keyword BaseRGBColor through_rgb_type: If during your conversion between
your original and target color spaces you have to pass through RGB,
this determines which kind of RGB to use. For example, XYZ->HSL.
You probably don't need to specify this unless you have a special
usage case.
:type target_illuminant: None or str
:keyword target_illuminant: If during conversion from RGB to a reflective
color space you want to explicitly end up with a certain illuminant,
pass this here. Otherwise the RGB space's native illuminant
will be used.
:returns: An instance of the type passed in as ``target_cs``.
:raises: :py:exc:`colormath.color_exceptions.UndefinedConversionError`
if conversion between the two color spaces isn't possible.
"""
if isinstance(target_cs, str):
raise ValueError("target_cs parameter must be a Color object.")
if not issubclass(target_cs, ColorBase):
raise ValueError("target_cs parameter must be a Color object.")
conversions = _conversion_manager.get_conversion_path(color.__class__, target_cs)
logger.debug('Converting %s to %s', color, target_cs)
logger.debug(' @ Conversion path: %s', conversions)
# Start with original color in case we convert to the same color space.
new_color = color
if issubclass(target_cs, BaseRGBColor):
# If the target_cs is an RGB color space of some sort, then we
# have to set our through_rgb_type to make sure the conversion returns
# the expected RGB colorspace (instead of defaulting to sRGBColor).
through_rgb_type = target_cs
# We have to be careful to use the same RGB color space that created
# an object (if it was created by a conversion) in order to get correct
# results. For example, XYZ->HSL via Adobe RGB should default to Adobe
# RGB when taking that generated HSL object back to XYZ.
# noinspection PyProtectedMember
if through_rgb_type != sRGBColor:
# User overrides take priority over everything.
# noinspection PyProtectedMember
target_rgb = through_rgb_type
elif color._through_rgb_type:
# Otherwise, a value on the color object is the next best thing,
# when available.
# noinspection PyProtectedMember
target_rgb = color._through_rgb_type
else:
# No RGB space was involved earlier, so fall back to the default sRGB.
target_rgb = sRGBColor
# Walk the conversion path, feeding each step's result into the next one.
for func in conversions:
if func:
new_color = func(new_color, target_rgb=target_rgb,
target_illuminant=target_illuminant, *args, **kwargs)
return new_color | python | {
"resource": ""
} |
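For reference, a minimal usage sketch of `convert_color` (the `sRGBColor` and `LabColor` classes come from the same colormath package; the values are illustrative):

```python
# Minimal usage sketch for convert_color.
from colormath.color_objects import sRGBColor, LabColor
from colormath.color_conversions import convert_color

rgb = sRGBColor(0.1, 0.5, 0.3)
lab = convert_color(rgb, LabColor)  # note: the class itself, not an instance
print(lab.get_value_tuple())
```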
q311 | Hunt.adjust_white_for_scc | train | def adjust_white_for_scc(cls, rgb_p, rgb_b, rgb_w, p):
"""
Adjust the white point for simultaneous chromatic contrast.
:param rgb_p: Cone signals of proxima field.
:param rgb_b: Cone signals of background.
:param rgb_w: Cone signals of reference white.
:param p: Simultaneous contrast/assimilation parameter.
| python | {
"resource": ""
} |
q312 | CIECAM02m1._compute_adaptation | train | def _compute_adaptation(self, xyz, xyz_w, f_l, d):
"""
Modified adaptation procedure incorporating simultaneous chromatic contrast from Hunt model.
:param xyz: Stimulus XYZ.
:param xyz_w: Reference white XYZ.
:param f_l: Luminance adaptation factor
:param d: Degree of adaptation.
:return: Tuple of adapted rgb and rgb_w arrays.
"""
# Transform input colors to cone responses
rgb = self._xyz_to_rgb(xyz)
logger.debug("RGB: {}".format(rgb))
rgb_b = self._xyz_to_rgb(self._xyz_b)
rgb_w = self._xyz_to_rgb(xyz_w)
rgb_w = Hunt.adjust_white_for_scc(rgb, rgb_b, rgb_w, self._p)
logger.debug("RGB_W: {}".format(rgb_w))
# Compute adapted tristimulus-responses
rgb_c = self._white_adaption(rgb, rgb_w, d)
logger.debug("RGB_C: {}".format(rgb_c))
rgb_cw = self._white_adaption(rgb_w, rgb_w, d)
logger.debug("RGB_CW: {}".format(rgb_cw))
return rgb_c, rgb_cw | python | {
"resource": ""
} |
q313 | OneTouch.validate_one_touch_signature | train | def validate_one_touch_signature(self, signature, nonce, method, url, params):
"""
Function to validate signature in X-Authy-Signature key of headers.
:param string signature: X-Authy-Signature key of headers.
:param string nonce: X-Authy-Signature-Nonce key of headers.
:param string method: GET or POST - configured in app settings for OneTouch.
:param string url: base callback url.
:param dict params: params sent by Authy.
:return bool: True if calculated signature and X-Authy-Signature are identical else False.
"""
if not signature or not isinstance(signature, str):
raise AuthyFormatException(
"Invalid signature - should not be empty. It is required")
if not nonce:
raise AuthyFormatException(
"Invalid nonce - should not be empty. It is required")
if not method or not ('get' == method.lower() or 'post' == method.lower()):
raise AuthyFormatException(
"Invalid method - should not be empty. It is required")
if not params or not isinstance(params, dict):
raise AuthyFormatException(
"Invalid params - should not be empty. It is required")
query_params = self.__make_http_query(params)
| python | {
"resource": ""
} |
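The truncated tail of this method builds an HMAC over the request data. A hedged, standalone sketch of the documented OneTouch scheme follows; the exact parameter normalization and the `api_key` argument are assumptions, not taken from the snippet above:

```python
# Hedged sketch of the documented OneTouch callback check:
# HMAC-SHA256 over "nonce|METHOD|url|sorted_query", base64-encoded.
import base64
import hashlib
import hmac

def check_one_touch_signature(api_key, signature, nonce, method, url, query):
    sorted_query = '&'.join(sorted(query.split('&')))  # assumed normalization
    data = '|'.join([nonce, method.upper(), url, sorted_query])
    digest = hmac.new(api_key.encode(), data.encode(), hashlib.sha256).digest()
    return hmac.compare_digest(base64.b64encode(digest).decode(), signature)
```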
q314 | compile | train | def compile(script, vars={}, library_paths=[]):
"""
Compile a jq script, returning a script object.
library_paths is a list of strings that defines the module search path.
"""
| python | {
"resource": ""
} |
q315 | apply | train | def apply(script, value=None, vars={}, url=None, opener=default_opener, library_paths=[]):
"""
Transform value by script, returning all results as list.
| python | {
"resource": ""
} |
q316 | first | train | def first(script, value=None, default=None, vars={}, url=None, opener=default_opener, library_paths=[]):
"""
Transform object by jq script, returning the first result.
Return default if result is empty. | python | {
"resource": ""
} |
q317 | one | train | def one(script, value=None, vars={}, url=None, opener=default_opener, library_paths=[]):
"""
Transform object by jq script, returning | python | {
"resource": ""
} |
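A short usage sketch of the jq-binding API defined by the rows above (the module name `pyjq` is an assumption based on these signatures):

```python
# Hedged usage sketch; assumes these functions live in a module named pyjq.
import pyjq

doc = {"users": [{"name": "ada"}, {"name": "lin"}]}
print(pyjq.apply('.users[].name', doc))    # ['ada', 'lin'] - all results as a list
print(pyjq.first('.users[].name', doc))    # 'ada' - just the first result
print(pyjq.one('.users | length', doc))    # 2 - exactly one result, or an error
```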
q318 | calculate_mypypath | train | def calculate_mypypath() -> List[str]:
"""Return MYPYPATH so that stubs have precedence over local sources."""
typeshed_root = None
count = 0
started = time.time()
for parent in itertools.chain(
# Look in current script's parents, useful for zipapps.
Path(__file__).parents,
# Look around site-packages, useful for virtualenvs.
Path(mypy.api.__file__).parents,
# Look in global paths, useful for globally installed.
Path(os.__file__).parents,
):
count += 1
candidate = parent / 'lib' / 'mypy' / 'typeshed'
if candidate.is_dir():
typeshed_root = candidate
break
# Also check the non-installed path, useful for `setup.py develop`.
candidate = parent / 'typeshed'
if candidate.is_dir():
typeshed_root = candidate
break
LOG.debug(
'Checked %d paths in %.2fs looking for typeshed. Found %s',
count,
time.time() - started,
| python | {
"resource": ""
} |
q319 | email_list_to_email_dict | train | def email_list_to_email_dict(email_list):
"""Convert a list of email to a dict of email."""
if email_list is None:
return {}
result = {}
for value in email_list:
realname, address = parseaddr(value)
result[address] = realname
return result | python | {
"resource": ""
} |
q320 | email_address_to_list | train | def email_address_to_list(email_address):
"""Convert an email address to a list."""
realname, address = parseaddr(email_address)
return [address, realname] | python | {
"resource": ""
} |
q321 | send | train | def send(sender_instance):
"""Send a transactional email using SendInBlue API.
Site: https://www.sendinblue.com
API: https://apidocs.sendinblue.com/
"""
m = Mailin(
"https://api.sendinblue.com/v2.0",
sender_instance._kwargs.get("api_key")
)
data = {
"to": email_list_to_email_dict(sender_instance._recipient_list),
"cc": email_list_to_email_dict(sender_instance._cc),
"bcc": email_list_to_email_dict(sender_instance._bcc),
"from": email_address_to_list(sender_instance._from_email),
"subject": sender_instance._subject,
}
if sender_instance._template.is_html:
data.update({
"html": sender_instance._message,
"headers": {"Content-Type": "text/html; charset=utf-8"}
| python | {
"resource": ""
} |
q322 | _mpv_coax_proptype | train | def _mpv_coax_proptype(value, proptype=str):
"""Intelligently coax the given python value into something that can be understood as a proptype property."""
if type(value) is bytes:
return value
elif type(value) is bool:
return b'yes' if value else b'no'
elif proptype in (str, int, float):
return str(proptype(value)).encode('utf-8')
else:
raise TypeError('Cannot coax value of type {} into property type {}'.format(type(value), proptype)) | python | {
"resource": ""
} |
q323 | _make_node_str_list | train | def _make_node_str_list(l):
"""Take a list of python objects and make a MPV string node array from it.
As an example, the python list ``l = [ "foo", 23, false ]`` will result in the following MPV node object::
struct mpv_node {
.format = MPV_NODE_ARRAY,
.u.list = *(struct mpv_node_array){
.num = len(l),
.keys = NULL,
.values = struct mpv_node[len(l)] {
{ .format = MPV_NODE_STRING, .u.string = l[0] },
{ .format = MPV_NODE_STRING, .u.string = l[1] },
...
}
}
}
"""
char_ps = [ c_char_p(_mpv_coax_proptype(e, str)) for e in l ]
node_list = MpvNodeList(
num=len(l),
| python | {
"resource": ""
} |
q324 | MPV.wait_for_property | train | def wait_for_property(self, name, cond=lambda val: val, level_sensitive=True):
"""Waits until ``cond`` evaluates to a truthy value on the named property. This can be used to wait for
properties such as ``idle_active`` indicating the player is done with regular playback and just idling around
"""
sema = threading.Semaphore(value=0)
def observer(name, val):
if cond(val):
sema.release()
self.observe_property(name, observer)
if not level_sensitive or not cond(getattr(self, name.replace('-', '_'))):
sema.acquire()
self.unobserve_property(name, observer) | python | {
"resource": ""
} |
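A hedged usage sketch of `wait_for_property` (the `play` helper and the `duration` property are assumptions about the surrounding player API):

```python
# Hedged sketch: block until mpv has loaded the file and knows its duration.
import mpv

player = mpv.MPV()
player.play('test.webm')  # assumed convenience wrapper around loadfile
player.wait_for_property('duration', lambda val: val is not None)
print('duration:', player.duration)
player.terminate()
```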
q325 | MPV.terminate | train | def terminate(self):
"""Properly terminates this player instance. Preferably use this instead of relying on python's garbage
collector to cause this to be called from the object's destructor.
"""
self.handle, handle = None, self.handle
if threading.current_thread() is self._event_thread:
# mpv must be torn down from another thread when we're inside the
# event thread, otherwise termination deadlocks
grim_reaper = threading.Thread(target=lambda: _mpv_terminate_destroy(handle))
grim_reaper.start()
else:
_mpv_terminate_destroy(handle)
if self._event_thread:
self._event_thread.join() | python | {
"resource": ""
} |
q326 | MPV.command | train | def command(self, name, *args):
"""Execute a raw command."""
args = [name.encode('utf-8')] + [ (arg if type(arg) is bytes else str(arg).encode('utf-8'))
for arg in args if arg is not None] + [None]
_mpv_command(self.handle, (c_char_p * len(args))(*args)) | python | {
"resource": ""
} |
q327 | MPV.property_observer | train | def property_observer(self, name):
"""Function decorator to register a property observer. See ``MPV.observe_property`` for details."""
def wrapper(fun):
self.observe_property(name, fun)
fun.unobserve_mpv_properties = lambda: self.unobserve_property(name, fun)
return fun
return wrapper | python | {
"resource": ""
} |
q328 | MPV.unregister_message_handler | train | def unregister_message_handler(self, target_or_handler):
"""Unregister a mpv script message handler for the given script message target name.
You can also call the ``unregister_mpv_messages`` function attribute set on the handler function when it is
registered.
"""
if isinstance(target_or_handler, str):
del self._message_handlers[target_or_handler]
else:
for target, handler in list(self._message_handlers.items()):
if handler is target_or_handler:
del self._message_handlers[target] | python | {
"resource": ""
} |
q329 | MPV.message_handler | train | def message_handler(self, target):
"""Decorator to register a mpv script message handler.
WARNING: Only one handler can be registered at a time for any given target.
To unregister the message handler, call its ``unregister_mpv_messages`` function::
player = mpv.MPV()
@player.message_handler('foo')
def my_handler(some, args):
print(args)
my_handler.unregister_mpv_messages()
"""
def register(handler):
self.register_message_handler(target, handler)
handler.unregister_mpv_messages = lambda: self.unregister_message_handler(handler)
return handler
return register | python | {
"resource": ""
} |
q330 | MPV.key_binding | train | def key_binding(self, keydef, mode='force'):
"""Function decorator to register a low-level key binding.
The callback function signature is ``fun(key_state, key_name)`` where ``key_state`` is either ``'U'`` for "key
up" or ``'D'`` for "key down".
The keydef format is: ``[Shift+][Ctrl+][Alt+][Meta+]<key>`` where ``<key>`` is either the literal character the
key produces (ASCII or Unicode character), or a symbolic name (as printed by ``mpv --input-keylist``).
To unregister the callback function, you can call its ``unregister_mpv_key_bindings`` attribute::
player = mpv.MPV()
@player.key_binding('Q')
def binding(state, name):
print('blep')
binding.unregister_mpv_key_bindings()
WARNING: For a single keydef only one callback/command can be registered at the same time.
"""
def register(fun):
fun.mpv_key_bindings = getattr(fun, 'mpv_key_bindings', []) + [keydef]
def unregister_all():
for kd in fun.mpv_key_bindings:
self.unregister_key_binding(kd)
fun.unregister_mpv_key_bindings = unregister_all
self.register_key_binding(keydef, fun, mode)
return fun
return register | python | {
"resource": ""
} |
q331 | MPV.register_key_binding | train | def register_key_binding(self, keydef, callback_or_cmd, mode='force'):
"""Register a key binding. This takes an mpv keydef and either a string containing a mpv command or a python
callback function. See ``MPV.key_binding`` for details.
"""
if not re.match(r'(Shift\+)?(Ctrl\+)?(Alt\+)?(Meta\+)?(.|\w+)', keydef):
raise ValueError('Invalid keydef. Expected format: [Shift+][Ctrl+][Alt+][Meta+]<key>\n'
'<key> is either the literal character the key produces (ASCII or Unicode character), or a '
'symbolic name (as printed by --input-keylist).')
binding_name = MPV._binding_name(keydef)
if callable(callback_or_cmd):
self._key_binding_handlers[binding_name] = callback_or_cmd
self.register_message_handler('key-binding', self._handle_key_binding_message)
self.command('define-section', binding_name,
'{} script-binding py_event_handler/{}'.format(keydef, binding_name), mode)
elif isinstance(callback_or_cmd, str):
self.command('define-section', binding_name, '{} {}'.format(keydef, callback_or_cmd), mode)
else:
raise TypeError('register_key_binding expects either an mpv command string or a python callback.')
self.command('enable-section', binding_name, 'allow-hide-cursor+allow-vo-dragging') | python | {
"resource": ""
} |
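A quick check of the corrected keydef pattern (with the `+` escaped, each optional group now consumes a literal modifier prefix such as `Ctrl+`; the check stays deliberately loose about the trailing key):

```python
# Demonstrates the corrected keydef pattern with escaped '+' signs.
import re

KEYDEF = re.compile(r'(Shift\+)?(Ctrl\+)?(Alt\+)?(Meta\+)?(.|\w+)')
for keydef in ('q', 'Ctrl+q', 'Shift+Ctrl+ESC', 'Alt+SPACE'):
    assert KEYDEF.match(keydef)
```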
q332 | MPV.unregister_key_binding | train | def unregister_key_binding(self, keydef):
"""Unregister a key binding by keydef."""
binding_name = MPV._binding_name(keydef)
self.command('disable-section', binding_name)
self.command('define-section', binding_name, '')
if binding_name in self._key_binding_handlers:
del self._key_binding_handlers[binding_name]
if not self._key_binding_handlers:
self.unregister_message_handler('key-binding') | python | {
"resource": ""
} |
q333 | CORS._process_origin | train | def _process_origin(self, req, resp, origin):
"""Inspects the request and adds the Access-Control-Allow-Origin
header if the requested origin is allowed.
Returns:
``True`` if the header was added and the requested origin
is allowed, ``False`` if the origin is not allowed and the
header has not been added.
"""
if self._cors_config['allow_all_origins']:
if self.supports_credentials:
self._set_allow_origin(resp, origin)
else:
self._set_allow_origin(resp, '*')
return True
if origin in self._cors_config['allow_origins_list']:
self._set_allow_origin(resp, origin)
return True
regex = self._cors_config['allow_origins_regex']
if regex is not None and regex.match(origin):
self._set_allow_origin(resp, origin)
return True
return False | python | {
"resource": ""
} |
q334 | CORS._process_allow_headers | train | def _process_allow_headers(self, req, resp, requested_headers):
"""Adds the Access-Control-Allow-Headers header to the response,
using the cors settings to determine which headers are allowed.
Returns:
True if all the headers the client requested are allowed.
False if some or none of the headers the client requested are allowed.
"""
if not requested_headers:
return True
elif self._cors_config['allow_all_headers']:
self._set_allowed_headers(resp, requested_headers)
return True
approved_headers = []
for header in requested_headers:
if header.lower() in self._cors_config['allow_headers_list']:
approved_headers.append(header)
if len(approved_headers) == len(requested_headers):
self._set_allowed_headers(resp, approved_headers)
return True
return False | python | {
"resource": ""
} |
q335 | CORS._process_methods | train | def _process_methods(self, req, resp, resource):
"""Adds the Access-Control-Allow-Methods header to the response,
using the cors settings to determine which methods are allowed.
"""
requested_method = self._get_requested_method(req)
if not requested_method:
return False
if self._cors_config['allow_all_methods']:
allowed_methods = self._get_resource_methods(resource)
self._set_allowed_methods(resp, allowed_methods)
if requested_method in allowed_methods:
return True
elif requested_method in self._cors_config['allow_methods_list']:
resource_methods = self._get_resource_methods(resource)
# Only advertise the single method that was requested
if requested_method in resource_methods:
self._set_allowed_methods(resp, [requested_method])
return True
return False | python | {
"resource": ""
} |
q336 | CORS._process_credentials | train | def _process_credentials(self, req, resp, origin):
"""Adds the Access-Control-Allow-Credentials to the response
if the cors settings indicates it should be set.
"""
if self._cors_config['allow_credentials_all_origins']:
self._set_allow_credentials(resp)
return True
if origin in self._cors_config['allow_credentials_origins_list']:
self._set_allow_credentials(resp)
return True
return False | python | {
"resource": ""
} |
q337 | EmailBackend._send | train | def _send(self, email_message):
"""Sends an individual message via the Amazon SES HTTP API.
Args:
email_message: A single Django EmailMessage object.
Returns:
True if the EmailMessage was sent successfully, otherwise False.
Raises:
ClientError: An interaction with the Amazon SES HTTP API
failed.
"""
pre_send.send(self.__class__, message=email_message)
if not email_message.recipients():
return False
from_email = sanitize_address(email_message.from_email,
email_message.encoding)
recipients = [sanitize_address(addr, email_message.encoding)
for addr in email_message.recipients()]
message = email_message.message().as_bytes(linesep='\r\n')
try:
result = self.conn.send_raw_email(
Source=from_email,
Destinations=recipients,
RawMessage={'Data': message},
)
message_id = result['MessageId']
post_send.send(self.__class__, message=email_message, message_id=message_id)
return True
except ClientError:
if not self.fail_silently:
raise
return False | python | {
"resource": ""
} |
q338 | was_modified_since | train | def was_modified_since(header=None, mtime=0, size=0):
"""
Was something modified since the user last downloaded it?
header
This is the value of the If-Modified-Since header. If this is None,
I'll just return True.
mtime
This is the modification time of the item we're talking about.
size
This is the size of the item we're talking about.
"""
try:
if header is None:
raise ValueError
matches = re.match(r"^([^;]+)(; length=([0-9]+))?$", header,
re.IGNORECASE)
header_mtime = parse_http_date(matches.group(1))
header_len = matches.group(3)
if header_len and int(header_len) != size:
raise ValueError
if int(mtime) > header_mtime:
raise ValueError
except (AttributeError, ValueError, OverflowError):
return True
return False | python | {
"resource": ""
} |
q339 | unpublish_object | train | def unpublish_object(content_type_pk, obj_pk):
"""
Unbuild all views related to a object and then sync to S3.
Accepts primary keys to retrieve a model object that
inherits bakery's BuildableModel class.
"""
ct = ContentType.objects.get_for_id(content_type_pk)
obj = ct.get_object_for_this_type(pk=obj_pk)
try:
# Unbuild the object
logger.info("unpublish_object task has received %s" | python | {
"resource": ""
} |
q340 | BuildableMixin.prep_directory | train | def prep_directory(self, target_dir):
"""
Prepares a new directory to store the file at the provided path, if needed.
"""
dirname = path.dirname(target_dir)
if dirname:
dirname = path.join(settings.BUILD_DIR, dirname)
if not self.fs.exists(dirname):
self.fs.makedirs(dirname) | python | {
"resource": ""
} |
q341 | BuildableMixin.write_file | train | def write_file(self, target_path, html):
"""
Writes out the provided HTML to the provided path.
"""
logger.debug("Building to {}{}".format(self.fs_name, target_path))
with self.fs.open(smart_text(target_path), 'wb') as outfile:
outfile.write(six.binary_type(html)) | python | {
"resource": ""
} |
q342 | BuildableMixin.is_gzippable | train | def is_gzippable(self, path):
"""
Returns a boolean indicating if the provided file path is a candidate
for gzipping.
"""
# First check if gzipping is allowed by the global setting
if not getattr(settings, 'BAKERY_GZIP', False):
return False
# Then check if the content type of this file is on the gzip whitelist
whitelist = getattr(settings, 'GZIP_CONTENT_TYPES', DEFAULT_GZIP_CONTENT_TYPES)
return mimetypes.guess_type(path)[0] in whitelist | python | {
"resource": ""
} |
q343 | BuildableMixin.gzip_file | train | def gzip_file(self, target_path, html):
"""
Zips up the provided HTML as a companion for the provided path.
Intended to take advantage of the peculiarities of
Amazon S3's GZIP service.
mtime, an option that writes a timestamp to the output file
is set to 0, to avoid having s3cmd do unnecessary uploads because
of differences in the timestamp
"""
logger.debug("Gzipping to {}{}".format(self.fs_name, target_path))
# Write GZIP data to an in-memory buffer
data_buffer = six.BytesIO()
kwargs = dict(
filename=path.basename(target_path),
mode='wb',
fileobj=data_buffer,
mtime=0
)
with gzip.GzipFile(**kwargs) as f:
f.write(six.binary_type(html))
# Write the gzipped buffer out to the target filesystem
with self.fs.open(smart_text(target_path), 'wb') as outfile:
outfile.write(data_buffer.getvalue()) | python | {
"resource": ""
} |
q344 | AutoPublishingBuildableModel.save | train | def save(self, *args, **kwargs):
"""
A custom save that publishes or unpublishes the object where
appropriate.
Save with keyword argument obj.save(publish=False) to skip the process.
"""
from bakery import tasks
from django.contrib.contenttypes.models import ContentType
# if obj.save(publish=False) has been passed, we skip everything.
if not kwargs.pop('publish', True):
super(AutoPublishingBuildableModel, self).save(*args, **kwargs)
# Otherwise, for the standard obj.save(), here we go...
else:
# First figure out if the record is an addition, or an edit of
# a preexisting record.
try:
preexisting = self.__class__.objects.get(pk=self.pk)
except self.__class__.DoesNotExist:
preexisting = None
# If this is an addition...
if not preexisting:
# We will publish if the publication status is True
if self.get_publication_status():
action = 'publish'
# Otherwise we will do nothing
else:
action = None
# If this is an edit...
else:
# If it's being unpublished...
if not self.get_publication_status() and \
preexisting.get_publication_status():
action = 'unpublish'
# If it's being published...
elif self.get_publication_status():
action = 'publish'
# If it's remaining unpublished, do nothing
else:
action = None
# Now, no matter what, save the record normally
super(AutoPublishingBuildableModel, self).save(*args, **kwargs)
# Finally, fire off any publishing task
ct = ContentType.objects.get_for_model(self.__class__)
if action == 'publish':
tasks.publish_object.delay(ct.pk, self.pk)
elif action == 'unpublish':
tasks.unpublish_object.delay(ct.pk, self.pk) | python | {
"resource": ""
} |
q345 | AutoPublishingBuildableModel.delete | train | def delete(self, *args, **kwargs):
"""
Triggers a task that will unpublish the object after it is deleted.
Save with keyword argument obj.delete(unpublish=False) to skip it.
"""
from bakery import tasks
from django.contrib.contenttypes.models import ContentType
# if obj.delete(unpublish=False) has been passed, we skip the task.
unpublish = kwargs.pop('unpublish', True)
# Grab the content type and primary key before the delete wipes them out
ct = ContentType.objects.get_for_model(self.__class__)
pk = self.pk
super(AutoPublishingBuildableModel, self).delete(*args, **kwargs)
if unpublish and self.get_publication_status():
tasks.unpublish_object.delay(ct.pk, pk) | python | {
"resource": ""
} |
q346 | Command.handle | train | def handle(self, *args, **options):
"""
Making it happen.
"""
logger.info("Build started")
# Set options
self.set_options(*args, **options)
# Get the build directory ready
if not options.get("keep_build_dir"):
| python | {
"resource": ""
} |
q347 | Command.set_options | train | def set_options(self, *args, **options):
"""
Configure a few global options before things get going.
"""
self.verbosity = int(options.get('verbosity', 1))
# Figure out what build directory to use
if options.get("build_dir"):
self.build_dir = options.get("build_dir")
settings.BUILD_DIR = self.build_dir
else:
if not hasattr(settings, 'BUILD_DIR'):
raise CommandError(self.build_unconfig_msg)
self.build_dir = settings.BUILD_DIR
# Get the datatypes right so fs will be happy
self.build_dir = smart_text(self.build_dir)
self.static_root = smart_text(settings.STATIC_ROOT)
self.media_root = smart_text(settings.MEDIA_ROOT)
# Connect the BUILD_DIR with our filesystem backend
self.app = apps.get_app_config("bakery")
self.fs = self.app.filesystem
| python | {
"resource": ""
} |
q348 | Command.init_build_dir | train | def init_build_dir(self):
"""
Clear out the build directory and create a new one.
"""
# Destroy the build directory, if it exists
logger.debug("Initializing %s" % self.build_dir)
if self.verbosity > 1:
self.stdout.write("Initializing build directory")
if self.fs.exists(self.build_dir):
self.fs.removetree(self.build_dir)
# Then create it fresh
self.fs.makedirs(self.build_dir) | python | {
"resource": ""
} |
q349 | Command.build_static | train | def build_static(self, *args, **options):
"""
Builds the static files directory as well as robots.txt and favicon.ico
"""
logger.debug("Building static directory")
if self.verbosity > 1:
self.stdout.write("Building static directory")
management.call_command(
"collectstatic",
interactive=False,
verbosity=0
)
# Set the target directory inside the filesystem.
target_dir = path.join(
self.build_dir,
settings.STATIC_URL.lstrip('/')
)
target_dir = smart_text(target_dir)
if os.path.exists(self.static_root) and settings.STATIC_URL:
if getattr(settings, 'BAKERY_GZIP', False):
self.copytree_and_gzip(self.static_root, target_dir)
# if gzip isn't enabled, just copy the tree straight over
else:
logger.debug("Copying {}{} to {}{}".format("osfs://", self.static_root, self.fs_name, target_dir))
copy.copy_dir("osfs:///", self.static_root, self.fs, target_dir)
# If they exist in the static directory, copy the robots.txt
# and favicon.ico files down to the root so they will work
# on the live website.
robots_src = path.join(target_dir, 'robots.txt')
| python | {
"resource": ""
} |
q350 | Command.build_media | train | def build_media(self):
"""
Build the media files.
"""
logger.debug("Building media directory")
if self.verbosity > 1:
self.stdout.write("Building media directory")
if os.path.exists(self.media_root) and settings.MEDIA_URL:
target_dir = path.join(self.build_dir, settings.MEDIA_URL.lstrip('/'))
logger.debug("Copying {}{} to {}{}".format("osfs://", self.media_root, self.fs_name, target_dir))
copy.copy_dir("osfs:///", smart_text(self.media_root), self.fs, smart_text(target_dir)) | python | {
"resource": ""
} |
q351 | Command.build_views | train | def build_views(self):
"""
Bake out specified buildable views.
"""
# Then loop through and run them all
for view_str in self.view_list:
logger.debug("Building %s" % view_str)
if self.verbosity > 1:
self.stdout.write("Building %s" % view_str)
view = get_callable(view_str)
view().build_method() | python | {
"resource": ""
} |
q352 | Command.copytree_and_gzip | train | def copytree_and_gzip(self, source_dir, target_dir):
"""
Copies the provided source directory to the provided target directory.
Gzips JavaScript, CSS and HTML and other files along the way.
"""
# Figure out what we're building...
build_list = []
# Walk through the source directory...
for (dirpath, dirnames, filenames) in os.walk(source_dir):
for f in filenames:
# Figure out what is going where
source_path = os.path.join(dirpath, f)
rel_path = os.path.relpath(dirpath, source_dir)
target_path = os.path.join(target_dir, rel_path, f)
# | python | {
"resource": ""
} |
q353 | Command.copyfile_and_gzip | train | def copyfile_and_gzip(self, source_path, target_path):
"""
Copies the provided file to the provided target directory.
Gzips JavaScript, CSS and HTML and other files along the way.
"""
# And then where we want to copy it to.
target_dir = path.dirname(target_path)
if not self.fs.exists(target_dir):
try:
self.fs.makedirs(target_dir)
except OSError:
pass
# determine the mimetype of the file
guess = mimetypes.guess_type(source_path)
content_type = guess[0]
encoding = guess[1]
# If it isn't a file we want to gzip...
if content_type not in self.gzip_file_match:
# just copy it to the target.
logger.debug("Copying {}{} to {}{} because its filetype isn't on the whitelist".format(
"osfs://",
source_path,
self.fs_name,
target_path
))
copy.copy_file("osfs:///", smart_text(source_path), self.fs, smart_text(target_path))
# If the file is already gzipped
elif encoding == 'gzip':
logger.debug("Copying {}{} to {}{} because it's already gzipped".format(
"osfs://",
source_path,
self.fs_name,
target_path
))
copy.copy_file("osfs:///", smart_text(source_path), self.fs, smart_text(target_path))
# If it is one we want to gzip...
else:
# ... let the world know ...
logger.debug("Gzipping {}{} to {}{}".format(
"osfs://",
source_path,
self.fs_name,
target_path
))
| python | {
"resource": ""
} |
q354 | Command.set_options | train | def set_options(self, options):
"""
Configure all the many options we'll need to make this happen.
"""
self.verbosity = int(options.get('verbosity'))
# Will we be gzipping?
self.gzip = getattr(settings, 'BAKERY_GZIP', False)
# And if so what content types will we be gzipping?
self.gzip_content_types = getattr(
settings,
'GZIP_CONTENT_TYPES',
DEFAULT_GZIP_CONTENT_TYPES
)
# What ACL (i.e. security permissions) will be giving the files on S3?
self.acl = getattr(settings, 'DEFAULT_ACL', self.DEFAULT_ACL)
# Should we set cache-control headers?
self.cache_control = getattr(settings, 'BAKERY_CACHE_CONTROL', {})
# If the user specifies a build directory...
if options.get('build_dir'):
# ... validate that it is good.
if not os.path.exists(options.get('build_dir')):
raise CommandError(self.build_missing_msg)
# Go ahead and use it
self.build_dir = options.get("build_dir")
# Otherwise, fall back to the BUILD_DIR setting
else:
if not hasattr(settings, 'BUILD_DIR'):
raise CommandError(self.build_missing_msg)
self.build_dir = settings.BUILD_DIR | python | {
"resource": ""
} |
q355 | Command.get_local_file_list | train | def get_local_file_list(self):
"""
Walk the local build directory and create a list of relative and
absolute paths to files.
"""
file_list = []
for (dirpath, dirnames, filenames) in os.walk(self.build_dir):
for fname in filenames:
# relative path, to sync with the S3 key
local_key = os.path.join(
os.path.relpath(dirpath, self.build_dir),
fname
)
if local_key.startswith('./'):
local_key = local_key[2:]
file_list.append(local_key)
return file_list | python | {
"resource": ""
} |
q356 | Command.sync_with_s3 | train | def sync_with_s3(self):
"""
Walk through our self.local_files list, and match them with the list
of keys in the S3 bucket.
"""
# Create a list to put all the files we're going to update
self.update_list = []
# Figure out which files need to be updated and upload all these files
logger.debug("Comparing {} local files with {} bucket files".format(
len(self.local_file_list),
len(self.s3_obj_dict.keys())
))
if self.no_pooling:
[self.compare_local_file(f) for f in self.local_file_list]
else:
cpu_count = multiprocessing.cpu_count()
logger.debug("Pooling local file comparison on {} CPUs".format(cpu_count))
| python | {
"resource": ""
} |
q357 | Command.get_md5 | train | def get_md5(self, filename):
"""
Returns the md5 checksum of the provided file name.
"""
with open(filename, 'rb') as f:
return hashlib.md5(f.read()).hexdigest() | python | {
"resource": ""
} |
q358 | Command.get_multipart_md5 | train | def get_multipart_md5(self, filename, chunk_size=8 * 1024 * 1024):
"""
Returns the md5 checksum of the provided file name after breaking it into chunks.
This is done to mirror the method used by Amazon S3 after a multipart upload.
"""
# Loop through the file contents ...
md5s = []
with open(filename, 'rb') as fp:
while True:
# Break it into chunks
data = fp.read(chunk_size)
# Finish when there are no more
if not data:
break
# Generate a md5 hash for each chunk
md5s.append(hashlib.md5(data))
# Combine the chunks
digests = b''.join(m.digest() for m in md5s)
# Hash the concatenated digests and append the chunk count,
# mirroring how S3 builds a multipart ETag
new_md5 = hashlib.md5(digests)
return '%s-%s' % (new_md5.hexdigest(), len(md5s)) | python | {
"resource": ""
} |
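Pulling the two checksum helpers together, a hedged sketch of how a caller might pick the matching local checksum for a given S3 ETag (multipart ETags carry a `-<chunk count>` suffix):

```python
# Hedged sketch: choose the checksum style that matches the ETag's shape.
# A multipart ETag looks like "9b2cf535f27731c974343645a3985328-3".
def local_checksum_for(command, etag, filename):
    etag = etag.strip('"').strip("'")
    if '-' in etag:  # the object was uploaded in multiple parts
        return command.get_multipart_md5(filename)
    return command.get_md5(filename)
```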
q359 | Command.compare_local_file | train | def compare_local_file(self, file_key):
"""
Compares a local version of a file with what's already published.
If an update is needed, the file's key is added self.update_list.
"""
# Where is the file?
file_path = os.path.join(self.build_dir, file_key)
# If we're in force_publish mode just add it
if self.force_publish:
self.update_list.append((file_key, file_path))
# And quit now
return
# Does it exist in our s3 object list?
if file_key in self.s3_obj_dict:
# Get the md5 stored in Amazon's header
s3_md5 = self.s3_obj_dict[file_key].get('ETag').strip('"').strip("'")
# If there is a multipart ETag on S3, compare that to our local file after its chunked up.
# We are presuming this file was uploaded in multiple parts.
if "-" in s3_md5:
local_md5 = self.get_multipart_md5(file_path)
# Otherwise, do it straight for the whole file
else:
local_md5 = self.get_md5(file_path)
# If the checksums match, there's nothing to update
if local_md5 == s3_md5:
logger.debug("{} has not changed".format(file_key))
# Otherwise, queue the file for an update
else:
self.update_list.append((file_key, file_path))
# If the file isn't in the bucket at all, add it to the update list
else:
self.update_list.append((file_key, file_path)) | python | {
"resource": ""
} |
q360 | Command.upload_to_s3 | train | def upload_to_s3(self, key, filename):
"""
Set the content type and gzip headers if applicable
and upload the item to S3
"""
extra_args = {'ACL': self.acl}
# determine the mimetype of the file
guess = mimetypes.guess_type(filename)
content_type = guess[0]
encoding = guess[1]
if content_type:
extra_args['ContentType'] = content_type
# add the gzip headers, if necessary
if (self.gzip and content_type in self.gzip_content_types) or encoding == 'gzip':
| python | {
"resource": ""
} |
q361 | BuildableYearArchiveView.build_year | train | def build_year(self, dt):
"""
Build the page for the provided year.
"""
self.year = str(dt.year)
| python | {
"resource": ""
} |
q362 | BuildableDayArchiveView.get_day | train | def get_day(self):
"""
Return the day from the database in the format expected by the URL.
"""
year = super(BuildableDayArchiveView, self).get_year()
month = super(BuildableDayArchiveView, self).get_month()
| python | {
"resource": ""
} |
q363 | BuildableDayArchiveView.build_day | train | def build_day(self, dt):
"""
Build the page for the provided day.
"""
self.month = str(dt.month)
self.year = str(dt.year)
self.day = str(dt.day)
logger.debug("Building %s-%s-%s" % (self.year, | python | {
"resource": ""
} |
q364 | get_bucket_page | train | def get_bucket_page(page):
"""
Returns all the keys in a s3 bucket paginator page.
"""
key_list = page.get('Contents', [])
logger.debug("Retrieving page | python | {
"resource": ""
} |
q365 | batch_delete_s3_objects | train | def batch_delete_s3_objects(
keys,
aws_bucket_name,
chunk_size=100,
s3_client=None
):
"""
Utility method that batch deletes objects in given bucket.
"""
if s3_client is None:
s3_client, s3_resource = get_s3_client()
key_chunks = []
for i in range(0, len(keys), chunk_size):
chunk = []
for key in list(keys)[i:i + chunk_size]:
chunk.append({'Key': key})
key_chunks.append(chunk)
for chunk in key_chunks:
s3_client.delete_objects(
Bucket=aws_bucket_name,
Delete={'Objects': chunk}
) | python | {
"resource": ""
} |
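A hedged usage sketch of the batch deleter (bucket name and prefix are illustrative; the paginator calls are standard boto3):

```python
# Illustrative only: delete every object under a prefix, in batches.
import boto3

s3 = boto3.client('s3')
paginator = s3.get_paginator('list_objects_v2')
keys = [obj['Key']
        for page in paginator.paginate(Bucket='my-bucket', Prefix='stale/')
        for obj in page.get('Contents', [])]
batch_delete_s3_objects(keys, 'my-bucket', s3_client=s3)
```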
q366 | is_present | train | def is_present(p):
"""
Given a parser or string, make a parser that returns
True if the parser matches, False otherwise
"""
return | python | {
"resource": ""
} |
q367 | Parser.parse | train | def parse(self, stream):
"""Parse a string or list of tokens and return the | python | {
"resource": ""
} |
q368 | Parser.parse_partial | train | def parse_partial(self, stream):
"""
Parse the longest possible prefix of a given string.
Return a tuple of the result and the rest of the string,
or raise a ParseError.
"""
result = self(stream, 0)
if result.status:
return (result.value, stream[result.index:])
else:
raise ParseError(result.expected, stream, result.furthest) | python | {
"resource": ""
} |
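A hedged usage sketch of the two entry points above, using parsy-style combinators (`regex` and `map` are standard in that library):

```python
# Hedged sketch of parse vs. parse_partial, parsy-style.
from parsy import regex

number = regex(r'[0-9]+').map(int)
assert number.parse('42') == 42                      # must consume all input
assert number.parse_partial('42abc') == (42, 'abc')  # returns the leftover stream
```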
q369 | extract_key_values | train | def extract_key_values(array_value, separators=(';', ',', ':'), **kwargs):
"""Serialize array of objects with simple key-values
"""
items_sep, fields_sep, keys_sep = separators
return items_sep.join(
fields_sep.join('{}{}{}'.format(k, keys_sep, v) for k, v in sorted(it.items()))
for it in array_value) | python | {
"resource": ""
} |
q370 | Flatson.from_schemafile | train | def from_schemafile(cls, schemafile):
"""Create a Flatson instance from a schemafile
"""
with open(schemafile) as f:
return cls(json.load(f)) | python | {
"resource": ""
} |
q371 | Flatson.register_serialization_method | train | def register_serialization_method(self, name, serialize_func):
"""Register a custom serialization method that can be
used via schema configuration
| python | {
"resource": ""
} |
q372 | Flatson.flatten | train | def flatten(self, obj):
"""Return a list with the field values
"""
| python | {
"resource": ""
} |
q373 | Flatson.flatten_dict | train | def flatten_dict(self, obj):
"""Return an OrderedDict dict preserving order of keys in fieldnames
"""
return OrderedDict(zip(self.fieldnames, self.flatten(obj))) | python | {
"resource": ""
} |
q374 | Connection.busy | train | def busy(self):
"""Return if the connection is currently executing a query or is locked
by a session that still exists.
:rtype: bool
"""
if self.handle.isexecuting():
return True
elif self.used_by is None:
return False
# Otherwise the connection is busy while its locking session is alive
return not self.used_by() is None | python | {
"resource": ""
} |
q375 | Connection.free | train | def free(self):
"""Remove the lock on the connection if the connection is not active
:raises: ConnectionBusyError
"""
LOGGER.debug('Connection %s freeing', self.id)
if self.handle.isexecuting():
raise ConnectionBusyError(self.id)
with self._lock:
self.used_by = None
LOGGER.debug('Connection %s freed', self.id) | python | {
"resource": ""
} |
q376 | Connection.lock | train | def lock(self, session):
"""Lock the connection, ensuring that it is not busy and storing
a weakref for the session.
:param queries.Session session: The session to lock the connection with
:raises: ConnectionBusyError
"""
if self.busy:
raise ConnectionBusyError(self.id)
with self._lock:
self.used_by = weakref.ref(session)
LOGGER.debug('Connection %s locked', self.id) | python | {
"resource": ""
} |
q377 | Pool.add | train | def add(self, connection):
"""Add a new connection to the pool
:param connection: The connection to add to the pool
:type connection: psycopg2.extensions.connection
:raises: PoolFullError
"""
if id(connection) in self.connections:
raise ValueError('Connection already exists in pool')
if len(self.connections) == self.max_size:
LOGGER.warning('Race condition found when adding new connection')
try:
connection.close()
except (psycopg2.Error, psycopg2.Warning) as error:
LOGGER.error('Error closing the connection: %s', error)
raise PoolFullError(self.id)
with self._lock:
self.connections[id(connection)] = Connection(connection)
LOGGER.debug('Pool %s added connection %s', self.id, id(connection)) | python | {
"resource": ""
} |
q378 | Pool.clean | train | def clean(self):
"""Clean the pool by removing any closed connections and if the pool's
idle has exceeded its idle TTL, remove all connections.
"""
LOGGER.debug('Cleaning the pool')
for connection in [self.connections[k] for k in self.connections if
self.connections[k].closed]:
LOGGER.debug('Removing %s', connection.id)
self.remove(connection.handle)
# If the pool has been idle long enough, close everything down
if self.idle_duration > self.idle_ttl:
self.close()
LOGGER.debug('Pool %s cleaned', self.id) | python | {
"resource": ""
} |
q379 | Pool.close | train | def close(self):
"""Close the pool by closing and removing all of the connections"""
for cid in list(self.connections.keys()):
self.remove(self.connections[cid].handle)
LOGGER.debug('Pool %s closed', self.id) | python | {
"resource": ""
} |
q380 | Pool.free | train | def free(self, connection):
"""Free the connection from use by the session that was using it.
:param connection: The connection to free
:type connection: psycopg2.extensions.connection
:raises: ConnectionNotFoundError
"""
LOGGER.debug('Pool %s freeing connection %s', self.id, id(connection))
try:
self.connection_handle(connection).free()
except KeyError:
raise ConnectionNotFoundError(self.id, id(connection))
# If all connections are now idle, start the idle clock
if self.idle_connections == list(self.connections.values()):
with self._lock:
self.idle_start = self.time_method()
LOGGER.debug('Pool %s freed connection %s', self.id, id(connection)) | python | {
"resource": ""
} |
q381 | Pool.get | train | def get(self, session):
"""Return an idle connection and assign the session to the connection
:param queries.Session session: The session to assign
:rtype: psycopg2.extensions.connection
:raises: NoIdleConnectionsError
"""
idle = self.idle_connections
if idle:
connection = idle.pop(0)
connection.lock(session)
if self.idle_start:
with self._lock:
self.idle_start = None
return connection.handle
raise NoIdleConnectionsError(self.id) | python | {
"resource": ""
} |
q382 | Pool.idle_connections | train | def idle_connections(self):
"""Return a list of idle connections
:rtype: list
"""
return [c for c in self.connections.values()
if not c.busy and not c.closed] | python | {
"resource": ""
} |
q383 | Pool.lock | train | def lock(self, connection, session):
"""Explicitly lock the specified connection
:type connection: psycopg2.extensions.connection
:param connection: The connection to lock
:param queries.Session session: The session to hold the lock
"""
cid = id(connection)
try:
self.connection_handle(connection).lock(session)
except KeyError:
raise ConnectionNotFoundError(self.id, cid)
else:
if self.idle_start:
with self._lock:
self.idle_start = None
LOGGER.debug('Pool %s locked connection %s', self.id, cid) | python | {
"resource": ""
} |
q384 | Pool.remove | train | def remove(self, connection):
"""Remove the connection from the pool
:param connection: The connection to remove
:type connection: psycopg2.extensions.connection
:raises: ConnectionNotFoundError
:raises: ConnectionBusyError
"""
cid = id(connection)
if cid not in self.connections:
raise ConnectionNotFoundError(self.id, cid)
self.connection_handle(connection).free()
with self._lock:
del self.connections[cid]
LOGGER.debug('Pool %s removed connection %s', self.id, cid) | python | {
"resource": ""
} |
q385 | Pool.report | train | def report(self):
"""Return a report about the pool state and configuration.
:rtype: dict
"""
return {
'connections': {
'busy': len(self.busy_connections),
'closed': len(self.closed_connections),
'executing': len(self.executing_connections),
'idle': len(self.idle_connections),
'locked': len(self.busy_connections)
},
'exceptions': sum([c.exceptions
for c in self.connections.values()]),
'executions': sum([c.executions
for c in self.connections.values()]),
'full': self.is_full,
'idle': {
'duration': self.idle_duration,
'ttl': self.idle_ttl
},
'max_size': self.max_size
} | python | {
"resource": ""
} |
q386 | Pool.shutdown | train | def shutdown(self):
"""Forcefully shutdown the entire pool, closing all non-executing
connections.
:raises: ConnectionBusyError
"""
with self._lock:
for cid in list(self.connections.keys()):
if self.connections[cid].executing:
raise ConnectionBusyError(cid)
self.connections[cid].close()
del self.connections[cid] | python | {
"resource": ""
} |
q387 | PoolManager.add | train | def add(cls, pid, connection):
"""Add a new connection and session to a pool.
:param str pid: The pool id
:type connection: psycopg2.extensions.connection
:param connection: The connection to add to the pool
"""
with cls._lock:
cls._ensure_pool_exists(pid)
cls._pools[pid].add(connection) | python | {
"resource": ""
} |
q388 | PoolManager.clean | train | def clean(cls, pid):
"""Clean the specified pool, removing any closed connections or
stale locks.
:param str pid: The pool id to clean
"""
with cls._lock:
try:
cls._ensure_pool_exists(pid)
except KeyError:
LOGGER.debug('Pool clean invoked against missing pool %s', pid)
return
cls._pools[pid].clean()
cls._maybe_remove_pool(pid) | python | {
"resource": ""
} |
q389 | PoolManager.create | train | def create(cls, pid, idle_ttl=DEFAULT_IDLE_TTL, max_size=DEFAULT_MAX_SIZE,
time_method=None):
"""Create a new pool, with the ability to pass in values to override
the default idle TTL and the default maximum size.
A pool's idle TTL defines the amount of time that a pool can be open
without any sessions before it is removed.
A pool's max size defines the maximum number of connections that can
be added to the pool to prevent unbounded open connections.
:param str pid: The pool ID
:param int idle_ttl: Time in seconds for the idle TTL
:param int max_size: The maximum pool size
:param callable time_method: Optional override of the default time.time
:raises: KeyError
"""
if pid in cls._pools:
raise KeyError('Pool %s already exists' % pid)
with cls._lock:
LOGGER.debug('Creating Pool: %s (%i/%i)', pid, idle_ttl, max_size)
cls._pools[pid] = Pool(pid, idle_ttl, max_size, time_method) | python | {
"resource": ""
} |
q390 | PoolManager.free | train | def free(cls, pid, connection):
"""Free a connection that was locked by a session
:param str pid: The pool ID
:param connection: The connection to remove
:type connection: psycopg2.extensions.connection
"""
with cls._lock:
LOGGER.debug('Freeing %s from pool %s', id(connection), pid)
cls._ensure_pool_exists(pid)
cls._pools[pid].free(connection) | python | {
"resource": ""
} |
q391 | PoolManager.get | train | def get(cls, pid, session):
"""Get an idle, unused connection from the pool. Once a connection has
been retrieved, it will be marked as in-use until it is freed.
:param str pid: The pool ID
:param queries.Session session: The session to assign to the connection
:rtype: psycopg2.extensions.connection
"""
with cls._lock:
cls._ensure_pool_exists(pid)
return cls._pools[pid].get(session) | python | {
"resource": ""
} |
q392 | PoolManager.has_connection | train | def has_connection(cls, pid, connection):
"""Check to see if a pool has the specified connection
:param str pid: The pool ID
:param connection: The connection to check for
:type connection: psycopg2.extensions.connection
:rtype: bool
"""
with cls._lock:
cls._ensure_pool_exists(pid)
return connection in cls._pools[pid] | python | {
"resource": ""
} |
q393 | PoolManager.has_idle_connection | train | def has_idle_connection(cls, pid):
"""Check to see if a pool has an idle connection
:param str pid: The pool ID
:rtype: bool
"""
with cls._lock:
cls._ensure_pool_exists(pid)
return bool(cls._pools[pid].idle_connections) | python | {
"resource": ""
} |
q394 | PoolManager.is_full | train | def is_full(cls, pid):
"""Return a bool indicating if the specified pool is full
:param str pid: The pool id
:rtype: bool
"""
with cls._lock:
cls._ensure_pool_exists(pid)
return cls._pools[pid].is_full | python | {
"resource": ""
} |
q395 | PoolManager.lock | train | def lock(cls, pid, connection, session):
"""Explicitly lock the specified connection in the pool
:param str pid: The pool id
:type connection: psycopg2.extensions.connection
:param connection: The connection to lock
:param queries.Session session: The session to hold the lock
"""
with cls._lock:
cls._ensure_pool_exists(pid)
cls._pools[pid].lock(connection, session) | python | {
"resource": ""
} |
q396 | PoolManager.remove | train | def remove(cls, pid):
"""Remove a pool, closing all connections
:param str pid: The pool ID
"""
with cls._lock:
cls._ensure_pool_exists(pid)
cls._pools[pid].close()
del cls._pools[pid] | python | {
"resource": ""
} |
q397 | PoolManager.remove_connection | train | def remove_connection(cls, pid, connection):
"""Remove a connection from the pool, closing it if is open.
:param str pid: The pool ID
:param connection: The connection to remove
:type connection: psycopg2.extensions.connection
"""
cls._ensure_pool_exists(pid)
cls._pools[pid].remove(connection) | python | {
"resource": ""
} |
q398 | PoolManager.set_idle_ttl | train | def set_idle_ttl(cls, pid, ttl):
"""Set the idle TTL for a pool, after which it will be destroyed.
:param str pid: The pool id
:param int ttl: The TTL for an idle pool
"""
with cls._lock:
cls._ensure_pool_exists(pid)
cls._pools[pid].set_idle_ttl(ttl) | python | {
"resource": ""
} |
q399 | PoolManager.set_max_size | train | def set_max_size(cls, pid, size):
"""Set the maximum number of connections for the specified pool
:param str pid: The pool to set the size for
:param int size: The maximum number of connections
"""
with cls._lock:
cls._ensure_pool_exists(pid)
cls._pools[pid].set_max_size(size) | python | {
"resource": ""
} |
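Putting the manager API together, a hedged end-to-end sketch (the psycopg2 DSN and the stand-in session object are illustrative; `queries.Session` normally plays that role):

```python
# Hedged lifecycle sketch for the PoolManager API above.
import psycopg2

class DummySession:
    """Stands in for queries.Session; Connection.lock keeps a weakref to it."""

pid = 'postgresql://localhost/demo'
PoolManager.create(pid, idle_ttl=60, max_size=10)
PoolManager.add(pid, psycopg2.connect('dbname=demo'))

session = DummySession()
conn = PoolManager.get(pid, session)  # locks an idle connection
# ... run queries on conn ...
PoolManager.free(pid, conn)           # release it back to the pool
PoolManager.remove(pid)               # close and discard the whole pool
```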