section stringlengths 2 30 | filename stringlengths 1 82 | text stringlengths 783 28M |
|---|---|---|
imageserver | views | #
# Quru Image Server
#
# Document: views.py
# Date started: 04 Apr 2011
# By: Matt Fozard
# Purpose: Raw image handling URLs and views
# Requires: Flask
# Copyright: Quru Ltd (www.quru.com)
# Licence:
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see http://www.gnu.org/licenses/
#
# Last Changed: $Date$ $Rev$ by $Author$
#
# Notable modifications:
# Date By Details
# ========= ==== ============================================================
# 25 Nov 14 Matt v1.17 Fix ETags and implement HTTP 304
# 04 Feb 15 Matt Raise HTTP 415 for invalid images instead of HTTP 500
# 25 Mar 15 Matt v1.27 Raise HTTP 503 for unresponsive image requests
#
import io
import time
import flask
import werkzeug.exceptions as httpexc
from flask import make_response, request, send_file
from .errors import (
DBError,
DoesNotExistError,
ImageError,
SecurityError,
ServerTooBusyError,
)
from .filesystem_manager import path_exists
from .filesystem_sync import on_image_db_create_anon_history
from .flask_app import (
app,
data_engine,
image_engine,
logger,
permissions_engine,
stats_engine,
)
from .image_attrs import ImageAttrs
from .models import FolderPermission
from .session_manager import get_session_user
from .session_manager import logged_in as session_logged_in
from .util import (
default_value,
etag,
filepath_parent,
invoke_http_async,
parse_boolean,
parse_colour,
parse_float,
parse_int,
parse_tile_spec,
unicode_to_utf8,
validate_string,
)
from .views_util import log_security_error, safe_error_str
# Requires "EnableSendfile On" in the Apache conf,
# but hasn't (yet) been seen to deliver any performance improvement.
_USE_SENDFILE = False
# eRez compatibility URLs for raw image serving
@app.route("/erez/erez", methods=["GET"])
def erez_compat():
return image()
@app.route("/erez1/erez", methods=["GET"])
def erez1_compat():
return image()
@app.route("/erez2/erez", methods=["GET"])
def erez2_compat():
return image()
@app.route("/erez3/erez", methods=["GET"])
def erez3_compat():
return image()
@app.route("/erez4/erez", methods=["GET"])
def erez4_compat():
return image()
@app.route("/erez5/erez", methods=["GET"])
def erez5_compat():
return image()
# Raw image serving
@app.route("/image", methods=["GET"])
def image():
logger.debug(request.method + " " + request.url)
try:
logged_in = session_logged_in()
allow_uncache = app.config["BENCHMARKING"] or app.config["DEBUG"]
args = request.args
# Get URL parameters for the image
src = args.get("src", "")
page = args.get("page", None)
iformat = args.get("format", None)
template = args.get("tmp", None)
width = args.get("width", None)
height = args.get("height", None)
halign = args.get("halign", None)
valign = args.get("valign", None)
autosizefit = args.get("autosizefit", None)
rotation = args.get("angle", None)
flip = args.get("flip", None)
top = args.get("top", None)
left = args.get("left", None)
bottom = args.get("bottom", None)
right = args.get("right", None)
autocropfit = args.get("autocropfit", None)
fill = args.get("fill", None)
quality = args.get("quality", None)
sharpen = args.get("sharpen", None)
ov_src = args.get("overlay", None)
ov_size = args.get("ovsize", None)
ov_opacity = args.get("ovopacity", None)
ov_pos = args.get("ovpos", None)
icc_profile = args.get("icc", None)
icc_intent = args.get("intent", None)
icc_bpc = args.get("bpc", None)
colorspace = args.get("colorspace", None)
strip = args.get("strip", None)
dpi = args.get("dpi", None)
tile = args.get("tile", None)
# Get URL parameters for handling options
attach = args.get("attach", None)
xref = args.get("xref", None)
stats = args.get("stats", None)
# Get protected admin/internal parameters
cache = args.get("cache", "1") if logged_in or allow_uncache else "1"
recache = args.get("recache", None) if allow_uncache else None
# eRez compatibility mode
src = erez_params_compat(src)
# Convert non-string parameters to the correct data types
try:
# Image options
if page is not None:
page = parse_int(page)
if width is not None:
width = parse_int(width)
if height is not None:
height = parse_int(height)
if autosizefit is not None:
autosizefit = parse_boolean(autosizefit)
if rotation is not None:
rotation = parse_float(rotation)
if top is not None:
top = parse_float(top)
if left is not None:
left = parse_float(left)
if bottom is not None:
bottom = parse_float(bottom)
if right is not None:
right = parse_float(right)
if autocropfit is not None:
autocropfit = parse_boolean(autocropfit)
if fill is not None:
fill = parse_colour(fill)
if quality is not None:
quality = parse_int(quality)
if sharpen is not None:
sharpen = parse_int(sharpen)
if ov_size is not None:
ov_size = parse_float(ov_size)
if ov_opacity is not None:
ov_opacity = parse_float(ov_opacity)
if icc_bpc is not None:
icc_bpc = parse_boolean(icc_bpc)
if strip is not None:
strip = parse_boolean(strip)
if dpi is not None:
dpi = parse_int(dpi)
if tile is not None:
tile = parse_tile_spec(tile)
# Handling options
if attach is not None:
attach = parse_boolean(attach)
if xref is not None:
validate_string(xref, 0, 1024)
if stats is not None:
stats = parse_boolean(stats)
# Admin/internal options
if cache is not None:
cache = parse_boolean(cache)
if recache is not None:
recache = parse_boolean(recache)
except (ValueError, TypeError) as e:
raise httpexc.BadRequest(safe_error_str(e))
# Package and validate the parameters
try:
# #2694 Enforce public image limits - perform easy parameter checks
if not logged_in:
width, height, autosizefit = _public_image_limits_pre_image_checks(
width, height, autosizefit, tile, template
)
# Store and normalise all the parameters
image_attrs = ImageAttrs(
src,
-1,
page,
iformat,
template,
width,
height,
halign,
valign,
rotation,
flip,
top,
left,
bottom,
right,
autocropfit,
autosizefit,
fill,
quality,
sharpen,
ov_src,
ov_size,
ov_pos,
ov_opacity,
icc_profile,
icc_intent,
icc_bpc,
colorspace,
strip,
dpi,
tile,
)
image_engine.finalise_image_attrs(image_attrs)
except ValueError as e:
raise httpexc.BadRequest(safe_error_str(e))
# Get/create the database ID (from cache, validating path on create)
image_id = data_engine.get_or_create_image_id(
image_attrs.filename(),
return_deleted=False,
on_create=on_image_db_create_anon_history,
)
if image_id == 0:
raise DoesNotExistError() # Deleted
elif image_id < 0:
raise DBError("Failed to add image to database")
image_attrs.set_database_id(image_id)
# Require view permission or file admin
permissions_engine.ensure_folder_permitted(
image_attrs.folder_path(), FolderPermission.ACCESS_VIEW, get_session_user()
)
# Ditto for overlays
if ov_src:
permissions_engine.ensure_folder_permitted(
filepath_parent(ov_src),
FolderPermission.ACCESS_VIEW,
get_session_user(),
)
# v1.17 If this is a conditional request with an ETag, see if we can just return a 304
if "If-None-Match" in request.headers and not recache:
etag_valid, modified_time = _etag_is_valid(
image_attrs, request.headers["If-None-Match"], False
)
if etag_valid:
# Success HTTP 304
return make_304_response(image_attrs, False, modified_time)
# Get the requested image data
image_wrapper = image_engine.get_image(
image_attrs, "refresh" if recache else cache
)
if image_wrapper is None:
raise DoesNotExistError()
# #2694 Enforce public image limits - check the dimensions
# of images that passed the initial parameter checks
if not logged_in:
try:
_public_image_limits_post_image_checks(
image_attrs.width(),
image_attrs.height(),
image_attrs.template(),
image_wrapper.data(),
image_wrapper.attrs().format(),
)
except ValueError as e:
raise httpexc.BadRequest(safe_error_str(e)) # As for the pre-check
# Success HTTP 200
return make_image_response(image_wrapper, False, stats, attach, xref)
except httpexc.HTTPException:
# Pass through HTTP 4xx and 5xx
raise
except ServerTooBusyError:
logger.warning("503 Too busy for " + request.url)
raise httpexc.ServiceUnavailable()
except ImageError as e:
logger.warning("415 Invalid image file '" + src + "' : " + str(e))
raise httpexc.UnsupportedMediaType(safe_error_str(e))
except SecurityError as e:
if app.config["DEBUG"]:
raise
log_security_error(e, request)
raise httpexc.Forbidden()
except DoesNotExistError as e:
# First time around the ID will be set. Next time around it
# won't but we should check whether the disk file now exists.
if image_attrs.database_id() > 0 or path_exists(
image_attrs.filename(), require_file=True
):
image_engine.reset_image(image_attrs)
logger.warning("404 Not found: " + str(e))
raise httpexc.NotFound(safe_error_str(e))
except Exception as e:
if app.config["DEBUG"]:
raise
logger.error("500 Error for " + request.url + "\n" + str(e))
raise httpexc.InternalServerError(safe_error_str(e))
# Raw image serving - return the original unaltered image
@app.route("/original", methods=["GET"])
def original():
logger.debug("GET " + request.url)
try:
# Get URL parameters for the image
src = request.args.get("src", "")
# Get URL parameters for handling options
attach = request.args.get("attach", None)
xref = request.args.get("xref", None)
stats = request.args.get("stats", None)
# Validate the parameters
try:
if attach is not None:
attach = parse_boolean(attach)
if xref is not None:
validate_string(xref, 0, 1024)
if stats is not None:
stats = parse_boolean(stats)
image_attrs = ImageAttrs(src)
image_attrs.validate()
except ValueError as e:
raise httpexc.BadRequest(safe_error_str(e))
# Get/create the database ID (from cache, validating path on create)
image_id = data_engine.get_or_create_image_id(
image_attrs.filename(),
return_deleted=False,
on_create=on_image_db_create_anon_history,
)
if image_id == 0:
raise DoesNotExistError() # Deleted
elif image_id < 0:
raise DBError("Failed to add image to database")
image_attrs.set_database_id(image_id)
# Require download permission or file admin
permissions_engine.ensure_folder_permitted(
image_attrs.folder_path(),
FolderPermission.ACCESS_DOWNLOAD,
get_session_user(),
)
# v1.17 If this is a conditional request with an ETag, see if we can just return a 304
if "If-None-Match" in request.headers:
etag_valid, modified_time = _etag_is_valid(
image_attrs, request.headers["If-None-Match"], True
)
if etag_valid:
# Success HTTP 304
return make_304_response(image_attrs, True, modified_time)
# Read the image file
image_wrapper = image_engine.get_image_original(image_attrs)
if image_wrapper is None:
raise DoesNotExistError()
# Success HTTP 200
return make_image_response(image_wrapper, True, stats, attach, xref)
except httpexc.HTTPException:
# Pass through HTTP 4xx and 5xx
raise
except ServerTooBusyError:
logger.warning("503 Too busy for " + request.url)
raise httpexc.ServiceUnavailable()
except ImageError as e:
logger.warning("415 Invalid image file '" + src + "' : " + str(e))
raise httpexc.UnsupportedMediaType(safe_error_str(e))
except SecurityError as e:
if app.config["DEBUG"]:
raise
log_security_error(e, request)
raise httpexc.Forbidden()
except DoesNotExistError as e:
# First time around the ID will be set. Next time around it
# won't but we should check whether the disk file now exists.
if image_attrs.database_id() > 0 or path_exists(
image_attrs.filename(), require_file=True
):
image_engine.reset_image(image_attrs)
logger.warning("404 Not found: " + src)
raise httpexc.NotFound(src)
except Exception as e:
if app.config["DEBUG"]:
raise
logger.error("500 Error for " + request.url + "\n" + str(e))
raise httpexc.InternalServerError(safe_error_str(e))
def erez_params_compat(src):
    """
    Performs adjustments to URL parameters to provide compatibility with eRez.

    eRez appends a ".tif"/".tiff" suffix to file paths that already carry a
    real extension; when another dot appears in the 6 characters preceding
    the suffix, the suffix is dropped. Returns the (possibly adjusted) path.
    """
    # (suffix, window just before the suffix to scan for an earlier dot)
    for suffix, window in ((".tif", slice(-10, -4)), (".tiff", slice(-11, -5))):
        if src.endswith(suffix) and "." in src[window]:
            return src[: -len(suffix)]
    return src
def handle_image_xref(xref):
    """
    Invokes the configured 3rd party tracking URL (XREF_TRACKING_URL setting,
    if any) for the given tracking reference, asynchronously. Does nothing
    when either the reference or the setting is empty.
    """
    tracking_url = app.config["XREF_TRACKING_URL"]
    if not xref or not tracking_url:
        return
    if not tracking_url.startswith("http"):
        logger.warning("XREF_TRACKING_URL must begin with http or https")
        return
    # Fire and forget; failures are logged, successes only in debug mode
    invoke_http_async(
        tracking_url + xref,
        log_success_fn=logger.debug if app.config["DEBUG"] else None,
        log_fail_fn=logger.error,
    )
def make_image_response(
    image_wrapper, is_original, stats=None, as_attachment=None, xref=None
):
    """
    Returns a Flask response object for the given image and response options,
    handles the tracking ID if there is one, and writes view statistics for the
    image.
    image_wrapper - An ImageWrapper containing the image data to return.
    is_original - Whether to count this response as a "download original" function.
                  If True, logs download statistics instead of a view.
    stats - Optional override for whether to enable or disable image statistics.
            Uses the setting in image_wrapper when None.
    as_attachment - Optional override for whether to provide the
                    Content-Disposition HTTP header (with filename).
                    Uses the setting in image_wrapper when None.
    xref - Optional external URL to call.
    """
    image_attrs = image_wrapper.attrs()
    # Process xref if there is one
    if xref:
        handle_image_xref(xref)
    # Create the HTTP response
    if _USE_SENDFILE:
        # See the module-level note on _USE_SENDFILE
        response = send_file(io.BytesIO(image_wrapper.data()), image_attrs.mime_type())
    else:
        response = make_response(image_wrapper.data())
        response.mimetype = image_attrs.mime_type()
    # Set the browser caching headers
    _add_http_caching_headers(
        response,
        image_attrs,
        image_wrapper.last_modified_time(),
        image_wrapper.client_expiry_time(),
    )
    # Set custom cache info header
    response.headers["X-From-Cache"] = str(image_wrapper.is_from_cache())
    # URL attachment param overrides what the returned object wants
    attach = (
        as_attachment if as_attachment is not None else image_wrapper.is_attachment()
    )
    # Originals and attachments get a Content-Disposition with the file name
    if is_original or attach:
        fname = image_attrs.filename(with_path=False, replace_format=True)
        fname = unicode_to_utf8(fname)
        cd_type = "attachment" if attach else "inline"
        response.headers["Content-Disposition"] = cd_type + '; filename="' + fname + '"'
    if app.config["DEBUG"]:
        logger.debug(
            "Sending "
            + str(len(image_wrapper.data()))
            + " bytes for "
            + str(image_attrs)
        )
    # Record view/download statistics (stats param overrides the wrapper)
    _log_stats(
        image_attrs.database_id(),
        len(image_wrapper.data()),
        is_original,
        image_wrapper.is_from_cache(),
        image_wrapper.record_stats() if stats is None else stats,
    )
    return response
def make_304_response(image_attrs, is_original, last_modified_time):
    """
    Returns a HTTP 304 "Not Modified" Flask response object for the given image.
    image_attrs - An ImageAttrs containing the image specification.
    is_original - Whether to count this response as a "download original" function.
                  If True, logs download statistics instead of a view.
    last_modified_time - The image's last modification time as number of
                         seconds since the epoch.
    """
    # Create a blank response with no content
    response = flask.Response(status=304)
    # We have to set the same caching headers again
    # http://stackoverflow.com/a/4393499/1671320
    _add_http_caching_headers(
        response,
        image_attrs,
        last_modified_time,
        # Expiry comes from the image's template, falling back to the default
        default_value(
            image_engine.get_image_template(image_attrs).expiry_secs(),
            image_engine.DEFAULT_EXPIRY_SECS,
        ),
    )
    if app.config["DEBUG"]:
        logger.debug("Sending 304 Not Modified for " + str(image_attrs))
    # Data length 0 records this as a 'Not Modified' request in the stats
    _log_stats(image_attrs.database_id(), 0, is_original, False)
    return response
def _add_http_caching_headers(response, image_attrs, last_modified_time, expiry_seconds):
    """
    Sets the standard client-side cache control headers expected for an HTTP
    200 or 304 response. The last modified time should be given as number of
    seconds since the epoch. The expiry time is as described for ImageWrapper:
    positive to cache for that many seconds, 0 to send no cache directives,
    negative to require revalidation on every use (no ETag is set then).
    """
    # Werkzeug converts these values to the correct HTTP header formats
    response.date = time.time()
    if expiry_seconds > 0:
        # Cacheable for a fixed period
        response.cache_control.public = True
        response.cache_control.max_age = expiry_seconds
        response.expires = int(time.time() + expiry_seconds)
    elif expiry_seconds < 0:
        # Cacheable but the client must revalidate every time
        response.cache_control.public = True
        response.cache_control.no_cache = True
        response.expires = 0
    if expiry_seconds >= 0:
        response.headers["ETag"] = etag(
            str(last_modified_time), image_attrs.get_cache_key()
        )
def _etag_is_valid(image_attrs, check_etag, is_original):
    """
    Returns a tuple of (True, last_modified_time) if the current ETag for the
    image described by image_attrs matches the given ETag.
    Returns (False, new_modified_time) if the current ETag value is different.
    """
    if is_original:
        modified_time = image_engine.get_image_original_modified_time(image_attrs)
    else:
        modified_time = image_engine.get_image_modified_time(image_attrs)
    if modified_time == 0:
        # Modified time unknown - return False so the caller re-generates
        # the image, which re-stores the modified time
        return (False, 0)
    expected_etag = etag(str(modified_time), image_attrs.get_cache_key())
    return (expected_etag == check_etag, modified_time)
def _public_image_limits_pre_image_checks(
    req_width, req_height, req_autosizefit, req_tile, req_template
):
    """
    To be called when no one is logged in, enforces the image dimension limits
    defined by the PUBLIC_MAX_IMAGE_WIDTH and PUBLIC_MAX_IMAGE_HEIGHT settings.
    If a template is specified, the template dimensions take precedence.
    Or if no dimensions were requested, returns default value(s) for them.
    Returns a tuple of replacement (width, height, autosizefit) values that
    should be used for the rest of the image request.
    Raises a ValueError if the requested image dimensions would exceed the
    defined limits.
    """
    limit_w = app.config["PUBLIC_MAX_IMAGE_WIDTH"] or 0
    limit_h = app.config["PUBLIC_MAX_IMAGE_HEIGHT"] or 0
    # Tile requests are exempt (a tile is a fraction of the full image)
    if (limit_w or limit_h) and req_tile is None:
        logger.debug(
            "Public image limits, checking parameters vs %d x %d limit"
            % (limit_w, limit_h)
        )
        # For v1 only, v2 will get these from a default template
        default_w = limit_w
        default_h = limit_h
        # If we're using a template, get the template dimensions
        template_w = 0
        template_h = 0
        if req_template:
            try:
                templ = image_engine.get_template(req_template)
                template_w = templ.get_image_attrs().width() or 0
                template_h = templ.get_image_attrs().height() or 0
            except ValueError:
                # Validation (yet to come) will reject the bad template name
                pass
        # v1.32.1 - if template contradicts the limit, template takes precedence
        if limit_w and template_w and template_w > limit_w:
            limit_w = template_w
        if limit_h and template_h and template_h > limit_h:
            limit_h = template_h
        # Check the requested size vs the limits
        if req_width and limit_w and req_width > limit_w:
            raise ValueError("width: exceeds public image limit")
        if req_height and limit_h and req_height > limit_h:
            raise ValueError("height: exceeds public image limit")
        # Check if we need to size-limit an otherwise unlimited image request
        # Note: In v2 this will be done with new default values in ImageAttrs
        if not req_width and not req_height and not template_w and not template_h:
            req_width = default_w if default_w else 0
            req_height = default_h if default_h else 0
            # Unless explicitly set otherwise, prevent padding
            if req_width and req_height and req_autosizefit is None:
                req_autosizefit = True
            logger.debug(
                "Public image limits, unsized image set as %d x %d"
                % (req_width, req_height)
            )
    return req_width, req_height, req_autosizefit
def _public_image_limits_post_image_checks(
    req_width, req_height, req_template, image_data, image_format
):
    """
    To be called when no one is logged in, checks that the image actually
    generated conforms to the limits defined by the PUBLIC_MAX_IMAGE_WIDTH
    and PUBLIC_MAX_IMAGE_HEIGHT settings.
    As an optimisation, this function only has any effect for the conditions
    that would not have been caught by the "pre-image" checks.
    Specifically, this is when either:
    * only PUBLIC_MAX_IMAGE_WIDTH is set, but only an image height was given
    or
    * only PUBLIC_MAX_IMAGE_HEIGHT is set, but only an image width was given
    Raises a ValueError if the generated image dimensions have exceeded the
    defined limits.
    """
    # Template-based requests are handled fully by the pre-image checks
    if not req_template:
        limit_w = app.config["PUBLIC_MAX_IMAGE_WIDTH"] or 0
        limit_h = app.config["PUBLIC_MAX_IMAGE_HEIGHT"] or 0
        # We have to inspect the image, so only do this for the 2 conditions
        # that the pre-image checks couldn't do
        if (limit_w and not limit_h and req_height and not req_width) or (
            limit_h and not limit_w and req_width and not req_height
        ):
            logger.debug("Public image limits, checking generated image dimensions")
            image_w, image_h = image_engine.get_image_data_dimensions(
                image_data, image_format
            )
            logger.debug(
                "Public image limits, generated image is %d x %d" % (image_w, image_h)
            )
            # Dimensions of 0 mean the size could not be determined - skip
            if image_w and image_h:
                if limit_w and image_w > limit_w:
                    raise ValueError("width: exceeds public image limit")
                if limit_h and image_h > limit_h:
                    raise ValueError("height: exceeds public image limit")
def _log_stats(image_id, data_len, is_original, from_cache, write_image_stats=True):
    """
    Logs statistics about an image request/response with the stats manager.
    Specify an image ID of 0 to update only the system statistics.
    Specify a data length of 0 for 'Not Modified' responses.
    The write_image_stats flag is passed straight through to the stats manager.
    """
    # Request duration, when the request start time was recorded in flask.g
    duration_secs = 0
    if "request_started" in flask.g:
        duration_secs = time.time() - flask.g.request_started
    if data_len <= 0:
        # A 'Not Modified' (or headers-only) response
        stats_engine.log_request(image_id, duration_secs, write_image_stats)
    elif is_original:
        stats_engine.log_download(
            image_id, data_len, duration_secs, write_image_stats
        )
    else:
        stats_engine.log_view(
            image_id, data_len, from_cache, duration_secs, write_image_stats
        )
|
streamlink-cli | streamrunner | import errno
import logging
import sys
from contextlib import suppress
from pathlib import Path
from threading import Event, Lock, Thread
from typing import Optional
from streamlink.stream.stream import StreamIO
from streamlink_cli.output import FileOutput, HTTPOutput, Output, PlayerOutput
from streamlink_cli.utils.progress import Progress
# Use the main Streamlink CLI module as logger
log = logging.getLogger("streamlink.cli")
ACCEPTABLE_ERRNO = errno.EPIPE, errno.EINVAL, errno.ECONNRESET
with suppress(AttributeError):
ACCEPTABLE_ERRNO += (errno.WSAECONNABORTED,) # type: ignore[assignment,attr-defined]
def _noop(_):
return None
class _ReadError(BaseException):
    """
    Internal marker raised when stream.read() fails, so that read errors can
    be distinguished from write errors inside StreamRunner.run().
    Derives from BaseException so it cannot be caught by the OSError handler.
    """
    pass
class PlayerPollThread(Thread):
    """
    Poll the player process in a separate thread, to isolate it from the stream's read-loop in the main thread.
    Reading the stream can stall indefinitely when filtering content.
    """

    # Seconds between player process liveness checks
    POLLING_INTERVAL: float = 0.5

    def __init__(self, stream: StreamIO, output: PlayerOutput):
        super().__init__(daemon=True, name=self.__class__.__name__)
        self._stream = stream
        self._output = output
        self._stop_polling = Event()
        self._lock = Lock()

    def close(self):
        """Stop the polling loop (does not close the stream or the player)."""
        self._stop_polling.set()

    def playerclosed(self):
        # Ensure that "Player closed" does only get logged once, either when writing the read stream data has failed,
        # or when the player process was terminated/killed before writing.
        with self._lock:
            if self._stop_polling.is_set():
                return
            self.close()
            log.info("Player closed")

    def poll(self) -> bool:
        """Return True while the player process is still running."""
        return self._output.player.poll() is None

    def run(self) -> None:
        while not self._stop_polling.wait(self.POLLING_INTERVAL):
            if self.poll():
                continue
            self.playerclosed()
            # close stream as soon as the player was closed
            self._stream.close()
            break
class StreamRunner:
    """Read data from a stream and write it to the output."""

    # Set in __init__ only for PlayerOutput
    playerpoller: Optional[PlayerPollThread] = None
    # Set in __init__ only when recording/writing to a file with show_progress
    progress: Optional[Progress] = None

    def __init__(
        self,
        stream: StreamIO,
        output: Output,
        show_progress: bool = False,
    ):
        """
        stream - the stream to read from
        output - where the data goes (player, file or HTTP)
        show_progress - write a progress display to stderr when outputting
                        to a file (including a player/file "record" target)
        """
        self.stream = stream
        self.output = output
        # HTTP outputs treat broken-pipe style errors as a client disconnect
        self.is_http = isinstance(output, HTTPOutput)

        filename: Optional[Path] = None
        if isinstance(output, PlayerOutput):
            self.playerpoller = PlayerPollThread(stream, output)
            if output.record:
                filename = output.record.filename
        elif isinstance(output, FileOutput):
            if output.filename:
                filename = output.filename
            elif output.record:
                filename = output.record.filename

        if filename and show_progress:
            self.progress = Progress(sys.stderr, filename)

    def run(
        self,
        prebuffer: bytes,
        chunk_size: int = 8192,
    ) -> None:
        """
        Write the prebuffer, then copy the stream to the output in
        chunk_size reads until the stream ends or an error occurs.
        Raises OSError with a descriptive message on read or write failure
        (unless the failure is an acceptable player/HTTP disconnect).
        """
        read = self.stream.read
        write = self.output.write
        progress = self.progress.write if self.progress else _noop

        if self.playerpoller:
            self.playerpoller.start()
        if self.progress:
            self.progress.start()

        # TODO: Fix error messages (s/when/while/) and only log "Stream ended" when it ended on its own (data == b"").
        #       These are considered breaking changes of the CLI output, which is parsed by 3rd party tools.
        try:
            write(prebuffer)
            progress(prebuffer)
            # Release the reference so the prebuffer memory can be reclaimed
            del prebuffer

            # Don't check for stream.closed, so the buffer's contents can be fully read after the stream ended or was closed
            while True:
                try:
                    data = read(chunk_size)
                    if data == b"":
                        break
                except OSError as err:
                    # Re-raised as _ReadError (a BaseException) so it is not
                    # swallowed by the OSError write-error handler below
                    raise _ReadError() from err

                write(data)
                progress(data)

        except _ReadError as err:
            raise OSError(f"Error when reading from stream: {err.__context__}, exiting") from err.__context__
        except OSError as err:
            if self.playerpoller and err.errno in ACCEPTABLE_ERRNO:
                self.playerpoller.playerclosed()
            elif self.is_http and err.errno in ACCEPTABLE_ERRNO:
                log.info("HTTP connection closed")
            else:
                raise OSError(f"Error when writing to output: {err}, exiting") from err
        finally:
            if self.playerpoller:
                self.playerpoller.close()
                self.playerpoller.join()
            if self.progress:
                self.progress.close()
                self.progress.join()

            self.stream.close()
            log.info("Stream ended")
|
extractor | dlive | from __future__ import unicode_literals
import json
import re
from ..utils import int_or_none
from .common import InfoExtractor
class DLiveVODIE(InfoExtractor):
    """Extractor for past broadcasts (VODs) at dlive.tv/p/<uploader>+<id>."""

    IE_NAME = "dlive:vod"
    _VALID_URL = (
        r"https?://(?:www\.)?dlive\.tv/p/(?P<uploader_id>.+?)\+(?P<id>[^/?#&]+)"
    )
    _TESTS = [
        {
            "url": "https://dlive.tv/p/pdp+3mTzOl4WR",
            "info_dict": {
                "id": "3mTzOl4WR",
                "ext": "mp4",
                "title": "Minecraft with james charles epic",
                "upload_date": "20190701",
                "timestamp": 1562011015,
                "uploader_id": "pdp",
            },
        },
        {
            "url": "https://dlive.tv/p/pdpreplay+D-RD-xSZg",
            "only_matching": True,
        },
    ]

    def _real_extract(self, url):
        # URL path segment is "<uploader_id>+<vod_id>"
        uploader_id, vod_id = re.match(self._VALID_URL, url).groups()
        # Metadata comes from dlive's GraphQL endpoint
        broadcast = self._download_json(
            "https://graphigo.prd.dlive.tv/",
            vod_id,
            data=json.dumps(
                {
                    "query": """query {
  pastBroadcast(permlink:"%s+%s") {
    content
    createdAt
    length
    playbackUrl
    title
    thumbnailUrl
    viewCount
  }
}"""
                    % (uploader_id, vod_id)
                }
            ).encode(),
        )["data"]["pastBroadcast"]
        title = broadcast["title"]
        formats = self._extract_m3u8_formats(
            broadcast["playbackUrl"], vod_id, "mp4", "m3u8_native"
        )
        self._sort_formats(formats)
        return {
            "id": vod_id,
            "title": title,
            "uploader_id": uploader_id,
            "formats": formats,
            "description": broadcast.get("content"),
            "thumbnail": broadcast.get("thumbnailUrl"),
            # createdAt is scaled by 1000 (presumably epoch milliseconds)
            "timestamp": int_or_none(broadcast.get("createdAt"), 1000),
            "view_count": int_or_none(broadcast.get("viewCount")),
        }
class DLiveStreamIE(InfoExtractor):
    """Extractor for live streams at dlive.tv/<display_name> (excludes /p/ VOD URLs)."""

    IE_NAME = "dlive:stream"
    _VALID_URL = r"https?://(?:www\.)?dlive\.tv/(?!p/)(?P<id>[\w.-]+)"

    def _real_extract(self, url):
        display_name = self._match_id(url)
        # Look up the user (and their current livestream) via GraphQL
        user = self._download_json(
            "https://graphigo.prd.dlive.tv/",
            display_name,
            data=json.dumps(
                {
                    "query": """query {
  userByDisplayName(displayname:"%s") {
    livestream {
      content
      createdAt
      title
      thumbnailUrl
      watchingCount
    }
    username
  }
}"""
                    % display_name
                }
            ).encode(),
        )["data"]["userByDisplayName"]
        livestream = user["livestream"]
        title = livestream["title"]
        # The HLS playlist is keyed by the internal username, not display name
        username = user["username"]
        formats = self._extract_m3u8_formats(
            "https://live.prd.dlive.tv/hls/live/%s.m3u8" % username, display_name, "mp4"
        )
        self._sort_formats(formats)
        return {
            "id": display_name,
            "title": self._live_title(title),
            "uploader": display_name,
            "uploader_id": username,
            "formats": formats,
            "description": livestream.get("content"),
            "thumbnail": livestream.get("thumbnailUrl"),
            "is_live": True,
            # createdAt is scaled by 1000 (presumably epoch milliseconds)
            "timestamp": int_or_none(livestream.get("createdAt"), 1000),
            "view_count": int_or_none(livestream.get("watchingCount")),
        }
|
cssutils | prodparser | # -*- coding: utf-8 -*-
"""Productions parser used by css and stylesheets classes to parse
test into a cssutils.util.Seq and at the same time retrieving
additional specific cssutils.util.Item objects for later use.
TODO:
- ProdsParser
- handle EOF or STOP?
- handle unknown @rules
- handle S: maybe save to Seq? parameterized?
- store['_raw']: always?
- Sequence:
- opt first(), naive impl for now
"""
__all__ = ['ProdParser', 'Sequence', 'Choice', 'Prod', 'PreDef']
__docformat__ = 'restructuredtext'
__version__ = '$Id: parse.py 1418 2008-08-09 19:27:50Z cthedot $'
import re
import string
import sys
import cssutils
from helper import pushtoken
class ParseError(Exception):
    """Base Exception class for ProdParser (used internally).

    These exceptions are flow-control signals between Prod/Sequence/Choice
    and the parser; they are not intended to escape to library users."""
    pass


class Done(ParseError):
    """Raised if Sequence or Choice is finished and no more Prods left."""
    pass


class Exhausted(ParseError):
    """Raised if Sequence or Choice is finished but token is given."""
    pass


class Missing(ParseError):
    """Raised if Sequence or Choice is not finished but no matching token given."""
    pass


class NoMatch(ParseError):
    """Raised if nothing in Sequence or Choice does match."""
    pass
class Choice(object):
    """A Choice of productions (Sequence or single Prod)."""
    def __init__(self, *prods, **options):
        """
        *prods
            Prod or Sequence objects
        options:
            optional=False
                If not given explicitly, the Choice is optional when any
                of its productions is optional.
        """
        self._prods = prods
        try:
            self.optional = options['optional']
        except KeyError, e:
            # No explicit setting: derive from the productions
            for p in self._prods:
                if p.optional:
                    self.optional = True
                    break
            else:
                self.optional = False
        self.reset()

    def reset(self):
        """Start Choice from zero"""
        self._exhausted = False

    def matches(self, token):
        """Check if token matches"""
        for prod in self._prods:
            if prod.matches(token):
                return True
        return False

    def nextProd(self, token):
        """
        Return:

        - next matching Prod or Sequence
        - ``None`` if any Prod or Sequence is optional and no token matched
        - raise ParseError if nothing matches and all are mandatory
        - raise Exhausted if choice already done

        ``token`` may be None but this occurs when no tokens left."""
        if not self._exhausted:
            optional = False
            for x in self._prods:
                if x.matches(token):
                    # A Choice yields at most one production
                    self._exhausted = True
                    x.reset()
                    return x
                elif x.optional:
                    optional = True
            else:
                # for-else: no production matched
                if not optional:
                    # None matched but also None is optional
                    raise ParseError(u'No match in %s' % self)
        elif token:
            raise Exhausted(u'Extra token')

    def __str__(self):
        return u'Choice(%s)' % u', '.join([str(x) for x in self._prods])
class Sequence(object):
    """A Sequence of productions (Choice or single Prod)."""
    def __init__(self, *prods, **options):
        """
        *prods
            Prod or Sequence objects
        **options:
            minmax = lambda: (1, 1)
                callback returning number of times this sequence may run
        """
        self._prods = prods
        try:
            minmax = options['minmax']
        except KeyError:
            minmax = lambda: (1, 1)
        self._min, self._max = minmax()
        if self._max is None:
            # unlimited
            try:
                # py2.6/3
                self._max = sys.maxsize
            except AttributeError:
                # py<2.6
                self._max = sys.maxint
        self._prodcount = len(self._prods)
        self.reset()

    def matches(self, token):
        """Called by Choice to try to find if Sequence matches."""
        # Only productions up to (and including) the first mandatory one
        # can start this Sequence
        for prod in self._prods:
            if prod.matches(token):
                return True
            try:
                if not prod.optional:
                    break
            except AttributeError:
                pass
        return False

    def reset(self):
        """Reset this Sequence if it is nested."""
        self._roundstarted = False
        self._i = 0
        self._round = 0

    def _currentName(self):
        """Return current element of Sequence, used by name"""
        # TODO: current impl first only if 1st if an prod!
        for prod in self._prods[self._i:]:
            if not prod.optional:
                return str(prod)
        else:
            return 'Sequence'

    # A Sequence is optional when it may run zero times
    optional = property(lambda self: self._min == 0)

    def nextProd(self, token):
        """Return

        - next matching Prod or Choice
        - raises ParseError if nothing matches
        - raises Exhausted if sequence already done
        """
        while self._round < self._max:
            # for this round
            i = self._i
            round = self._round
            p = self._prods[i]
            if i == 0:
                self._roundstarted = False

            # for next round
            self._i += 1
            if self._i == self._prodcount:
                self._round += 1
                self._i = 0

            if p.matches(token):
                self._roundstarted = True
                # reset nested Choice or Prod to use from start
                p.reset()
                return p
            elif p.optional:
                # skip the optional production and try the next one
                continue
            elif round < self._min:
                raise Missing(u'Missing token for production %s' % p)
            elif not token:
                if self._roundstarted:
                    # an extra round was begun but not completed
                    raise Missing(u'Missing token for production %s' % p)
                else:
                    raise Done()
            else:
                raise NoMatch(u'No matching production for token')

        if token:
            raise Exhausted(u'Extra token')

    def __str__(self):
        return u'Sequence(%s)' % u', '.join([str(x) for x in self._prods])
class Prod(object):
    """A single production inside a Sequence or Choice.

    Wraps a ``match`` predicate together with flags controlling how a
    matched token is appended to the sequence/store and whether parsing
    stops at it.
    """

    def __init__(self, name, match, optional=False,
                 toSeq=None, toStore=None,
                 stop=False, stopAndKeep=False,
                 nextSor=False, mayEnd=False,
                 storeToken=None,
                 exception=None):
        """
        name
            used for error reporting
        match
            callback called with (tokentype, tokenvalue), returning
            True/False or raising ParseError
        optional
            whether this Prod may be skipped
        toSeq
            optional callback or ``False``; ``toSeq(token, tokens)``
            returns the (type_, val) appended to the sequence, otherwise
            the unaltered (type_, val) is used; ``False`` appends nothing
        toStore
            key under which a util.Item is saved to the store, or a
            callback(store, item)
        stop
            if True stop parsing of tokens here
        stopAndKeep
            if True stop parsing here but push the stopping token back
            into the unused tokens
        nextSor
            next is S or an operator like ``,`` or ``/`` (CSSValue)
        mayEnd
            no token must follow even if defined by the Sequence; used
            for the operators ',/ ' currently only
        storeToken
            if True toStore saves the raw token tuple instead of an Item
            object (old style processing, TODO: resolve)
        exception
            exception to raise on error, normally SyntaxErr
        """
        self._name = name
        self.match = match
        self.optional = optional
        self.stop = stop
        self.stopAndKeep = stopAndKeep
        self.nextSor = nextSor
        self.mayEnd = mayEnd
        self.storeToken = storeToken
        self.exception = exception

        # ``False`` suppresses appending, a callable transforms; anything
        # else (i.e. None) falls back to the identity transform
        if toSeq is False or toSeq:
            self.toSeq = toSeq
        else:
            self.toSeq = lambda t, tokens: (t[0], t[1])

        # a callable is used as-is, a plain key builds a set-or-append
        # closure, otherwise nothing is stored (toStore is always set!)
        if hasattr(toStore, '__call__'):
            self.toStore = toStore
        elif toStore:
            self.toStore = self._makeToStore(toStore)
        else:
            self.toStore = None

    @staticmethod
    def _makeToStore(key):
        """Build a toStore callback that sets store[key], converting the
        entry to a list (and appending) on repeated items."""
        def toStore(store, item):
            if key in store:
                existing = store[key]
                if not isinstance(existing, list):
                    store[key] = [existing]
                store[key].append(item)
            else:
                store[key] = item
        return toStore

    def matches(self, token):
        """Return whether ``token`` (a 4-tuple or None) matches."""
        if not token:
            return False
        type_, val, line, col = token
        return self.match(type_, val)

    def reset(self):
        """A plain Prod carries no state to reset."""
        pass

    def __str__(self):
        return self._name

    def __repr__(self):
        return "<cssutils.prodsparser.%s object name=%r at 0x%x>" % (
            self.__class__.__name__, self._name, id(self))
# Module-level tokenizer shared by every ProdParser instance (there is
# only one); ProdParser(clear=True) clears any pushed-back tokens.
tokenizer = cssutils.tokenize2.Tokenizer()
class ProdParser(object):
"""Productions parser."""
def __init__(self, clear=True):
self.types = cssutils.cssproductions.CSSProductions
self._log = cssutils.log
if clear:
tokenizer.clear()
def _texttotokens(self, text):
"""Build a generator which is the only thing that is parsed!
old classes may use lists etc
"""
if isinstance(text, basestring):
# DEFAULT, to tokenize strip space
return tokenizer.tokenize(text.strip())
elif isinstance(text, tuple):
# OLD: (token, tokens) or a single token
if len(text) == 2:
# (token, tokens)
chain([token], tokens)
else:
# single token
return iter([text])
elif isinstance(text, list):
# OLD: generator from list
return iter(text)
else:
# DEFAULT, already tokenized, assume generator
return text
def _SorTokens(self, tokens, until=',/'):
"""New tokens generator which has S tokens removed,
if followed by anything in ``until``, normally a ``,``."""
for token in tokens:
if token[0] == self.types.S:
try:
next_ = tokens.next()
except StopIteration:
yield token
else:
if next_[1] in until:
# omit S as e.g. ``,`` has been found
yield next_
elif next_[0] == self.types.COMMENT:
# pass COMMENT
yield next_
else:
yield token
yield next_
elif token[0] == self.types.COMMENT:
# pass COMMENT
yield token
else:
yield token
break
# normal mode again
for token in tokens:
yield token
def parse(self, text, name, productions, keepS=False, store=None):
"""
text (or token generator)
to parse, will be tokenized if not a generator yet
may be:
- a string to be tokenized
- a single token, a tuple
- a tuple of (token, tokensGenerator)
- already tokenized so a tokens generator
name
used for logging
productions
used to parse tokens
keepS
if WS should be added to Seq or just be ignored
store UPDATED
If a Prod defines ``toStore`` the key defined there
is a key in store to be set or if store[key] is a list
the next Item is appended here.
TODO: NEEDED? :
Key ``raw`` is always added and holds all unprocessed
values found
returns
:wellformed: True or False
:seq: a filled cssutils.util.Seq object which is NOT readonly yet
:store: filled keys defined by Prod.toStore
:unusedtokens: token generator containing tokens not used yet
"""
tokens = self._texttotokens(text)
if not tokens:
self._log.error(u'No content to parse.')
# TODO: return???
seq = cssutils.util.Seq(readonly=False)
if not store: # store for specific values
store = {}
prods = [productions] # stack of productions
wellformed = True
# while no real token is found any S are ignored
started = False
stopall = False
prod = None
# flag if default S handling should be done
defaultS = True
while True:
try:
token = tokens.next()
except StopIteration:
break
type_, val, line, col = token
# default productions
if type_ == self.types.COMMENT:
# always append COMMENT
seq.append(cssutils.css.CSSComment(val),
cssutils.css.CSSComment, line, col)
elif defaultS and type_ == self.types.S:
# append S (but ignore starting ones)
if not keepS or not started:
continue
else:
seq.append(val, type_, line, col)
# elif type_ == self.types.ATKEYWORD:
# # @rule
# r = cssutils.css.CSSUnknownRule(cssText=val)
# seq.append(r, type(r), line, col)
elif type_ == self.types.INVALID:
# invalidate parse
wellformed = False
self._log.error(u'Invalid token: %r' % (token,))
break
elif type_ == 'EOF':
# do nothing? (self.types.EOF == True!)
pass
else:
started = True # check S now
nextSor = False # reset
try:
while True:
# find next matching production
try:
prod = prods[-1].nextProd(token)
except (Exhausted, NoMatch), e:
# try next
prod = None
if isinstance(prod, Prod):
# found actual Prod, not a Choice or Sequence
break
elif prod:
# nested Sequence, Choice
prods.append(prod)
else:
# nested exhausted, try in parent
if len(prods) > 1:
prods.pop()
else:
raise ParseError('No match')
except ParseError, e:
wellformed = False
self._log.error(u'%s: %s: %r' % (name, e, token))
break
else:
# process prod
if prod.toSeq and not prod.stopAndKeep:
type_, val = prod.toSeq(token, tokens)
if val is not None:
seq.append(val, type_, line, col)
if prod.toStore:
if not prod.storeToken:
prod.toStore(store, seq[-1])
else:
# workaround for now for old style token
# parsing!
# TODO: remove when all new style
prod.toStore(store, token)
if prod.stop: # EOF?
# stop here and ignore following tokens
break
if prod.stopAndKeep: # e.g. ;
# stop here and ignore following tokens
# but keep this token for next run
tokenizer.push(token)
stopall = True
break
if prod.nextSor:
# following is S or other token (e.g. ",")?
# remove S if
tokens = self._SorTokens(tokens, ',/')
defaultS = False
else:
defaultS = True
lastprod = prod
if not stopall:
# stop immediately
while True:
# all productions exhausted?
try:
prod = prods[-1].nextProd(token=None)
except Done, e:
# ok
prod = None
except Missing, e:
prod = None
# last was a S operator which may End a Sequence, then ok
if hasattr(lastprod, 'mayEnd') and not lastprod.mayEnd:
wellformed = False
self._log.error(u'%s: %s' % (name, e))
except ParseError, e:
prod = None
wellformed = False
self._log.error(u'%s: %s' % (name, e))
else:
if prods[-1].optional:
prod = None
elif prod and prod.optional:
# ignore optional
continue
if prod and not prod.optional:
wellformed = False
self._log.error(u'%s: Missing token for production %r'
% (name, str(prod)))
break
elif len(prods) > 1:
# nested exhausted, next in parent
prods.pop()
else:
break
# trim S from end
seq.rstrip()
return wellformed, seq, store, tokens
class PreDef(object):
    """Predefined Prod definition for use in productions definition
    for ProdParser instances.
    """
    types = cssutils.cssproductions.CSSProductions
    # 3- or 6-digit hex color, e.g. #fff or #ffffff
    reHexcolor = re.compile(r'^\#(?:[0-9abcdefABCDEF]{3}|[0-9abcdefABCDEF]{6})$')

    @staticmethod
    def calc(toSeq=None, nextSor=False):
        "start of a calc() function"
        return Prod(name=u'calcfunction',
                    match=lambda t, v: u'calc(' == cssutils.helper.normalize(v),
                    toSeq=toSeq,
                    nextSor=nextSor)

    @staticmethod
    def char(name='char', char=u',', toSeq=None,
             stop=False, stopAndKeep=False,
             optional=True, nextSor=False):
        "any CHAR"
        return Prod(name=name, match=lambda t, v: v == char, toSeq=toSeq,
                    stop=stop, stopAndKeep=stopAndKeep, optional=optional,
                    nextSor=nextSor)

    @staticmethod
    def comma():
        "a ``,`` separator (optional by char() default)"
        return PreDef.char(u'comma', u',')

    @staticmethod
    def dimension(nextSor=False, stop=False):
        "DIMENSION token; value is normalized"
        return Prod(name=u'dimension',
                    match=lambda t, v: t == PreDef.types.DIMENSION,
                    toSeq=lambda t, tokens: (t[0], cssutils.helper.normalize(t[1])),
                    stop=stop,
                    nextSor=nextSor)

    @staticmethod
    def function(toSeq=None, nextSor=False):
        "FUNCTION token"
        return Prod(name=u'function',
                    match=lambda t, v: t == PreDef.types.FUNCTION,
                    toSeq=toSeq,
                    nextSor=nextSor)

    @staticmethod
    def funcEnd(stop=False):
        ")"
        return PreDef.char(u'end FUNC ")"', u')',
                           stop=stop)

    @staticmethod
    def hexcolor(stop=False, nextSor=False):
        "#123 or #123456"
        return Prod(name='HEX color',
                    match=lambda t, v: (
                        t == PreDef.types.HASH and
                        PreDef.reHexcolor.match(v)
                    ),
                    stop=stop,
                    nextSor=nextSor)

    @staticmethod
    def ident(stop=False, toStore=None, nextSor=False):
        "IDENT token"
        return Prod(name=u'ident',
                    match=lambda t, v: t == PreDef.types.IDENT,
                    stop=stop,
                    toStore=toStore,
                    nextSor=nextSor)

    @staticmethod
    def number(stop=False, toSeq=None, nextSor=False):
        "NUMBER token"
        return Prod(name=u'number',
                    match=lambda t, v: t == PreDef.types.NUMBER,
                    stop=stop,
                    toSeq=toSeq,
                    nextSor=nextSor)

    @staticmethod
    def percentage(stop=False, toSeq=None, nextSor=False):
        "PERCENTAGE token"
        return Prod(name=u'percentage',
                    match=lambda t, v: t == PreDef.types.PERCENTAGE,
                    stop=stop,
                    toSeq=toSeq,
                    nextSor=nextSor)

    @staticmethod
    def string(stop=False, nextSor=False):
        "string delimiters are removed by default"
        return Prod(name=u'string',
                    match=lambda t, v: t == PreDef.types.STRING,
                    toSeq=lambda t, tokens: (t[0], cssutils.helper.stringvalue(t[1])),
                    stop=stop,
                    nextSor=nextSor)

    @staticmethod
    def S(name=u'whitespace', toSeq=None, optional=False):
        "whitespace; may end a Sequence (mayEnd=True)"
        return Prod(name=name,
                    match=lambda t, v: t == PreDef.types.S,
                    toSeq=toSeq,
                    optional=optional,
                    mayEnd=True)

    @staticmethod
    def unary(stop=False, toSeq=None, nextSor=False):
        "+ or -"
        return Prod(name=u'unary +-', match=lambda t, v: v in (u'+', u'-'),
                    optional=True,
                    stop=stop,
                    toSeq=toSeq,
                    nextSor=nextSor)

    @staticmethod
    def uri(stop=False, nextSor=False):
        "'url(' and ')' are removed and URI is stripped"
        return Prod(name=u'URI',
                    match=lambda t, v: t == PreDef.types.URI,
                    toSeq=lambda t, tokens: (t[0], cssutils.helper.urivalue(t[1])),
                    stop=stop,
                    nextSor=nextSor)

    @staticmethod
    def unicode_range(stop=False, nextSor=False):
        "u+123456-abc normalized to lower `u`"
        return Prod(name='unicode-range',
                    match=lambda t, v: t == PreDef.types.UNICODE_RANGE,
                    toSeq=lambda t, tokens: (t[0], t[1].lower()),
                    stop=stop,
                    nextSor=nextSor
                    )

    @staticmethod
    def variable(toSeq=None, stop=False, nextSor=False):
        "start of a var() function"
        return Prod(name=u'variable',
                    match=lambda t, v: u'var(' == cssutils.helper.normalize(v),
                    toSeq=toSeq,
                    stop=stop,
                    nextSor=nextSor)

    # used for MarginRule for now:
    @staticmethod
    def unknownrule(name=u'@', toStore=None):
        """@rule dummy (matches ATKEYWORD to remove unknown rule tokens from
        stream::

            @x;
            @x {...}

        no nested yet!
        """
        def rule(tokens):
            # consume tokens up to and including the rule terminator
            saved = []
            for t in tokens:
                saved.append(t)
                if (t[1] == u'}' or t[1] == u';'):
                    return cssutils.css.CSSUnknownRule(saved)

        return Prod(name=name,
                    match=lambda t, v: t == u'ATKEYWORD',
                    toSeq=lambda t, tokens: (u'CSSUnknownRule',
                                             rule(pushtoken(t, tokens))
                                             ),
                    toStore=toStore
                    )
|
forms | admin | """ using django model forms """
import datetime
from bookwyrm import models
from django import forms
from django.core.exceptions import PermissionDenied
from django.forms import widgets
from django.utils import timezone
from django.utils.translation import gettext_lazy as _
from django_celery_beat.models import IntervalSchedule
from .custom_form import CustomForm, StyledForm
# pylint: disable=missing-class-docstring
class ExpiryWidget(widgets.Select):
    """Select widget that converts human-readable expiry buckets into
    absolute datetimes."""

    def value_from_datadict(self, data, files, name):
        """Map the selected bucket to ``now + interval``.

        Returns None for "forever"; an unknown selection is returned
        unchanged so that model validation raises on it.
        """
        selection = super().value_from_datadict(data, files, name)
        if selection == "forever":
            return None
        buckets = {
            "day": datetime.timedelta(days=1),
            "week": datetime.timedelta(days=7),
            "month": datetime.timedelta(days=31),  # close enough to a month
        }
        if selection not in buckets:
            return selection  # invalid value: validation will raise later
        return timezone.now() + buckets[selection]
class CreateInviteForm(CustomForm):
    """Admin form for creating a SiteInvite with expiry and use-limit choices."""

    class Meta:
        model = models.SiteInvite
        # code/user/times_used/invitees are set programmatically, not by admins
        exclude = ["code", "user", "times_used", "invitees"]
        widgets = {
            "expiry": ExpiryWidget(
                choices=[
                    ("day", _("One Day")),
                    ("week", _("One Week")),
                    ("month", _("One Month")),
                    ("forever", _("Does Not Expire")),
                ]
            ),
            "use_limit": widgets.Select(
                choices=[(i, _(f"{i} uses")) for i in [1, 5, 10, 25, 50, 100]]
                + [(None, _("Unlimited"))]
            ),
        }
class SiteForm(CustomForm):
    """Admin form for site-wide settings: branding, documentation pages and
    contact details."""

    class Meta:
        model = models.SiteSettings
        fields = [
            "name",
            "instance_tagline",
            "instance_description",
            "instance_short_description",
            "default_theme",
            "code_of_conduct",
            "privacy_policy",
            "impressum",
            "show_impressum",
            "logo",
            "logo_small",
            "favicon",
            "support_link",
            "support_title",
            "admin_email",
            "footer_item",
        ]
        widgets = {
            "instance_short_description": forms.TextInput(
                attrs={"aria-describedby": "desc_instance_short_description"}
            ),
        }
class RegistrationForm(CustomForm):
    """Admin form for the full set of registration/invite-request settings."""

    class Meta:
        model = models.SiteSettings
        fields = [
            "allow_registration",
            "allow_invite_requests",
            "registration_closed_text",
            "invite_request_text",
            "invite_request_question",
            "invite_question_text",
            "require_confirm_email",
            "default_user_auth_group",
        ]
        widgets = {
            "require_confirm_email": forms.CheckboxInput(
                attrs={"aria-describedby": "desc_require_confirm_email"}
            ),
            "invite_request_text": forms.Textarea(
                attrs={"aria-describedby": "desc_invite_request_text"}
            ),
        }
class RegistrationLimitedForm(CustomForm):
    """Reduced registration form: only the text fields, for admins without
    permission to toggle registration itself."""

    class Meta:
        model = models.SiteSettings
        fields = [
            "registration_closed_text",
            "invite_request_text",
            "invite_request_question",
            "invite_question_text",
        ]
        widgets = {
            "invite_request_text": forms.Textarea(
                attrs={"aria-describedby": "desc_invite_request_text"}
            ),
        }
class ThemeForm(CustomForm):
    """Admin form for registering a UI theme by name and scss path."""

    class Meta:
        model = models.Theme
        fields = ["name", "path"]
        widgets = {
            "name": forms.TextInput(attrs={"aria-describedby": "desc_name"}),
            "path": forms.TextInput(
                attrs={
                    "aria-describedby": "desc_path",
                    "placeholder": "css/themes/theme-name.scss",
                }
            ),
        }
class AnnouncementForm(CustomForm):
    """Admin form for site announcements with optional event/start/end dates."""

    class Meta:
        model = models.Announcement
        # remote_id is assigned automatically
        exclude = ["remote_id"]
        widgets = {
            "preview": forms.TextInput(attrs={"aria-describedby": "desc_preview"}),
            "content": forms.Textarea(attrs={"aria-describedby": "desc_content"}),
            "event_date": forms.SelectDateWidget(
                attrs={"aria-describedby": "desc_event_date"}
            ),
            "start_date": forms.SelectDateWidget(
                attrs={"aria-describedby": "desc_start_date"}
            ),
            "end_date": forms.SelectDateWidget(
                attrs={"aria-describedby": "desc_end_date"}
            ),
            "active": forms.CheckboxInput(attrs={"aria-describedby": "desc_active"}),
        }
class EmailBlocklistForm(CustomForm):
    """Admin form for blocking an email domain from registration."""

    class Meta:
        model = models.EmailBlocklist
        fields = ["domain"]
        # BUGFIX: the widgets key must match a declared field name; Django
        # silently ignores unknown keys, so the previous "avatar" key meant
        # the aria-describedby attribute was never rendered for "domain".
        widgets = {
            "domain": forms.TextInput(attrs={"aria-describedby": "desc_domain"}),
        }
class IPBlocklistForm(CustomForm):
    """Admin form for blocking an IP address."""

    class Meta:
        model = models.IPBlocklist
        fields = ["address"]
class ServerForm(CustomForm):
    """Admin form for editing a federated server record."""

    class Meta:
        model = models.FederatedServer
        # remote_id is managed by federation logic, not edited by admins
        exclude = ["remote_id"]
class AutoModRuleForm(CustomForm):
    """Admin form for automated-moderation keyword rules."""

    class Meta:
        model = models.AutoMod
        fields = ["string_match", "flag_users", "flag_statuses", "created_by"]
class IntervalScheduleForm(StyledForm):
    """Form for django-celery-beat's IntervalSchedule (an external model,
    hence the explicit permission check in save())."""

    class Meta:
        model = IntervalSchedule
        fields = ["every", "period"]

        widgets = {
            "every": forms.NumberInput(attrs={"aria-describedby": "desc_every"}),
            "period": forms.Select(attrs={"aria-describedby": "desc_period"}),
        }

    # pylint: disable=arguments-differ
    def save(self, request, *args, **kwargs):
        """This is an outside model so the perms check works differently"""
        if not request.user.has_perm("bookwyrm.moderate_user"):
            raise PermissionDenied()
        return super().save(*args, **kwargs)
|
service | settings | # =============================================================================
# Copyright (C) 2010 Diego Duclos
#
# This file is part of pyfa.
#
# pyfa is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# pyfa is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with pyfa. If not, see <http://www.gnu.org/licenses/>.
# =============================================================================
import json
import os.path
import pickle
import urllib.error
import urllib.parse
import urllib.request
from collections import namedtuple
import config
import eos.config
import wx
from logbook import Logger
from service.const import GraphDpsDroneMode
pyfalog = Logger(__name__)
class SettingsProvider:
    """Loads, caches and persists pickled per-area settings dicts."""

    # only set up a save location when pyfa was started with one
    if config.savePath:
        BASE_PATH = os.path.join(config.savePath, "settings")
    settings = {}
    _instance = None

    @classmethod
    def getInstance(cls):
        """Return the process-wide singleton, creating it on first use."""
        if cls._instance is None:
            cls._instance = SettingsProvider()
        return cls._instance

    def __init__(self):
        if hasattr(self, "BASE_PATH"):
            if not os.path.exists(self.BASE_PATH):
                os.mkdir(self.BASE_PATH)

    def getSettings(self, area, defaults=None):
        # type: (basestring, dict) -> service.Settings
        """Return the cached Settings for ``area``, loading its pickle from
        disk if needed and back-filling missing keys from ``defaults``."""
        # NOTE: needed to change for tests
        # TODO: Write to memory with mmap -> https://docs.python.org/2/library/mmap.html
        settings_obj = self.settings.get(area)
        if settings_obj is None:  # and hasattr(self, 'BASE_PATH'):
            # BUGFIX: the merge loops below iterate over ``defaults``; a
            # None default previously raised TypeError for existing files
            if defaults is None:
                defaults = {}
            canonical_path = (
                os.path.join(self.BASE_PATH, area) if hasattr(self, "BASE_PATH") else ""
            )
            if not os.path.exists(canonical_path):  # path string or empty string.
                info = {}
                info.update(defaults)
            else:
                try:
                    with open(canonical_path, "rb") as f:
                        info = pickle.load(f)
                    # back-fill keys added since the file was written
                    for item in defaults:
                        if item not in info:
                            info[item] = defaults[item]
                except (KeyboardInterrupt, SystemExit):
                    raise
                except Exception:
                    # corrupt or unreadable settings file: fall back to defaults
                    info = {}
                    info.update(defaults)
            self.settings[area] = settings_obj = Settings(canonical_path, info)
        return settings_obj

    def saveAll(self):
        """Persist every loaded Settings object to disk."""
        for settings in self.settings.values():
            settings.save()
class Settings:
    """Dict-backed settings bag bound to an on-disk pickle location."""

    def __init__(self, location, info):
        # type: (basestring, dict) -> None
        # ``location`` may be an empty string when there is nowhere to save
        self.location = location
        self.info = info

    def save(self):
        """Pickle the settings dict to disk; no-op without a location."""
        # NOTE: needed to change for tests
        if not self.location:
            return
        # the context manager closes the file handle automatically
        with open(self.location, "wb") as f:
            pickle.dump(self.info, f, pickle.HIGHEST_PROTOCOL)

    def __getitem__(self, k):
        """Return the stored value, or None (with a warning) when missing."""
        try:
            return self.info[k]
        except KeyError as e:
            pyfalog.warning("Failed to get setting for '{0}'. Exception: {1}", k, e)
            return None

    def __setitem__(self, k, v):
        self.info[k] = v

    def __iter__(self):
        return iter(self.info)

    def iterkeys(self):
        return iter(self.info.keys())

    def itervalues(self):
        return iter(self.info.values())

    def iteritems(self):
        return iter(self.info.items())

    def keys(self):
        return list(self.info)

    def values(self):
        return list(self.info.values())

    def items(self):
        return list(self.info.items())
class NetworkSettings:
    """Proxy configuration for pyfa's network access."""

    _instance = None

    # constants for the "mode" setting
    PROXY_MODE_NONE = 0  # 0 - No proxy
    PROXY_MODE_AUTODETECT = 1  # 1 - Auto-detected proxy settings
    PROXY_MODE_MANUAL = 2  # 2 - Manual proxy settings

    @classmethod
    def getInstance(cls):
        """Return the process-wide singleton, creating it on first use."""
        if cls._instance is None:
            cls._instance = NetworkSettings()
        return cls._instance

    def __init__(self):
        serviceNetworkDefaultSettings = {
            "mode": self.PROXY_MODE_AUTODETECT,
            "type": "https",
            "address": "",
            "port": "",
            "access": 15,
            "login": None,
            "password": None,
        }

        self.serviceNetworkSettings = SettingsProvider.getInstance().getSettings(
            "pyfaServiceNetworkSettings", serviceNetworkDefaultSettings
        )

    def isEnabled(self, type):
        """Return True when the given access bit(s) are set."""
        if type & self.serviceNetworkSettings["access"]:
            return True
        return False

    def toggleAccess(self, type, toggle=True):
        """Set (toggle=True) or clear the given access bit(s)."""
        bitfield = self.serviceNetworkSettings["access"]

        if toggle:  # Turn bit on
            self.serviceNetworkSettings["access"] = type | bitfield
        else:  # Turn bit off
            self.serviceNetworkSettings["access"] = ~type & bitfield

    def getMode(self):
        return self.serviceNetworkSettings["mode"]

    def getAddress(self):
        return self.serviceNetworkSettings["address"]

    def getPort(self):
        return self.serviceNetworkSettings["port"]

    def getType(self):
        return self.serviceNetworkSettings["type"]

    def getAccess(self):
        return self.serviceNetworkSettings["access"]

    def setMode(self, mode):
        self.serviceNetworkSettings["mode"] = mode

    def setAddress(self, addr):
        self.serviceNetworkSettings["address"] = addr

    def setPort(self, port):
        self.serviceNetworkSettings["port"] = port

    def setType(self, type):
        self.serviceNetworkSettings["type"] = type

    def setAccess(self, access):
        self.serviceNetworkSettings["access"] = access

    @staticmethod
    def autodetect():
        """Return (host, port) of a system-configured HTTP(S) proxy, or None."""
        proxy = None
        proxydict = urllib.request.ProxyHandler().proxies
        validPrefixes = ("http", "https")

        for prefix in validPrefixes:
            if prefix not in proxydict:
                continue
            proxyline = proxydict[prefix]
            proto = "{0}://".format(prefix)
            if proxyline[: len(proto)] == proto:
                proxyline = proxyline[len(proto):]
            # sometimes proxyline contains "user:password@" section before
            # the proxy address; remove it so the port split below works.
            # BUGFIX: rsplit("@", 1) tolerates "@" inside the password,
            # where the old split("@") raised ValueError
            if "@" in proxyline:
                userPass, proxyline = proxyline.rsplit("@", 1)
                # TODO: do something with user/password?
            # BUGFIX: rsplit(":", 1) splits on the last colon only, so a
            # colon earlier in the line no longer breaks the unpacking
            proxAddr, proxPort = proxyline.rsplit(":", 1)
            proxPort = int(proxPort.rstrip("/"))
            proxy = (proxAddr, proxPort)
            break

        return proxy

    def getProxySettings(self):
        """Return (host, port) for the active mode, or None when no proxy."""
        if self.getMode() == self.PROXY_MODE_NONE:
            return None
        if self.getMode() == self.PROXY_MODE_AUTODETECT:
            return self.autodetect()
        if self.getMode() == self.PROXY_MODE_MANUAL:
            return self.getAddress(), int(self.getPort())

    def getProxyAuthDetails(self):
        """Return (login, password) when both are configured, else None."""
        if self.getMode() == self.PROXY_MODE_NONE:
            return None
        if (self.serviceNetworkSettings["login"] is None) or (
            self.serviceNetworkSettings["password"] is None
        ):
            return None
        # in all other cases, return tuple of (login, password)
        return (
            self.serviceNetworkSettings["login"],
            self.serviceNetworkSettings["password"],
        )

    def setProxyAuthDetails(self, login, password):
        """Store proxy credentials; None or an empty login unsets them."""
        if (login is None) or (password is None):
            self.serviceNetworkSettings["login"] = None
            self.serviceNetworkSettings["password"] = None
            return
        if login == "":  # empty login unsets proxy auth info
            self.serviceNetworkSettings["login"] = None
            self.serviceNetworkSettings["password"] = None
            return
        self.serviceNetworkSettings["login"] = login
        self.serviceNetworkSettings["password"] = password

    def getProxySettingsInRequestsFormat(self) -> dict:
        """Return the active proxy as a requests-style proxies dict."""
        proxies = {}
        proxy_settings = self.getProxySettings()
        if proxy_settings is not None:
            # form proxy address in format "http://host:port
            proxy_host_port = "{}:{}".format(proxy_settings[0], proxy_settings[1])
            proxy_auth_details = self.getProxyAuthDetails()
            user_pass = ""
            if proxy_auth_details is not None:
                # construct prefix in form "user:password@"
                user_pass = "{}:{}@".format(
                    proxy_auth_details[0], proxy_auth_details[1]
                )
            proxies = {
                "http": "http://" + user_pass + proxy_host_port,
                "https": "http://" + user_pass + proxy_host_port,
            }
        return proxies
class HTMLExportSettings:
    """Settings used by the HTML export feature."""

    _instance = None

    @classmethod
    def getInstance(cls):
        """Return the process-wide singleton, creating it on first use."""
        if cls._instance is None:
            cls._instance = HTMLExportSettings()
        return cls._instance

    def __init__(self):
        defaults = {
            "path": config.savePath + os.sep + "pyfaFits.html",
            "minimal": False,
        }
        self.serviceHTMLExportSettings = SettingsProvider.getInstance().getSettings(
            "pyfaServiceHTMLExportSettings", defaults
        )

    def getMinimalEnabled(self):
        """Whether the stripped-down (minimal) HTML output is selected."""
        return self.serviceHTMLExportSettings["minimal"]

    def setMinimalEnabled(self, minimal):
        self.serviceHTMLExportSettings["minimal"] = minimal

    def getPath(self):
        """Target file path of the exported HTML."""
        return self.serviceHTMLExportSettings["path"]

    def setPath(self, path):
        self.serviceHTMLExportSettings["path"] = path
class UpdateSettings:
    """Settings used by update notification."""

    _instance = None

    @classmethod
    def getInstance(cls):
        """Return the process-wide singleton, creating it on first use."""
        if cls._instance is None:
            cls._instance = UpdateSettings()
        return cls._instance

    def __init__(self):
        # prerelease - if True, suppress prerelease notifications
        # version   - release tag the user muted notifications for
        # (updates can also be suppressed entirely via network settings)
        defaults = {"prerelease": True, "version": None}
        self.serviceUpdateSettings = SettingsProvider.getInstance().getSettings(
            "pyfaServiceUpdateSettings", defaults
        )

    def get(self, type):
        """Read one update-notification setting."""
        return self.serviceUpdateSettings[type]

    def set(self, type, value):
        """Write one update-notification setting."""
        self.serviceUpdateSettings[type] = value
class EsiSettings:
    """EVE SSO / ESI API configuration."""

    _instance = None

    @classmethod
    def getInstance(cls):
        """Return the process-wide singleton, creating it on first use."""
        if cls._instance is None:
            cls._instance = EsiSettings()
        return cls._instance

    def __init__(self):
        # ssoMode:   0 = pyfa.io, 1 = custom application
        # loginMode: 0 = server start up,
        #            1 = user copies data from website to pyfa
        defaults = {
            "ssoMode": 0,
            "loginMode": 0,
            "clientID": "",
            "clientSecret": "",
            "timeout": 60,
            "server": "Tranquility",
            "exportCharges": True,
            "enforceJwtExpiration": True,
        }

        self.settings = SettingsProvider.getInstance().getSettings(
            "pyfaServiceEsiSettings", defaults
        )

    def get(self, type):
        """Read one ESI setting."""
        return self.settings[type]

    def set(self, type, value):
        """Write one ESI setting."""
        self.settings[type] = value
class StatViewSettings:
    """Controls how each stat panel is rendered.

    Per-panel values: 0 = do not show, 1 = minimal/text-only view,
    2 = full view.
    """

    _instance = None

    @classmethod
    def getInstance(cls):
        """Return the process-wide singleton, creating it on first use."""
        if cls._instance is None:
            cls._instance = StatViewSettings()
        return cls._instance

    def __init__(self):
        defaults = {
            "resources": 2,
            "resistances": 2,
            "recharge": 2,
            "firepower": 2,
            "capacitor": 2,
            "targetingMisc": 1,
            "price": 2,
            "miningyield": 2,
            "drones": 2,
            "outgoing": 2,
        }
        self.serviceStatViewDefaultSettings = (
            SettingsProvider.getInstance().getSettings(
                "pyfaServiceStatViewSettings", defaults
            )
        )

    def get(self, type):
        """Read one stat-panel display mode."""
        return self.serviceStatViewDefaultSettings[type]

    def set(self, type, value):
        """Write one stat-panel display mode."""
        self.serviceStatViewDefaultSettings[type] = value
class MarketPriceSettings:
    """Price-total and market-browser behavior options.

    For the price entries: 0 = do not add to total, 1 = add to total.
    """

    _instance = None

    @classmethod
    def getInstance(cls):
        """Return the process-wide singleton, creating it on first use."""
        if cls._instance is None:
            cls._instance = MarketPriceSettings()
        return cls._instance

    def __init__(self):
        defaults = {
            "drones": 1,
            "cargo": 1,
            "character": 0,
            "marketMGJumpMode": 0,
            "marketMGEmptyMode": 1,
            "marketMGSearchMode": 0,
            "marketMGMarketSelectMode": 0,
        }
        self.PriceMenuDefaultSettings = SettingsProvider.getInstance().getSettings(
            "pyfaPriceMenuSettings", defaults
        )

    def get(self, type):
        """Read one market/price option."""
        return self.PriceMenuDefaultSettings[type]

    def set(self, type, value):
        """Write one market/price option."""
        self.PriceMenuDefaultSettings[type] = value
class ContextMenuSettings:
    """Visibility of the individual context-menu entries.

    Values: 0 = do not show, 1 = show.
    """

    _instance = None

    @classmethod
    def getInstance(cls):
        """Return the process-wide singleton, creating it on first use."""
        if cls._instance is None:
            cls._instance = ContextMenuSettings()
        return cls._instance

    def __init__(self):
        defaults = {
            "ammoPattern": 1,
            "changeAffectingSkills": 1,
            "metaSwap": 1,
            "project": 1,
            "moduleFill": 1,
            "spoolup": 1,
            "additionsCopyPaste": 1,
        }
        self.ContextMenuDefaultSettings = SettingsProvider.getInstance().getSettings(
            "pyfaContextMenuSettings", defaults
        )

    def get(self, type):
        """Read one context-menu visibility flag."""
        return self.ContextMenuDefaultSettings[type]

    def set(self, type, value):
        """Write one context-menu visibility flag."""
        self.ContextMenuDefaultSettings[type] = value
class EOSSettings:
    """Persists the eos fitting engine's settings via SettingsProvider."""

    _instance = None

    @classmethod
    def getInstance(cls):
        """Return the process-wide singleton, creating it on first use."""
        if cls._instance is None:
            cls._instance = EOSSettings()
        return cls._instance

    def __init__(self):
        # seed the persistent store with eos' built-in defaults
        self.EOSSettings = SettingsProvider.getInstance().getSettings(
            "pyfaEOSSettings", eos.config.settings
        )

    def get(self, type):
        """Read one eos engine setting."""
        return self.EOSSettings[type]

    def set(self, type, value):
        """Write one eos engine setting."""
        self.EOSSettings[type] = value
class GraphSettings:
    """Options for the damage/stat graphs."""

    _instance = None

    @classmethod
    def getInstance(cls):
        """Return the process-wide singleton, creating it on first use."""
        if cls._instance is None:
            cls._instance = GraphSettings()
        return cls._instance

    def __init__(self):
        defaults = {
            "mobileDroneMode": GraphDpsDroneMode.auto,
            "ignoreDCR": False,
            "ignoreResists": True,
            "ignoreLockRange": True,
            "applyProjected": True,
        }
        self.settings = SettingsProvider.getInstance().getSettings(
            "graphSettings", defaults
        )

    def get(self, type):
        """Read one graph option."""
        return self.settings[type]

    def set(self, type, value):
        """Write one graph option."""
        self.settings[type] = value
class LocaleSettings:
    """UI/eos locale settings plus translation-progress metadata."""

    _instance = None
    DEFAULT = "en_US"
    defaults = {
        "locale": DEFAULT,
        "eos_locale": "Auto",  # flag for "Default" which is the same as the locale or, if not available, English
    }

    def __init__(self):
        self.settings = SettingsProvider.getInstance().getSettings(
            "localeSettings", self.defaults
        )
        # translation progress data is optional; missing file -> no data
        try:
            with open(
                os.path.join(config.pyfaPath, "locale", "progress.json"), "r"
            ) as f:
                self.progress_data = json.load(f)
        except FileNotFoundError:
            self.progress_data = None

    @classmethod
    def getInstance(cls):
        """Return the process-wide singleton, creating it on first use."""
        if cls._instance is None:
            cls._instance = LocaleSettings()
        return cls._instance

    def get_progress(self, lang):
        """Return translation progress for ``lang``; None when no data is
        loaded or for the default (source) locale."""
        if self.progress_data is None:
            return None
        if lang == self.defaults["locale"]:
            return None
        return self.progress_data[lang]

    @classmethod
    def supported_langauges(cls):
        """Requires the application to be initialized, otherwise wx.Translation isn't set."""
        pyfalog.info(
            f'using "{config.CATALOG}" to fetch languages, relatively base path "{os.getcwd()}"'
        )
        return {
            x: wx.Locale.FindLanguageInfo(x)
            for x in wx.Translations.Get().GetAvailableTranslations(config.CATALOG)
        }

    def get(self, key):
        """Get the raw value of the setting."""
        return self.settings[key]

    def get_eos_locale(self):
        """Get the effective eos locale: the explicit value, or (for "Auto")
        the language part of the UI locale."""
        val = self.settings["eos_locale"]
        return (
            val
            if val != self.defaults["eos_locale"]
            else self.settings["locale"].split("_")[0]
        )

    def set(self, key, value):
        """Set a locale setting; an unsupported locale falls back to DEFAULT.

        BUGFIX: the old code assigned DEFAULT and then unconditionally
        overwrote it with the invalid value, so the fallback never stuck.
        """
        if key == "locale" and value not in self.supported_langauges():
            value = self.DEFAULT
        self.settings[key] = value
|
plugins | delfi | """
$description Internet portal in Estonia, Latvia, and Lithuania providing daily news, ranging from gardening to politics.
$url delfi.lt
$url delfi.ee
$url delfi.lv
$type vod
"""
import itertools
import logging
import re
from urllib.parse import urlparse
from streamlink.plugin import Plugin, PluginError, pluginmatcher
from streamlink.plugin.api import validate
from streamlink.stream.hls import HLSStream
from streamlink.utils.parse import parse_qsd
from streamlink.utils.url import update_scheme
log = logging.getLogger(__name__)
@pluginmatcher(
    re.compile(
        r"https?://(?:[\w-]+\.)?delfi\.(?P<tld>lt|lv|ee)",
    )
)
class Delfi(Plugin):
    """Extract VOD streams from the Delfi news portals (delfi.lt/.lv/.ee)."""
    # Per-country video data API endpoints, keyed by the TLD captured above.
    _api = {
        "lt": "https://g2.dcdn.lt/vfe/data.php",
        "lv": "https://g.delphi.lv/vfe/data.php",
        "ee": "https://g4.nh.ee/vfe/data.php",
    }
    def _get_streams_api(self, video_id):
        """Query the per-country data API for *video_id* and yield HLS variants."""
        log.debug(f"Found video ID: {video_id}")
        tld = self.match.group("tld")
        try:
            data = self.session.http.get(
                # NOTE(review): the dict.get fallback is the literal string
                # "lt", not self._api["lt"]; unreachable in practice since
                # the matcher only captures lt/lv/ee.
                self._api.get(tld, "lt"),
                params=dict(video_id=video_id),
                schema=validate.Schema(
                    validate.parse_json(),
                    {
                        "success": True,
                        "data": {
                            "versions": {
                                # each version holds a list of sources; keep
                                # only the HLS ones
                                str: validate.all(
                                    [
                                        {
                                            "type": str,
                                            "src": str,
                                        }
                                    ],
                                    validate.filter(lambda item: item["type"] == "application/x-mpegurl"),
                                ),
                            },
                        },
                    },
                    validate.get(("data", "versions")),
                ),
            )
        except PluginError:
            log.error("Failed to get streams from API")
            return
        # Flatten every version's source list and emit all HLS variants.
        for stream in itertools.chain(*data.values()):
            src = update_scheme("https://", stream["src"], force=False)
            yield from HLSStream.parse_variant_playlist(self.session, src).items()
    def _get_streams_delfi(self, src):
        """Extract streams from a delfi-stream iframe page at *src*."""
        try:
            data = self.session.http.get(
                src,
                schema=validate.Schema(
                    validate.parse_html(),
                    validate.xml_xpath_string(".//script[contains(text(),'embedJs.setAttribute(')][1]/text()"),
                    validate.none_or_all(
                        re.compile(r"embedJs\.setAttribute\('src',\s*'(.+?)'"),
                        validate.none_or_all(
                            validate.get(1),
                            # the stream description is JSON packed into the
                            # "stream" query parameter of the URL fragment
                            validate.transform(lambda url: parse_qsd(urlparse(url).fragment)),
                            {"stream": str},
                            validate.get("stream"),
                            validate.parse_json(),
                            {
                                "versions": [
                                    {
                                        "hls": str,
                                    }
                                ]
                            },
                            validate.get("versions"),
                        ),
                    ),
                ),
            )
        except PluginError:
            log.error("Failed to get streams from iframe")
            return
        # NOTE(review): none_or_all may yield data=None when the script/URL
        # is absent — confirm upstream pages always match before iterating.
        for stream in data:
            src = update_scheme("https://", stream["hls"], force=False)
            yield from HLSStream.parse_variant_playlist(self.session, src).items()
    def _get_streams(self):
        """Dispatch on page contents: native player, YouTube embed, or iframe."""
        root = self.session.http.get(
            self.url,
            schema=validate.Schema(
                validate.parse_html(),
            ),
        )
        video_id = root.xpath("string(.//div[@data-provider='dvideo'][@data-id][1]/@data-id)")
        if video_id:
            return self._get_streams_api(str(video_id))
        yt_id = root.xpath("string(.//script[contains(@src,'/yt.js')][@data-video]/@data-video)")
        if yt_id:
            return self.session.streams(f"https://www.youtube.com/watch?v={yt_id}")
        yt_iframe = root.xpath("string(.//iframe[starts-with(@src,'https://www.youtube.com/')][1]/@src)")
        if yt_iframe:
            return self.session.streams(str(yt_iframe))
        delfi = root.xpath("string(.//iframe[@name='delfi-stream'][@src][1]/@src)")
        if delfi:
            return self._get_streams_delfi(str(delfi))
__plugin__ = Delfi
|
scripts | dump_version | """
This script is solely used when generating builds. It generates a version number automatically using
git tags as it's basis. Whenever a build is created, run this file beforehand and it should replace
the old version number with the new one in VERSION.YML
"""
import os
import subprocess
import yaml
def rreplace(s, old, new, occurrence):
    """Replace up to *occurrence* rightmost instances of *old* in *s* with *new*."""
    pieces = s.rsplit(old, occurrence)
    return new.join(pieces)
# Rewrite version.yml in place: read it, then overwrite it with the same
# data plus a freshly computed "version" value.
with open("version.yml", "r+") as file:
    data = yaml.load(file, Loader=yaml.SafeLoader)
    file.seek(0)
    file.truncate()
    # todo: run Version() on the tag to ensure that it's of proper formatting - fail a test if not and prevent building
    # python's versioning spec doesn't handle the same format git describe outputs, so convert it.
    # Version components come from $PYFA_VERSION when set (e.g. by CI),
    # otherwise from `git describe --tags` (e.g. "v2.9.0-12-gabc1234").
    label = (
        os.environ["PYFA_VERSION"].split("-")
        if "PYFA_VERSION" in os.environ
        else subprocess.check_output(["git", "describe", "--tags"])
        .strip()
        .decode()
        .split("-")
    )
    # Join the trailing "<commits>-g<sha>" pair with "+" so the result reads
    # like a local version label (e.g. "v2.9.0+12-gabc1234").
    label = (
        "-".join(label[:-2]) + "+" + "-".join(label[-2:])
        if len(label) > 1
        else label[0]
    )
    # Keep only the last "+" separator; any earlier ones revert to "-".
    label = rreplace(label, "+", "-", label.count("+") - 1)
    print(label)
    data["version"] = label
    yaml.dump(data, file, default_flow_style=False)
|
tagsources | TagSource | # An interface specification for Tag Sources
#
# A Tag Source should define a class that derives from TagSource, and sets
# a module attribute "info" to an instance of that class in order to interface
# with puddletag
from puddlestuff.constants import CHECKBOX, COMBO, SPINBOX, TAGLIST, TEXT
from puddlestuff.util import translate
class UnimplementedMethod(Exception):
    """Raised by TagSource base methods that a subclass has not overridden."""
class TagSource:
    """Interface specification for a puddletag Tag Source.

    Subclasses override the methods below; any left unimplemented raise
    UnimplementedMethod when called.
    """
    # A name for the tag source. Keep it short.
    name = "Generic Tag Source"
    # A list of tags to group album releases by.
    group_by = ["album", "artist"]
    # A ToolTip that displays when hovering above the Search box
    # Set to None to disable ToolTip
    tooltip = translate("Group", "HTML Tooltip Content")
    # A Set of preferences to display on the preference dialog box
    # Set to None to disable Preferences for this Tag Source
    preferences = [
        [translate("Discogs", "A Text Option"), TEXT, "Default Value"],
        [
            translate("Discogs", "A Checkbox Option"),
            CHECKBOX,
            True,
        ],  # Default value, True or False
        [
            translate("Discogs", "A Spinbox Integer"),
            SPINBOX,
            [0, 100, 50],
        ],  # Minimum, Maximum, Default values
        [
            translate("Discogs", "A Tag List"),
            TAGLIST,
            [],
        ],  # Undocumented, no exemplar and unimplemented
        [
            translate("Discogs", "A Combobox Option"),
            COMBO,
            [[translate("Discogs", "Option 1"), translate("Discogs", "Option 2")], 1],
        ],  # List of option texts and default selection
    ]
    # Note the TAGLIST preference type is currently not implemented.
    # See: puddlestuff.mainwin.tagsources.SimpleDialog
    def retrieve(self, release):
        """
        Called to retrieve release information when expanding a release in the Tag Sources Tree view
        :param release: A dict containing information about an album release
        :return: a 2-tuple containing
        a dict of album release tags/fields and their values, and
        a list of dicts containing album track tags/fields and their values
        """
        raise UnimplementedMethod("TagsSource.retrieve")
    def keyword_search(self, text):
        """
        Called when Search button is clicked, if the Search box contains some text
        :param text: The text in the Search box
        :return: A list of 2-tuples as described in retrieve() above
        """
        raise UnimplementedMethod("TagsSource.keyword_search")
    def search(self, files_or_group, files=None):
        """
        Called when Search button is clicked, if the Search box is empty
        :param files_or_group: if self.group_by is Falsey, a list of selected audio files as ModelTag instances.
        (see puddlestuff.tagmodel.model_tag.ModelTag)
        if self.group_by is a list of attributes then the value of the first group_by tag.
        For example if the first group_by tag is "artist" then this will be name of an artist
        to search for.
        :param files: Provided only if self.group_by is defined. In which case it is the contents of the named group.
        This will be a dict level for each remaining group_by tag, with a list of files at the bottom
        (a ModelTag instances - see puddlestuff.tagmodel.model_tag.ModelTag).
        :return: A list of 2-tuples as described in retrieve() above
        """
        raise UnimplementedMethod("TagsSource.search")
    def submit(self, files):
        """
        If defined, a Submit button is presented, and when clicked calls here
        for submitting data from the list of supplied files (to the remote web database)
        If not implementing this do so explicitly with:
        delattr(self, "submit")
        in the Tag Sources __init__() method.
        :param files: a list of selected audio files as ModelTag instances.
        (see puddlestuff.tagmodel.model_tag.ModelTag)
        :return: Nothing. No result is checked.
        """
        raise UnimplementedMethod("TagsSource.submit")
|
controllers | promotecontroller | # The contents of this file are subject to the Common Public Attribution
# License Version 1.0. (the "License"); you may not use this file except in
# compliance with the License. You may obtain a copy of the License at
# http://code.reddit.com/LICENSE. The License is based on the Mozilla Public
# License Version 1.1, but Sections 14 and 15 have been added to cover use of
# software over a computer network and provide for limited attribution for the
# Original Developer. In addition, Exhibit A has been modified to be consistent
# with Exhibit B.
#
# Software distributed under the License is distributed on an "AS IS" basis,
# WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License for
# the specific language governing rights and limitations under the License.
#
# The Original Code is reddit.
#
# The Original Developer is the Initial Developer. The Initial Developer of
# the Original Code is reddit Inc.
#
# All portions of the code written by reddit are Copyright (c) 2006-2015 reddit
# Inc. All Rights Reserved.
###############################################################################
import hashlib
import hmac
import json
import mimetypes
import os
import urllib
from collections import defaultdict
from datetime import datetime, timedelta
from babel.dates import format_date
from babel.numbers import format_number
from pylons import app_globals as g
from pylons import request
from pylons import tmpl_context as c
from pylons.i18n import N_, _
from r2.config import feature
from r2.controllers.api import ApiController
from r2.controllers.listingcontroller import ListingController
from r2.controllers.reddit_base import RedditController
from r2.lib import hooks, inventory, media, promote, s3_helpers
from r2.lib.authorize import (
PROFILE_LIMIT,
add_or_update_payment_method,
get_or_create_customer_profile,
)
from r2.lib.authorize.api import AuthorizeNetException
from r2.lib.base import abort
from r2.lib.db import queries
from r2.lib.errors import errors
from r2.lib.filters import jssafe, scriptsafe_dumps, websafe
from r2.lib.memoize import memoize
from r2.lib.menus import NamedButton, NavButton, NavMenu, QueryButton
from r2.lib.pages import (
LinkInfoPage,
PaymentForm,
PromoteInventory,
PromoteLinkEdit,
PromoteLinkNew,
PromotePage,
PromoteReport,
Reddit,
RefundPage,
RenderableCampaign,
SponsorLookupUser,
)
from r2.lib.pages.things import default_thing_wrapper, wrap_links
from r2.lib.system_messages import user_added_messages
from r2.lib.template_helpers import add_sr, format_html
from r2.lib.utils import UrlParser, constant_time_compare, is_subdomain, to36, to_date
from r2.lib.validator import (
VAccountByName,
ValidAddress,
ValidCard,
ValidEmail,
VBoolean,
VByName,
VCollection,
VDate,
VExistingUname,
VFloat,
VFrequencyCap,
VImageType,
VInt,
VLength,
VLink,
VList,
VLocation,
VMarkdownLength,
VModhash,
VOneOf,
VOSVersion,
VPrintable,
VPriority,
VPromoCampaign,
VPromoTarget,
VRatelimit,
VShamedDomain,
VSponsor,
VSponsorAdmin,
VSponsorAdminOrAdminSecret,
VSubmitSR,
VTitle,
VUploadLength,
VUrl,
VVerifiedSponsor,
json_validate,
nop,
noresponse,
validate,
validatedForm,
validatedMultipartForm,
)
from r2.models import (
PROMOTE_STATUS,
Account,
AccountsByCanonicalEmail,
Collection,
Frontpage,
Link,
Message,
NotFound,
PromoCampaign,
PromotionLog,
PromotionPrices,
PromotionWeights,
Subreddit,
Target,
calc_impressions,
)
from r2.models.promo import PROMOTE_COST_BASIS, PROMOTE_PRIORITIES
# Device-name lists used when targeting promos by mobile OS.
IOS_DEVICES = (
    "iPhone",
    "iPad",
    "iPod",
)
ANDROID_DEVICES = (
    "phone",
    "tablet",
)
# Maximum length accepted for ad URLs sent to Adzerk.
ADZERK_URL_MAX_LENGTH = 499
# Timestamp format used in S3 direct-post callback signatures.
EXPIRES_DATE_FORMAT = "%Y-%m-%dT%H:%M:%S"
# MIME types accepted for ad image uploads.
ALLOWED_IMAGE_TYPES = set(["image/jpg", "image/jpeg", "image/png"])
def _format_expires(expires):
    """Render *expires* (a datetime) using the callback timestamp format."""
    formatted = expires.strftime(EXPIRES_DATE_FORMAT)
    return formatted
def _get_callback_hmac(username, key, expires):
    """HMAC-SHA256 signature for an S3 direct-post callback.

    Signs "username|key|expires" with the shared callback secret so the
    callback handler can verify authenticity and expiry.
    """
    secret = g.secrets["s3_direct_post_callback"]
    expires_str = _format_expires(expires)
    data = "|".join([username, key, expires_str])
    # NOTE(review): str arguments to hmac.new work on Python 2; explicit
    # encoding would be required on Python 3.
    return hmac.new(secret, data, hashlib.sha256).hexdigest()
def _force_images(link, thumbnail, mobile):
    """Attach uploaded ad images to *link*; return True if anything changed."""
    changed = False
    if thumbnail:
        media.force_thumbnail(link, thumbnail["data"], thumbnail["ext"])
        changed = True
    # Mobile ad images are only applied when the feature flag is enabled.
    if feature.is_enabled("mobile_targeting") and mobile:
        media.force_mobile_ad_image(link, mobile["data"], mobile["ext"])
        changed = True
    return changed
def campaign_has_oversold_error(form, campaign):
    """Check an existing campaign for oversold inventory via has_oversold_error.

    Campaigns whose priority overrides inventory (e.g. house ads) are exempt
    and return None without checking.
    """
    if campaign.priority.inventory_override:
        return
    return has_oversold_error(
        form,
        campaign,
        start=campaign.start_date,
        end=campaign.end_date,
        total_budget_pennies=campaign.total_budget_pennies,
        cpm=campaign.bid_pennies,
        target=campaign.target,
        location=campaign.location,
    )
def has_oversold_error(
    form, campaign, start, end, total_budget_pennies, cpm, target, location
):
    """Add form errors when requested impressions exceed available inventory.

    Returns True when oversold; otherwise returns None (falsy).
    """
    # NOTE(review): start == end gives ndays == 0 and a ZeroDivisionError
    # below — presumably campaign dates are validated upstream; confirm.
    ndays = (to_date(end) - to_date(start)).days
    total_request = calc_impressions(total_budget_pennies, cpm)
    daily_request = int(total_request / ndays)
    oversold = inventory.get_oversold(
        target, start, end, daily_request, ignore=campaign, location=location
    )
    if oversold:
        # The sellable total is bounded by the worst (minimum) day.
        min_daily = min(oversold.values())
        available = min_daily * ndays
        msg_params = {
            "available": format_number(available, locale=c.locale),
            "target": target.pretty_name,
            "start": start.strftime("%m/%d/%Y"),
            "end": end.strftime("%m/%d/%Y"),
        }
        c.errors.add(
            errors.OVERSOLD_DETAIL, field="total_budget_dollars", msg_params=msg_params
        )
        form.has_errors("total_budget_dollars", errors.OVERSOLD_DETAIL)
        return True
def _key_to_dict(key, data=False):
    """Convert a boto S3 key to {"url", "data", "ext"}.

    The key's contents are only fetched when *data* is True; otherwise
    "data" is None.
    """
    timer = g.stats.get_timer(
        "providers.s3.get_ads_key_meta.with_%s" % ("data" if data else "no_data")
    )
    timer.start()
    url = key.generate_url(expires_in=0, query_auth=False)
    # Generating an S3 url without authentication fails for IAM roles.
    # This removes the bad query params.
    # see: https://github.com/boto/boto/issues/2043
    url = promote.update_query(url, {"x-amz-security-token": None}, unset=True)
    result = {
        "url": url,
        "data": key.get_contents_as_string() if data else None,
        "ext": key.get_metadata("ext"),
    }
    timer.stop()
    return result
def _get_ads_keyspace(thing):
return "ads/%s/" % thing._fullname
def _get_ads_images(thing, data=False, **kwargs):
    """Fetch *thing*'s uploaded ad images ("mobile"/"thumbnail") from S3.

    Extra keyword args are forwarded to s3_helpers.get_keys. Returns a dict
    mapping image role name to a _key_to_dict() result.
    """
    images = {}
    timer = g.stats.get_timer("providers.s3.get_ads_image_keys")
    timer.start()
    keys = s3_helpers.get_keys(
        g.s3_client_uploads_bucket, prefix=_get_ads_keyspace(thing), **kwargs
    )
    timer.stop()
    for key in keys:
        filename = os.path.basename(key.key)
        name, ext = os.path.splitext(filename)
        # Only the two known image roles are recognized; skip anything else.
        if name not in ("mobile", "thumbnail"):
            continue
        images[name] = _key_to_dict(key, data=data)
    return images
def _clear_ads_images(thing):
    """Delete every uploaded ad image stored under *thing*'s S3 prefix."""
    timer = g.stats.get_timer("providers.s3.delete_ads_image_keys")
    timer.start()
    s3_helpers.delete_keys(g.s3_client_uploads_bucket, prefix=_get_ads_keyspace(thing))
    timer.stop()
class PromoteController(RedditController):
    """Self-serve advertiser pages: create/edit promoted links and pay."""
    @validate(VSponsor())
    def GET_new_promo(self):
        """Render the "create sponsored link" form."""
        ads_images = _get_ads_images(c.user)
        # Only the URLs are needed to pre-populate the image uploaders.
        images = {k: v.get("url") for k, v in ads_images.iteritems()}
        return PromotePage(
            title=_("create sponsored link"),
            content=PromoteLinkNew(images),
            extra_js_config={
                "ads_virtual_page": "new-promo",
            },
        ).render()
    @validate(VSponsor("link"), link=VLink("link"))
    def GET_edit_promo(self, link):
        """Render the edit form for an existing promoted link."""
        if not link or link.promoted is None:
            return self.abort404()
        rendered = wrap_links(link, skip=False)
        form = PromoteLinkEdit(link, rendered)
        page = PromotePage(
            title=_("edit sponsored link"),
            content=form,
            show_sidebar=False,
            extension_handling=False,
        )
        return page.render()
    @validate(VSponsorAdmin(), link=VLink("link"), campaign=VPromoCampaign("campaign"))
    def GET_refund(self, link, campaign):
        """Admin-only refund page for one campaign of *link*."""
        if link._id != campaign.link_id:
            return self.abort404()
        content = RefundPage(link, campaign)
        return Reddit("refund", content=content, show_sidebar=False).render()
    @validate(
        VVerifiedSponsor("link"),
        link=VLink("link"),
        campaign=VPromoCampaign("campaign"),
    )
    def GET_pay(self, link, campaign):
        """Checkout page for a campaign; only the link's author may pay."""
        if link._id != campaign.link_id:
            return self.abort404()
        # no need for admins to play in the credit card area
        if c.user_is_loggedin and c.user._id != link.author_id:
            return self.abort404()
        if g.authorizenetapi:
            data = get_or_create_customer_profile(c.user)
            content = PaymentForm(
                link,
                campaign,
                customer_id=data.customerProfileId,
                profiles=data.paymentProfiles,
                max_profiles=PROFILE_LIMIT,
            )
        else:
            content = None
        res = LinkInfoPage(
            link=link,
            content=content,
            show_sidebar=False,
            extra_js_config={
                "ads_virtual_page": "checkout",
            },
        )
        return res.render()
class SponsorController(PromoteController):
    """Admin-only reporting and lookup tools for sponsored links."""
    @validate(
        VSponsorAdminOrAdminSecret("secret"),
        start=VDate("startdate"),
        end=VDate("enddate"),
        link_text=nop("link_text"),
        owner=VAccountByName("owner"),
        grouping=VOneOf("grouping", ("total", "day"), default="total"),
    )
    def GET_report(self, start, end, grouping, link_text=None, owner=None):
        """Promo performance report over a date range, by owner and/or links."""
        now = datetime.now(g.tz).replace(hour=0, minute=0, second=0, microsecond=0)
        if not start or not end:
            # Default to the coming week and clear the BAD_DATE errors the
            # validators raised for the missing parameters.
            start = promote.promo_datetime_now(offset=1).date()
            end = promote.promo_datetime_now(offset=8).date()
            c.errors.remove((errors.BAD_DATE, "startdate"))
            c.errors.remove((errors.BAD_DATE, "enddate"))
        end = end or now - timedelta(days=1)
        start = start or end - timedelta(days=7)
        links = []
        bad_links = []
        owner_name = owner.name if owner else ""
        if owner:
            campaign_ids = PromotionWeights.get_campaign_ids(
                start, end, author_id=owner._id
            )
            campaigns = PromoCampaign._byID(campaign_ids, data=True)
            link_ids = {camp.link_id for camp in campaigns.itervalues()}
            links.extend(Link._byID(link_ids, data=True, return_dict=False))
        if link_text is not None:
            # link_text is a comma- or whitespace-separated list of id36s.
            id36s = link_text.replace(",", " ").split()
            try:
                links_from_text = Link._byID36(id36s, data=True)
            except NotFound:
                links_from_text = {}
            bad_links = [id36 for id36 in id36s if id36 not in links_from_text]
            links.extend(links_from_text.values())
        content = PromoteReport(
            links,
            link_text,
            owner_name,
            bad_links,
            start,
            end,
            group_by_date=grouping == "day",
        )
        if c.render_style == "csv":
            return content.as_csv()
        else:
            return PromotePage(
                title=_("sponsored link report"), content=content
            ).render()
    @validate(
        VSponsorAdmin(),
        start=VDate("startdate"),
        end=VDate("enddate"),
        sr_name=nop("sr_name"),
        collection_name=nop("collection_name"),
    )
    def GET_promote_inventory(self, start, end, sr_name, collection_name):
        """Inventory report for a target (frontpage, subreddit, or collection)."""
        if not start or not end:
            start = promote.promo_datetime_now(offset=1).date()
            end = promote.promo_datetime_now(offset=8).date()
            c.errors.remove((errors.BAD_DATE, "startdate"))
            c.errors.remove((errors.BAD_DATE, "enddate"))
        target = Target(Frontpage.name)
        if sr_name:
            try:
                sr = Subreddit._by_name(sr_name)
                target = Target(sr.name)
            except NotFound:
                c.errors.add(errors.SUBREDDIT_NOEXIST, field="sr_name")
        elif collection_name:
            collection = Collection.by_name(collection_name)
            if not collection:
                c.errors.add(errors.COLLECTION_NOEXIST, field="collection_name")
            else:
                target = Target(collection)
        content = PromoteInventory(start, end, target)
        if c.render_style == "csv":
            return content.as_csv()
        else:
            return PromotePage(
                title=_("sponsored link inventory"), content=content
            ).render()
    @validate(
        VSponsorAdmin(),
        id_user=VByName("name", thing_cls=Account),
        email=ValidEmail("email"),
    )
    def GET_lookup_user(self, id_user, email):
        """Look up accounts by fullname and/or canonical email address."""
        email_users = AccountsByCanonicalEmail.get_accounts(email)
        content = SponsorLookupUser(
            id_user=id_user, email=email, email_users=email_users
        )
        return PromotePage(title="look up user", content=content).render()
class PromoteListingController(ListingController):
    """Listing pages of the current user's promoted links, filtered by status."""
    where = "promoted"
    render_cls = PromotePage
    # Page titles per sort value (marked for translation with N_).
    titles = {
        "future_promos": N_("unapproved promoted links"),
        "pending_promos": N_("accepted promoted links"),
        "unpaid_promos": N_("unpaid promoted links"),
        "rejected_promos": N_("rejected promoted links"),
        "live_promos": N_("live promoted links"),
        "edited_live_promos": N_("edited live promoted links"),
        "all": N_("all promoted links"),
    }
    base_path = "/promoted"
    default_filters = [
        NamedButton("all_promos", dest="", use_params=False, aliases=["/sponsor"]),
        NamedButton("future_promos", use_params=False),
        NamedButton("unpaid_promos", use_params=False),
        NamedButton("rejected_promos", use_params=False),
        NamedButton("pending_promos", use_params=False),
        NamedButton("live_promos", use_params=False),
        NamedButton("edited_live_promos", use_params=False),
    ]
    def title(self):
        # Translate the title for the current sort at request time.
        return _(self.titles[self.sort])
    @property
    def title_text(self):
        return _("promoted by you")
    @property
    def menus(self):
        """Navigation drop-down of status filters."""
        filters = [
            NamedButton("all_promos", dest="", use_params=False, aliases=["/sponsor"]),
            NamedButton("future_promos", use_params=False),
            NamedButton("unpaid_promos", use_params=False),
            NamedButton("rejected_promos", use_params=False),
            NamedButton("pending_promos", use_params=False),
            NamedButton("live_promos", use_params=False),
        ]
        menus = [
            NavMenu(filters, base_path=self.base_path, title="show", type="lightdrop")
        ]
        return menus
    def builder_wrapper(self, thing):
        builder_wrapper = default_thing_wrapper()
        w = builder_wrapper(thing)
        # Unapproved links drop out of the review queue once they are seen.
        w.hide_after_seen = self.sort == "future_promos"
        return w
    def keep_fn(self):
        """Return a predicate deciding whether a listed item is shown."""
        def keep(item):
            if self.sort == "future_promos":
                # this sort is used to review links that need to be approved
                # skip links that don't have any paid campaigns
                campaigns = list(PromoCampaign._by_link(item._id))
                if not any(promote.authed_or_not_needed(camp) for camp in campaigns):
                    return False
            if item.promoted and not item._deleted:
                return True
            else:
                return False
        return keep
    def query(self):
        """Map the sort value to the matching precomputed user query."""
        if self.sort == "future_promos":
            return queries.get_unapproved_links(c.user._id)
        elif self.sort == "pending_promos":
            return queries.get_accepted_links(c.user._id)
        elif self.sort == "unpaid_promos":
            return queries.get_unpaid_links(c.user._id)
        elif self.sort == "rejected_promos":
            return queries.get_rejected_links(c.user._id)
        elif self.sort == "live_promos":
            return queries.get_live_links(c.user._id)
        elif self.sort == "edited_live_promos":
            return queries.get_edited_live_links(c.user._id)
        elif self.sort == "all":
            return queries.get_promoted_links(c.user._id)
    @validate(VSponsor())
    def GET_listing(self, sort="all", **env):
        self.sort = sort
        return ListingController.GET_listing(self, **env)
class SponsorListingController(PromoteListingController):
    """Sponsor/admin listings of all promoted links, with extra sorts."""
    # Merge the base titles with the admin-only sorts.
    # NOTE: dict(a.items() + b.items()) is the Python 2 merge idiom.
    titles = dict(
        PromoteListingController.titles.items()
        + {
            "underdelivered": N_("underdelivered promoted links"),
            "reported": N_("reported promoted links"),
            "house": N_("house promoted links"),
            "fraud": N_("fraud suspected promoted links"),
        }.items()
    )
    base_path = "/sponsor/promoted"
    @property
    def title_text(self):
        return _("promos on reddit")
    @property
    def menus(self):
        """Build the filter menus; admin sorts get managed/unpaid toggles."""
        managed_menu = NavMenu(
            [
                QueryButton(
                    "exclude managed", dest=None, query_param="include_managed"
                ),
                QueryButton(
                    "include managed", dest="yes", query_param="include_managed"
                ),
            ],
            base_path=request.path,
            type="lightdrop",
        )
        if self.sort in {"underdelivered", "reported", "house", "fraud"}:
            menus = []
            if self.sort == "fraud":
                fraud_menu = NavMenu(
                    [
                        QueryButton(
                            "exclude unpaid", dest=None, query_param="exclude_unpaid"
                        ),
                        QueryButton(
                            "include unpaid", dest="no", query_param="exclude_unpaid"
                        ),
                    ],
                    base_path=request.path,
                    type="lightdrop",
                )
                menus.append(fraud_menu)
            if self.sort in ("house", "fraud"):
                menus.append(managed_menu)
        else:
            menus = super(SponsorListingController, self).menus
            menus.append(managed_menu)
        if self.sort == "live_promos":
            # Add a per-subreddit drop-down, with the frontpage listed first
            # when it currently has live promos.
            srnames = promote.all_live_promo_srnames()
            buttons = [NavButton("all", "", use_params=True)]
            try:
                srnames.remove(Frontpage.name)
                frontbutton = NavButton(
                    "FRONTPAGE",
                    Frontpage.name,
                    use_params=True,
                    aliases=["/promoted/live_promos/%s" % urllib.quote(Frontpage.name)],
                )
                buttons.append(frontbutton)
            except KeyError:
                pass
            srnames = sorted(srnames, key=lambda name: name.lower())
            buttons.extend(NavButton(name, name, use_params=True) for name in srnames)
            base_path = self.base_path + "/live_promos"
            menus.append(
                NavMenu(
                    buttons, base_path=base_path, title="subreddit", type="lightdrop"
                )
            )
        return menus
    @classmethod
    @memoize("live_by_subreddit", time=300)
    def _live_by_subreddit(cls, sr_names):
        # Memoized lookup of live promoted links per subreddit name list.
        promotuples = promote.get_live_promotions(sr_names)
        return [pt.link for pt in promotuples]
    # NOTE(review): missing @classmethod — works today because it is called
    # on instances ("cls" receives the instance), but the name is misleading.
    def live_by_subreddit(cls, sr):
        return cls._live_by_subreddit([sr.name])
    @classmethod
    @memoize("house_link_names", time=60)
    def get_house_link_names(cls):
        """Fullnames of links with currently scheduled house campaigns."""
        now = promote.promo_datetime_now()
        campaign_ids = PromotionWeights.get_campaign_ids(now)
        q = PromoCampaign._query(
            PromoCampaign.c._id.in_(campaign_ids),
            PromoCampaign.c.priority_name == "house",
            data=True,
        )
        link_names = {Link._fullname_from_id36(to36(camp.link_id)) for camp in q}
        return sorted(link_names, reverse=True)
    def keep_fn(self):
        """Extend the base filter with managed/unpaid exclusions."""
        base_keep_fn = PromoteListingController.keep_fn(self)
        if self.exclude_unpaid:
            exclude = set(queries.get_all_unpaid_links())
        else:
            exclude = set()
        def keep(item):
            if not self.include_managed and item.managed_promo:
                return False
            if self.exclude_unpaid and item._fullname in exclude:
                return False
            return base_keep_fn(item)
        return keep
    def query(self):
        """Map the sort value to the matching site-wide query."""
        if self.sort == "future_promos":
            return queries.get_all_unapproved_links()
        elif self.sort == "pending_promos":
            return queries.get_all_accepted_links()
        elif self.sort == "unpaid_promos":
            return queries.get_all_unpaid_links()
        elif self.sort == "rejected_promos":
            return queries.get_all_rejected_links()
        elif self.sort == "live_promos" and self.sr:
            return self.live_by_subreddit(self.sr)
        elif self.sort == "live_promos":
            return queries.get_all_live_links()
        elif self.sort == "edited_live_promos":
            return queries.get_all_edited_live_links()
        elif self.sort == "underdelivered":
            q = queries.get_underdelivered_campaigns()
            campaigns = PromoCampaign._by_fullname(
                list(q), data=True, return_dict=False
            )
            link_ids = [camp.link_id for camp in campaigns]
            return [Link._fullname_from_id36(to36(id)) for id in link_ids]
        elif self.sort == "reported":
            return queries.get_reported_links(Subreddit.get_promote_srid())
        elif self.sort == "fraud":
            return queries.get_payment_flagged_links()
        elif self.sort == "house":
            return self.get_house_link_names()
    def listing(self):
        """For sponsors, update wrapped links to include their campaigns."""
        pane = super(self.__class__, self).listing()
        if c.user_is_sponsor:
            link_ids = {item._id for item in pane.things}
            campaigns = PromoCampaign._by_link(link_ids)
            campaigns_by_link = defaultdict(list)
            for camp in campaigns:
                campaigns_by_link[camp.link_id].append(camp)
            for item in pane.things:
                campaigns = campaigns_by_link[item._id]
                item.campaigns = RenderableCampaign.from_campaigns(
                    item, campaigns, full_details=False
                )
                item.cachable = False
                item.show_campaign_summary = True
        return pane
    @validate(
        VSponsorAdmin(),
        srname=nop("sr"),
        include_managed=VBoolean("include_managed"),
        exclude_unpaid=VBoolean("exclude_unpaid"),
    )
    def GET_listing(
        self, srname=None, include_managed=False, exclude_unpaid=None, sort="all", **kw
    ):
        self.sort = sort
        self.sr = None
        self.include_managed = include_managed
        # Fraud listings default to excluding unpaid links unless overridden.
        if "exclude_unpaid" not in request.GET:
            self.exclude_unpaid = self.sort == "fraud"
        else:
            self.exclude_unpaid = exclude_unpaid
        if srname:
            try:
                self.sr = Subreddit._by_name(srname)
            except NotFound:
                pass
        return ListingController.GET_listing(self, **kw)
def allowed_location_and_target(location, target):
    """Return whether this user may combine *location* with *target*."""
    if c.user_is_sponsor or feature.is_enabled("ads_auction"):
        return True
    # regular users can only use locations when targeting frontpage
    has_location = not (location and location.country)
    targets_frontpage = (
        not target.is_collection and target.subreddit_name == Frontpage.name
    )
    return has_location or targets_frontpage
class PromoteApiController(ApiController):
    @json_validate(
        sr=VSubmitSR("sr", promotion=True),
        collection=VCollection("collection"),
        location=VLocation(),
        start=VDate("startdate"),
        end=VDate("enddate"),
        platform=VOneOf("platform", ("mobile", "desktop", "all"), default="all"),
    )
    def GET_check_inventory(
        self, responder, sr, collection, location, start, end, platform
    ):
        """Return available pageview inventory for a target and date range."""
        if collection:
            target = Target(collection)
            sr = None
        else:
            # Default to the frontpage when no subreddit was given.
            sr = sr or Frontpage
            target = Target(sr.name)
        if not allowed_location_and_target(location, target):
            return abort(403, "forbidden")
        available = inventory.get_available_pageviews(
            target, start, end, location=location, platform=platform, datestr=True
        )
        return {"inventory": available}
    @validatedForm(
        VSponsorAdmin(),
        VModhash(),
        link=VLink("link_id36"),
        campaign=VPromoCampaign("campaign_id36"),
    )
    def POST_freebie(self, form, jquery, link, campaign):
        """Admin action: comp (make free) one campaign of a promoted link."""
        if not link or not campaign or link._id != campaign.link_id:
            return abort(404, "not found")
        # A freebie still consumes inventory, so oversold targets are refused.
        if campaign_has_oversold_error(form, campaign):
            form.set_text(".freebie", _("target oversold, can't freebie"))
            return
        if promote.is_promo(link) and campaign:
            promote.free_campaign(link, campaign, c.user)
            form.redirect(promote.promo_edit_url(link))
    @validatedForm(VSponsorAdmin(), VModhash(), link=VByName("link"), note=nop("note"))
    def POST_promote_note(self, form, jquery, link, note):
        """Admin action: append a note to a promoted link's promotion log."""
        if promote.is_promo(link):
            text = PromotionLog.add(link, note)
            # Append the rendered note to the on-page notes list.
            form.find(".notes").children(":last").after(format_html("<p>%s</p>", text))
    @validatedForm(
        VSponsorAdmin(),
        VModhash(),
        thing=VByName("thing_id"),
        is_fraud=VBoolean("fraud"),
    )
    def POST_review_fraud(self, form, jquery, thing, is_fraud):
        """Admin action: record the fraud-review verdict for a promoted thing."""
        if not thing or not getattr(thing, "promoted", False):
            return
        promote.review_fraud(thing, is_fraud)
        # Update the button label and fade the reviewed link out of the list.
        button = jquery(".id-%s .fraud-button" % thing._fullname)
        button.text(_("fraud" if is_fraud else "not fraud"))
        form.parents(".link").fadeOut()
    @noresponse(VSponsorAdmin(), VModhash(), thing=VByName("id"))
    def POST_promote(self, thing):
        """Admin action: accept (approve) a promotion."""
        if promote.is_promo(thing):
            promote.accept_promotion(thing)
    @noresponse(VSponsorAdmin(), VModhash(), thing=VByName("id"), reason=nop("reason"))
    def POST_unpromote(self, thing, reason):
        """Admin action: reject a promotion, with an optional reason."""
        if promote.is_promo(thing):
            promote.reject_promotion(thing, reason=reason)
    @validatedForm(
        VSponsorAdmin(),
        VModhash(),
        link=VLink("link"),
        campaign=VPromoCampaign("campaign"),
    )
    def POST_refund_campaign(self, form, jquery, link, campaign):
        """Admin action: refund the unspent portion of a campaign."""
        if not link or not campaign or link._id != campaign.link_id:
            return abort(404, "not found")
        # If created before switch to auction, use old billing method
        if hasattr(campaign, "cpm"):
            billable_impressions = promote.get_billable_impressions(campaign)
            billable_amount = promote.get_billable_amount(
                campaign, billable_impressions
            )
            refund_amount = promote.get_refund_amount(campaign, billable_amount)
        # Otherwise, use adserver_spent_pennies
        else:
            billable_amount = campaign.total_budget_pennies / 100.0
            refund_amount = billable_amount - (campaign.adserver_spent_pennies / 100.0)
            billable_impressions = None
        if refund_amount <= 0:
            form.set_text(".status", _("refund not needed"))
            return
        if promote.refund_campaign(
            link, campaign, refund_amount, billable_amount, billable_impressions
        ):
            form.set_text(".status", _("refund succeeded"))
        else:
            form.set_text(".status", _("refund failed"))
    @validatedForm(
        VSponsor("link_id36"),
        VModhash(),
        VRatelimit(rate_user=True, rate_ip=True, prefix="create_promo_"),
        VShamedDomain("url"),
        username=VLength("username", 100, empty_error=None),
        title=VTitle("title"),
        url=VUrl("url", allow_self=False),
        selftext=VMarkdownLength("text", max_length=40000),
        kind=VOneOf("kind", ["link", "self"]),
        disable_comments=VBoolean("disable_comments"),
        sendreplies=VBoolean("sendreplies"),
        media_url=VUrl("media_url", allow_self=False, valid_schemes=("http", "https")),
        gifts_embed_url=VUrl(
            "gifts_embed_url", allow_self=False, valid_schemes=("http", "https")
        ),
        media_url_type=VOneOf("media_url_type", ("redditgifts", "scrape")),
        media_autoplay=VBoolean("media_autoplay"),
        media_override=VBoolean("media-override"),
        domain_override=VLength("domain", 100),
        third_party_tracking=VUrl("third_party_tracking"),
        third_party_tracking_2=VUrl("third_party_tracking_2"),
        is_managed=VBoolean("is_managed"),
    )
    def POST_create_promo(
        self,
        form,
        jquery,
        username,
        title,
        url,
        selftext,
        kind,
        disable_comments,
        sendreplies,
        media_url,
        media_autoplay,
        media_override,
        iframe_embed_url,
        media_url_type,
        domain_override,
        third_party_tracking,
        third_party_tracking_2,
        is_managed,
    ):
        """Create a new promoted link; delegates to _edit_promo.

        NOTE(review): the validator declares "gifts_embed_url" while the
        parameter here is named "iframe_embed_url" — confirm the form
        machinery maps these correctly, otherwise the mismatch is a bug.
        """
        # Any ad images uploaded ahead of time are attached at creation.
        images = _get_ads_images(c.user, data=True, meta=True)
        return self._edit_promo(
            form,
            jquery,
            username,
            title,
            url,
            selftext,
            kind,
            disable_comments,
            sendreplies,
            media_url,
            media_autoplay,
            media_override,
            iframe_embed_url,
            media_url_type,
            domain_override,
            third_party_tracking,
            third_party_tracking_2,
            is_managed,
            thumbnail=images.get("thumbnail", None),
            mobile=images.get("mobile", None),
        )
    @validatedForm(
        VSponsor("link_id36"),
        VModhash(),
        VRatelimit(rate_user=True, rate_ip=True, prefix="create_promo_"),
        VShamedDomain("url"),
        username=VLength("username", 100, empty_error=None),
        title=VTitle("title"),
        url=VUrl("url", allow_self=False),
        selftext=VMarkdownLength("text", max_length=40000),
        kind=VOneOf("kind", ["link", "self"]),
        disable_comments=VBoolean("disable_comments"),
        sendreplies=VBoolean("sendreplies"),
        media_url=VUrl("media_url", allow_self=False, valid_schemes=("http", "https")),
        gifts_embed_url=VUrl(
            "gifts_embed_url", allow_self=False, valid_schemes=("http", "https")
        ),
        media_url_type=VOneOf("media_url_type", ("redditgifts", "scrape")),
        media_autoplay=VBoolean("media_autoplay"),
        media_override=VBoolean("media-override"),
        domain_override=VLength("domain", 100),
        third_party_tracking=VUrl("third_party_tracking"),
        third_party_tracking_2=VUrl("third_party_tracking_2"),
        is_managed=VBoolean("is_managed"),
        l=VLink("link_id36"),
    )
    def POST_edit_promo(
        self,
        form,
        jquery,
        username,
        title,
        url,
        selftext,
        kind,
        disable_comments,
        sendreplies,
        media_url,
        media_autoplay,
        media_override,
        iframe_embed_url,
        media_url_type,
        domain_override,
        third_party_tracking,
        third_party_tracking_2,
        is_managed,
        l,
    ):
        """Edit an existing promoted link *l*; delegates to _edit_promo.

        NOTE(review): as in POST_create_promo, the validator's
        "gifts_embed_url" vs the "iframe_embed_url" parameter name should
        be confirmed against the validatedForm mapping rules.
        """
        images = _get_ads_images(l, data=True, meta=True)
        return self._edit_promo(
            form,
            jquery,
            username,
            title,
            url,
            selftext,
            kind,
            disable_comments,
            sendreplies,
            media_url,
            media_autoplay,
            media_override,
            iframe_embed_url,
            media_url_type,
            domain_override,
            third_party_tracking,
            third_party_tracking_2,
            is_managed,
            l=l,
            thumbnail=images.get("thumbnail", None),
            mobile=images.get("mobile", None),
        )
def _edit_promo(
    self,
    form,
    jquery,
    username,
    title,
    url,
    selftext,
    kind,
    disable_comments,
    sendreplies,
    media_url,
    media_autoplay,
    media_override,
    # Renamed from `iframe_embed_url`: the body below reads and assigns
    # `gifts_embed_url`, which previously was only a local assigned from
    # itself -- the sponsor media path raised UnboundLocalError. Both
    # in-file callers pass this argument positionally, so the rename is
    # interface-safe.
    gifts_embed_url,
    media_url_type,
    domain_override,
    third_party_tracking,
    third_party_tracking_2,
    is_managed,
    l=None,
    thumbnail=None,
    mobile=None,
):
    """Create (when `l` is None) or update a promoted link.

    Validates user/URL/title input, creates or mutates the Link, handles
    sponsor-only media embeds (scraped media or redditgifts iframes), and
    redirects to the promo edit page.
    """
    should_ratelimit = False
    is_self = kind == "self"
    is_link = not is_self
    is_new_promoted = not l

    # Sponsors are exempt from the submission rate limit.
    if not c.user_is_sponsor:
        should_ratelimit = True
    if not should_ratelimit:
        c.errors.remove((errors.RATELIMIT, "ratelimit"))

    # check for user override: sponsors may create a promo on behalf of
    # another account, provided it has a verified email address.
    if is_new_promoted and c.user_is_sponsor and username:
        try:
            user = Account._by_name(username)
        except NotFound:
            c.errors.add(errors.USER_DOESNT_EXIST, field="username")
            form.set_error(errors.USER_DOESNT_EXIST, "username")
            return

        if not user.email:
            c.errors.add(errors.NO_EMAIL_FOR_USER, field="username")
            form.set_error(errors.NO_EMAIL_FOR_USER, "username")
            return

        if not user.email_verified:
            c.errors.add(errors.NO_VERIFIED_EMAIL, field="username")
            form.set_error(errors.NO_VERIFIED_EMAIL, "username")
            return
    else:
        user = c.user

    # check for shame banned domains
    if form.has_errors("url", errors.DOMAIN_BANNED):
        g.stats.simple_event("spam.shame.link")
        return

    # demangle URL in canonical way
    if url:
        if isinstance(url, (unicode, str)):
            form.set_inputs(url=url)
        elif isinstance(url, tuple) or isinstance(url[0], Link):
            # there's already one or more links with this URL, but
            # we're allowing mutliple submissions, so we really just
            # want the URL
            url = url[0].url

        # Adzerk limits URLs length for creatives
        # NOTE(review): the limit checked is ADZERK_URL_MAX_LENGTH but the
        # error message reports PROMO_URL_MAX_LENGTH -- confirm which
        # constant is intended here.
        if len(url) > ADZERK_URL_MAX_LENGTH:
            c.errors.add(
                errors.TOO_LONG,
                field="url",
                msg_params={"max_length": PROMO_URL_MAX_LENGTH},
            )

    if is_link:
        if form.has_errors("url", errors.NO_URL, errors.BAD_URL, errors.TOO_LONG):
            return

    # users can change the disable_comments on promoted links
    if (is_new_promoted or not promote.is_promoted(l)) and (
        form.has_errors("title", errors.NO_TEXT, errors.TOO_LONG)
        or jquery.has_errors("ratelimit", errors.RATELIMIT)
    ):
        return

    if is_self and form.has_errors("text", errors.TOO_LONG):
        return

    if is_new_promoted:
        # creating a new promoted link
        l = promote.new_promotion(
            is_self=is_self,
            title=title,
            content=(selftext if is_self else url),
            author=user,
            ip=request.ip,
        )
        # Sponsor-only fields mirror the sponsor block on the edit path
        # below. NOTE(review): indentation reconstructed from a flattened
        # source -- confirm the sponsor guard's extent.
        if c.user_is_sponsor:
            l.managed_promo = is_managed
            l.domain_override = domain_override or None
            l.third_party_tracking = third_party_tracking or None
            l.third_party_tracking_2 = third_party_tracking_2 or None
            l._commit()
        _force_images(l, thumbnail=thumbnail, mobile=mobile)
        form.redirect(promote.promo_edit_url(l))
    elif not promote.is_promo(l):
        return
    else:
        # editing an existing promoted link
        changed = False

        if title and title != l.title:
            l.title = title
            changed = True

        if _force_images(l, thumbnail=thumbnail, mobile=mobile):
            changed = True

        # type changing
        if is_self != l.is_self:
            l.set_content(is_self, selftext if is_self else url)
            changed = True

        if is_link and url and url != l.url:
            l.url = url
            changed = True

        # only trips if changed by a non-sponsor
        if changed and not c.user_is_sponsor and promote.is_promoted(l):
            promote.edited_live_promotion(l)

        # selftext can be changed at any time
        if is_self:
            l.selftext = selftext

        # comment disabling and sendreplies is free to be changed any time.
        l.disable_comments = disable_comments
        l.sendreplies = sendreplies

        if c.user_is_sponsor:
            if form.has_errors("media_url", errors.BAD_URL) or form.has_errors(
                "gifts_embed_url", errors.BAD_URL
            ):
                return

        scraper_embed = media_url_type == "scrape"

        media_url = media_url or None
        gifts_embed_url = gifts_embed_url or None

        if c.user_is_sponsor and scraper_embed and media_url != l.media_url:
            if media_url:
                scraped = media._scrape_media(
                    media_url,
                    autoplay=media_autoplay,
                    save_thumbnail=False,
                    use_cache=True,
                )
                if scraped:
                    l.set_media_object(scraped.media_object)
                    l.set_secure_media_object(scraped.secure_media_object)
                    l.media_url = media_url
                    l.gifts_embed_url = None
                    l.media_autoplay = media_autoplay
                else:
                    c.errors.add(errors.SCRAPER_ERROR, field="media_url")
                    form.set_error(errors.SCRAPER_ERROR, "media_url")
                    return
            else:
                # URL cleared: drop any previously scraped media.
                l.set_media_object(None)
                l.set_secure_media_object(None)
                l.media_url = None
                l.gifts_embed_url = None
                l.media_autoplay = False

        if (
            c.user_is_sponsor
            and not scraper_embed
            and gifts_embed_url != l.gifts_embed_url
        ):
            if gifts_embed_url:
                # Only redditgifts.com embeds are allowed in this mode.
                parsed = UrlParser(gifts_embed_url)
                if not is_subdomain(parsed.hostname, "redditgifts.com"):
                    c.errors.add(errors.BAD_URL, field="gifts_embed_url")
                    form.set_error(errors.BAD_URL, "gifts_embed_url")
                    return

                sandbox = (
                    "allow-popups",
                    "allow-forms",
                    "allow-same-origin",
                    "allow-scripts",
                )
                iframe_attributes = {
                    # Was websafe(iframe_embed_url); after the parameter
                    # rename the validated redditgifts URL is the value
                    # this branch embeds.
                    "embed_url": websafe(gifts_embed_url),
                    "sandbox": " ".join(sandbox),
                }
                iframe = (
                    """
<iframe class="redditgifts-embed"
src="%(embed_url)s"
width="710" height="500" scrolling="no"
frameborder="0" allowfullscreen
sandbox="%(sandbox)s">
</iframe>
"""
                    % iframe_attributes
                )
                media_object = {
                    "oembed": {
                        "description": "redditgifts embed",
                        "height": 500,
                        "html": iframe,
                        "provider_name": "redditgifts",
                        "provider_url": "http://www.redditgifts.com/",
                        "title": "redditgifts secret santa 2014",
                        "type": "rich",
                        "width": 710,
                    },
                    "type": "redditgifts",
                }
                l.set_media_object(media_object)
                l.set_secure_media_object(media_object)
                l.media_url = None
                l.gifts_embed_url = gifts_embed_url
                l.media_autoplay = False
            else:
                l.set_media_object(None)
                l.set_secure_media_object(None)
                l.media_url = None
                l.gifts_embed_url = None
                l.media_autoplay = False

        if c.user_is_sponsor:
            l.media_override = media_override
            l.domain_override = domain_override or None
            l.third_party_tracking = third_party_tracking or None
            l.third_party_tracking_2 = third_party_tracking_2 or None
            l.managed_promo = is_managed

        l._commit()

    # ensure plugins are notified of the final edits to the link.
    # other methods also call this hook earlier in the process.
    # see: `promote.unapprove_promotion`
    if not is_new_promoted:
        hooks.get_hook("promote.edit_promotion").call(link=l)

    # clean up so the same images don't reappear if they create
    # another link
    _clear_ads_images(thing=c.user if is_new_promoted else l)

    form.redirect(promote.promo_edit_url(l))
def _lowest_max_cpm_bid_dollars(
    self, total_budget_dollars, bid_dollars, start, end
):
    """Return the smaller of the per-day budget cap and the global bid cap.

    The global limit ``g.max_bid_pennies`` is stored in pennies and is
    converted to dollars here. ``bid_dollars`` is accepted for signature
    parity with the caller but is not consulted.
    """
    campaign_days = (to_date(end) - to_date(start)).days
    budget_cap_dollars = total_budget_dollars / campaign_days
    global_cap_dollars = g.max_bid_pennies / 100.0
    return min(budget_cap_dollars, global_cap_dollars)
@validatedForm(
    VSponsor("link_id36"),
    VModhash(),
    is_auction=VBoolean("is_auction"),
    start=VDate("startdate", required=False),
    end=VDate("enddate"),
    link=VLink("link_id36"),
    target=VPromoTarget(),
    campaign_id36=nop("campaign_id36"),
    frequency_cap=VFrequencyCap(
        ("frequency_capped", "frequency_cap"),
    ),
    priority=VPriority("priority"),
    location=VLocation(),
    platform=VOneOf("platform", ("mobile", "desktop", "all"), default="desktop"),
    mobile_os=VList("mobile_os", choices=["iOS", "Android"]),
    os_versions=VOneOf("os_versions", ("all", "filter"), default="all"),
    ios_devices=VList("ios_device", choices=IOS_DEVICES),
    android_devices=VList("android_device", choices=ANDROID_DEVICES),
    ios_versions=VOSVersion("ios_version_range", "ios"),
    android_versions=VOSVersion("android_version_range", "android"),
    total_budget_dollars=VFloat("total_budget_dollars", coerce=False),
    cost_basis=VOneOf(
        "cost_basis",
        (
            "cpc",
            "cpm",
        ),
        default=None,
    ),
    bid_dollars=VFloat("bid_dollars", coerce=True),
)
def POST_edit_campaign(
    self,
    form,
    jquery,
    is_auction,
    link,
    campaign_id36,
    start,
    end,
    target,
    frequency_cap,
    priority,
    location,
    platform,
    mobile_os,
    os_versions,
    ios_devices,
    ios_versions,
    android_devices,
    android_versions,
    total_budget_dollars,
    cost_basis,
    bid_dollars,
):
    """Create or edit a PromoCampaign attached to a promoted link.

    When ``campaign_id36`` is supplied the existing campaign is updated,
    otherwise a new one is created. Validates targeting, dates, platform,
    budget and bid before persisting and re-rendering the campaign row.

    NOTE(review): indentation of this method was reconstructed from a
    whitespace-mangled source -- nesting of the date-limit checks should
    be confirmed against history.
    """
    if not link:
        return

    if form.has_errors(
        "frequency_cap", errors.INVALID_FREQUENCY_CAP
    ) or form.has_errors("frequency_cap", errors.FREQUENCY_CAP_TOO_LOW):
        return

    if not target:
        # run form.has_errors to populate the errors in the response
        form.has_errors(
            "sr",
            errors.SUBREDDIT_NOEXIST,
            errors.SUBREDDIT_NOTALLOWED,
            errors.SUBREDDIT_REQUIRED,
        )
        form.has_errors("collection", errors.COLLECTION_NOEXIST)
        form.has_errors("targeting", errors.INVALID_TARGET)
        return

    if form.has_errors("location", errors.INVALID_LOCATION):
        return

    if not allowed_location_and_target(location, target):
        return abort(403, "forbidden")

    if form.has_errors("startdate", errors.BAD_DATE) or form.has_errors(
        "enddate", errors.BAD_DATE
    ):
        return

    if not campaign_id36 and not start:
        c.errors.add(errors.BAD_DATE, field="startdate")
        # Fixed: arguments were swapped (field name first); every other
        # set_error call in this controller passes the error code first.
        form.set_error(errors.BAD_DATE, "startdate")

    if not feature.is_enabled("mobile_targeting") and platform != "desktop":
        return abort(403, "forbidden")

    if link.over_18 and not target.over_18:
        c.errors.add(errors.INVALID_NSFW_TARGET, field="targeting")
        form.has_errors("targeting", errors.INVALID_NSFW_TARGET)
        return

    if not feature.is_enabled("cpc_pricing"):
        cost_basis = "cpm"

    # Setup campaign details for existing campaigns
    campaign = None
    if campaign_id36:
        try:
            campaign = PromoCampaign._byID36(campaign_id36, data=True)
        except NotFound:
            pass
        if not campaign or (campaign._deleted or link._id != campaign.link_id):
            return abort(404, "not found")

    requires_reapproval = False
    is_live = promote.is_live_promo(link, campaign)
    is_complete = promote.is_complete_promo(link, campaign)

    if not c.user_is_sponsor:
        # If campaign is live, start_date and total_budget_dollars
        # must not be changed
        if is_live:
            start = campaign.start_date
            total_budget_dollars = campaign.total_budget_dollars

    # Configure priority, cost_basis, and bid_pennies
    if feature.is_enabled("ads_auction"):
        if c.user_is_sponsor:
            if is_auction:
                priority = PROMOTE_PRIORITIES["auction"]
                cost_basis = PROMOTE_COST_BASIS[cost_basis]
            else:
                cost_basis = PROMOTE_COST_BASIS.fixed_cpm
        else:
            # if non-sponsor, is_auction is not part of the POST request,
            # so must be set independently
            is_auction = True
            priority = PROMOTE_PRIORITIES["auction"]
            cost_basis = PROMOTE_COST_BASIS[cost_basis]

            # Error if bid is outside acceptable range
            min_bid_dollars = g.min_bid_pennies / 100.0
            # Fixed: this called self._lowest_max_bid_dollars, which does
            # not exist; the helper defined above is
            # _lowest_max_cpm_bid_dollars.
            max_bid_dollars = self._lowest_max_cpm_bid_dollars(
                total_budget_dollars=total_budget_dollars,
                bid_dollars=bid_dollars,
                start=start,
                end=end,
            )
            if bid_dollars < min_bid_dollars or bid_dollars > max_bid_dollars:
                c.errors.add(
                    errors.BAD_BID,
                    field="bid",
                    msg_params={
                        "min": "%.2f" % round(min_bid_dollars, 2),
                        "max": "%.2f" % round(max_bid_dollars, 2),
                    },
                )
                form.has_errors("bid", errors.BAD_BID)
                return
    else:
        cost_basis = PROMOTE_COST_BASIS.fixed_cpm

    if priority == PROMOTE_PRIORITIES["auction"]:
        bid_pennies = bid_dollars * 100
    else:
        # Fixed-price campaigns get their CPM from the pricing table.
        link_owner = Account._byID(link.author_id)
        bid_pennies = PromotionPrices.get_price(link_owner, target, location)

    if platform == "desktop":
        mobile_os = None
    else:
        # check if platform includes mobile, but no mobile OS is selected
        if not mobile_os:
            c.errors.add(errors.BAD_PROMO_MOBILE_OS, field="mobile_os")
            form.set_error(errors.BAD_PROMO_MOBILE_OS, "mobile_os")
            return
        elif os_versions == "filter":
            # check if OS is selected, but OS devices are not
            if ("iOS" in mobile_os and not ios_devices) or (
                "Android" in mobile_os and not android_devices
            ):
                c.errors.add(errors.BAD_PROMO_MOBILE_DEVICE, field="os_versions")
                form.set_error(errors.BAD_PROMO_MOBILE_DEVICE, "os_versions")
                return
            # check if OS versions are invalid
            if form.has_errors("os_version", errors.INVALID_OS_VERSION):
                c.errors.add(errors.INVALID_OS_VERSION, field="os_version")
                form.set_error(errors.INVALID_OS_VERSION, "os_version")
                return

    min_start, max_start, max_end = promote.get_date_limits(link, c.user_is_sponsor)

    if campaign:
        if feature.is_enabled("ads_auction"):
            # non-sponsors cannot update fixed CPM campaigns,
            # even if they haven't launched (due to auction)
            if not c.user_is_sponsor and not campaign.is_auction:
                c.errors.add(errors.COST_BASIS_CANNOT_CHANGE, field="cost_basis")
                form.set_error(errors.COST_BASIS_CANNOT_CHANGE, "cost_basis")
                return
        if not c.user_is_sponsor:
            # If target is changed, require reapproval
            if campaign.target != target:
                requires_reapproval = True
        if campaign.start_date.date() != start.date():
            # Can't edit the start date of campaigns that have served
            if campaign.has_served:
                c.errors.add(errors.START_DATE_CANNOT_CHANGE, field="startdate")
                form.has_errors("startdate", errors.START_DATE_CANNOT_CHANGE)
                return
            if is_live or is_complete:
                c.errors.add(errors.START_DATE_CANNOT_CHANGE, field="startdate")
                form.has_errors("startdate", errors.START_DATE_CANNOT_CHANGE)
                return
            elif start.date() < min_start:
                c.errors.add(
                    errors.DATE_TOO_EARLY,
                    msg_params={"day": min_start.strftime("%m/%d/%Y")},
                    field="startdate",
                )
                form.has_errors("startdate", errors.DATE_TOO_EARLY)
                return

    if start.date() > max_start:
        c.errors.add(
            errors.DATE_TOO_LATE,
            msg_params={"day": max_start.strftime("%m/%d/%Y")},
            field="startdate",
        )
        form.has_errors("startdate", errors.DATE_TOO_LATE)
        return

    if end.date() > max_end:
        c.errors.add(
            errors.DATE_TOO_LATE,
            msg_params={"day": max_end.strftime("%m/%d/%Y")},
            field="enddate",
        )
        form.has_errors("enddate", errors.DATE_TOO_LATE)
        return

    if end < start:
        c.errors.add(errors.BAD_DATE_RANGE, field="enddate")
        form.has_errors("enddate", errors.BAD_DATE_RANGE)
        return

    # Limit the number of PromoCampaigns a Link can have
    # Note that the front end should prevent the user from getting
    # this far
    existing_campaigns = list(PromoCampaign._by_link(link._id))
    if len(existing_campaigns) > g.MAX_CAMPAIGNS_PER_LINK:
        c.errors.add(
            errors.TOO_MANY_CAMPAIGNS,
            msg_params={"count": g.MAX_CAMPAIGNS_PER_LINK},
            field="title",
        )
        form.has_errors("title", errors.TOO_MANY_CAMPAIGNS)
        return

    if not priority == PROMOTE_PRIORITIES["house"]:
        # total_budget_dollars is submitted as a float;
        # convert it to pennies
        total_budget_pennies = int(total_budget_dollars * 100)
        if c.user_is_sponsor:
            # Sponsors have no budget floor or ceiling.
            min_total_budget_pennies = 0
            max_total_budget_pennies = 0
        else:
            min_total_budget_pennies = g.min_total_budget_pennies
            max_total_budget_pennies = g.max_total_budget_pennies
        # NOTE(review): the `is None` arm looks unreachable -- int(...)
        # above would already have raised for a None budget.
        if (
            total_budget_pennies is None
            or total_budget_pennies < min_total_budget_pennies
            or (
                max_total_budget_pennies
                and total_budget_pennies > max_total_budget_pennies
            )
        ):
            c.errors.add(
                errors.BAD_BUDGET,
                field="total_budget_dollars",
                msg_params={
                    "min": min_total_budget_pennies,
                    "max": max_total_budget_pennies or g.max_total_budget_pennies,
                },
            )
            form.has_errors("total_budget_dollars", errors.BAD_BUDGET)
            return
        # you cannot edit the bid of a live ad unless it's a freebie
        if (
            campaign
            and total_budget_pennies != campaign.total_budget_pennies
            and promote.is_live_promo(link, campaign)
            and not campaign.is_freebie()
        ):
            c.errors.add(errors.BUDGET_LIVE, field="total_budget_dollars")
            form.has_errors("total_budget_dollars", errors.BUDGET_LIVE)
            return
    else:
        total_budget_pennies = 0

    # Check inventory
    campaign = campaign if campaign_id36 else None
    if not priority.inventory_override:
        oversold = has_oversold_error(
            form,
            campaign,
            start,
            end,
            total_budget_pennies,
            bid_pennies,
            target,
            location,
        )
        if oversold:
            return

    # Always set frequency_cap_default for auction campaign if frequency_cap
    # is not set
    if not frequency_cap and is_auction:
        frequency_cap = g.frequency_cap_default

    dates = (start, end)
    campaign_dict = {
        "dates": dates,
        "target": target,
        "frequency_cap": frequency_cap,
        "priority": priority,
        "location": location,
        "total_budget_pennies": total_budget_pennies,
        "cost_basis": cost_basis,
        "bid_pennies": bid_pennies,
        "platform": platform,
        "mobile_os": mobile_os,
        "ios_devices": ios_devices,
        "ios_version_range": ios_versions,
        "android_devices": android_devices,
        "android_version_range": android_versions,
    }
    if campaign:
        if requires_reapproval and promote.is_accepted(link):
            campaign_dict["is_approved"] = False
        promote.edit_campaign(link, campaign, **campaign_dict)
    else:
        campaign = promote.new_campaign(link, **campaign_dict)
    rc = RenderableCampaign.from_campaigns(link, campaign)
    jquery.update_campaign(campaign._fullname, rc.render_html())
@validatedForm(
    VSponsor("link_id36"),
    VModhash(),
    l=VLink("link_id36"),
    campaign=VPromoCampaign("campaign_id36"),
)
def POST_delete_campaign(self, form, jquery, l, campaign):
    """Delete a campaign after verifying it exists and belongs to the link."""
    # Equivalent (De Morgan) to:
    #   not campaign or not l or l._id != campaign.link_id
    if not (campaign and l and l._id == campaign.link_id):
        return abort(404, "not found")
    promote.delete_campaign(l, campaign)
@validatedForm(
    VSponsor("link_id36"),
    VModhash(),
    link=VLink("link_id36"),
    campaign=VPromoCampaign("campaign_id36"),
    should_pause=VBoolean("should_pause"),
)
def POST_toggle_pause_campaign(
    self, form, jquery, link, campaign, should_pause=False
):
    """Pause or unpause a campaign when the pause_ads feature is enabled."""
    # Same short-circuit order as before: the feature flag is consulted
    # only once the ownership checks pass.
    if not (
        link
        and campaign
        and link._id == campaign.link_id
        and feature.is_enabled("pause_ads")
    ):
        return abort(404, "not found")
    if campaign.paused == should_pause:
        # Already in the requested state; skip the write and re-render.
        return
    promote.toggle_pause_campaign(link, campaign, should_pause)
    rendered = RenderableCampaign.from_campaigns(link, campaign)
    jquery.update_campaign(campaign._fullname, rendered.render_html())
@validatedForm(
    VSponsorAdmin(),
    VModhash(),
    link=VLink("link_id36"),
    campaign=VPromoCampaign("campaign_id36"),
)
def POST_terminate_campaign(self, form, jquery, link, campaign):
    """Admin-only: immediately terminate a campaign and refresh its row."""
    if not (link and campaign and link._id == campaign.link_id):
        return abort(404, "not found")
    promote.terminate_campaign(link, campaign)
    rendered = RenderableCampaign.from_campaigns(link, campaign)
    jquery.update_campaign(campaign._fullname, rendered.render_html())
@validatedForm(
    VVerifiedSponsor("link"),
    VModhash(),
    link=VByName("link"),
    campaign=VPromoCampaign("campaign"),
    customer_id=VInt("customer_id", min=0),
    pay_id=VInt("account", min=0),
    edit=VBoolean("edit"),
    address=ValidAddress(
        [
            "firstName",
            "lastName",
            "company",
            "address",
            "city",
            "state",
            "zip",
            "country",
            "phoneNumber",
        ]
    ),
    creditcard=ValidCard(["cardNumber", "expirationDate", "cardCode"]),
)
def POST_update_pay(
    self,
    form,
    jquery,
    link,
    campaign,
    customer_id,
    pay_id,
    edit,
    address,
    creditcard,
):
    """Attach or update a payment method and authorize the campaign charge.

    Validates the campaign's inventory and dates, optionally creates or
    edits an Authorize.Net payment profile, then attempts to authorize
    the campaign against ``pay_id`` and redirects on success.
    Note: ``customer_id`` is validated but not used in this body.
    """
    def _handle_failed_payment(reason=None):
        # Record the failure against the user/link and surface a message
        # in the form's status area.
        promote.failed_payment_method(c.user, link)
        msg = reason or _("failed to authenticate card. sorry.")
        form.set_text(".status", msg)

    # Payments are a no-op when the Authorize.Net integration is off.
    if not g.authorizenetapi:
        return

    if not link or not campaign or link._id != campaign.link_id:
        return abort(404, "not found")

    # Check inventory
    if not campaign.is_auction:
        if campaign_has_oversold_error(form, campaign):
            return

    # check the campaign dates are still valid (user may have created
    # the campaign a few days ago)
    min_start, max_start, max_end = promote.get_date_limits(link, c.user_is_sponsor)

    if campaign.start_date.date() > max_start:
        msg = _("please change campaign start date to %(date)s or earlier")
        date = format_date(max_start, format="short", locale=c.locale)
        msg %= {"date": date}
        form.set_text(".status", msg)
        return

    if campaign.start_date.date() < min_start:
        msg = _("please change campaign start date to %(date)s or later")
        date = format_date(min_start, format="short", locale=c.locale)
        msg %= {"date": date}
        form.set_text(".status", msg)
        return

    # No stored payment profile id means a brand-new payment method.
    new_payment = not pay_id

    address_modified = new_payment or edit
    if address_modified:
        address_fields = [
            "firstName",
            "lastName",
            "company",
            "address",
            "city",
            "state",
            "zip",
            "country",
            "phoneNumber",
        ]
        card_fields = ["cardNumber", "expirationDate", "cardCode"]
        if form.has_errors(address_fields, errors.BAD_ADDRESS) or form.has_errors(
            card_fields, errors.BAD_CARD
        ):
            return

        try:
            pay_id = add_or_update_payment_method(
                c.user, address, creditcard, pay_id
            )

            if pay_id:
                promote.new_payment_method(
                    user=c.user, ip=request.ip, address=address, link=link
                )
        except AuthorizeNetException:
            _handle_failed_payment()
            return

    if pay_id:
        success, reason = promote.auth_campaign(link, campaign, c.user, pay_id)

        if success:
            hooks.get_hook("promote.campaign_paid").call(
                link=link, campaign=campaign
            )

            # When re-using a stored profile, fetch its billing address
            # for the payment receipt.
            if not address and g.authorizenetapi:
                profiles = get_or_create_customer_profile(c.user).paymentProfiles
                profile = {p.customerPaymentProfileId: p for p in profiles}[pay_id]

                address = profile.billTo

            promote.successful_payment(link, campaign, request.ip, address)

            jquery.payment_redirect(
                promote.promo_edit_url(link),
                new_payment,
                campaign.total_budget_pennies,
            )
            return
        else:
            _handle_failed_payment(reason)
    else:
        _handle_failed_payment()
@json_validate(
    VSponsor("link"),
    VModhash(),
    link=VLink("link"),
    kind=VOneOf("kind", ["thumbnail", "mobile"]),
    filepath=nop("filepath"),
    ajax=VBoolean("ajax", default=True),
)
def POST_ad_s3_params(self, responder, link, kind, filepath, ajax):
    """Return signed S3 POST parameters for an ad-image upload."""
    unused_root, ext = os.path.splitext(filepath)
    mime_type, unused_encoding = mimetypes.guess_type(filepath)

    # Reject anything that isn't a recognised, allowed image type.
    if not (mime_type and mime_type in ALLOWED_IMAGE_TYPES):
        request.environ["extra_error_data"] = {
            "message": _("image must be a jpg or png"),
        }
        abort(403)

    owner = link if link else c.user
    key = os.path.join(_get_ads_keyspace(owner), kind)

    redirect = None
    if not ajax:
        # Non-ajax uploads bounce through a signed, timestamped callback
        # endpoint after S3 accepts the POST.
        now = datetime.now().replace(tzinfo=g.tz)
        signature = _get_callback_hmac(
            username=c.user.name,
            key=key,
            expires=now,
        )
        path = "/api/ad_s3_callback?hmac=%s&ts=%s" % (
            signature,
            _format_expires(now),
        )
        redirect = add_sr(path, sr_path=False)

    return s3_helpers.get_post_args(
        bucket=g.s3_client_uploads_bucket,
        key=key,
        success_action_redirect=redirect,
        success_action_status="201",
        content_type=mime_type,
        meta={
            "x-amz-meta-ext": ext,
        },
    )
@validate(
    VSponsor(),
    expires=VDate("ts", format=EXPIRES_DATE_FORMAT),
    signature=VPrintable("hmac", 255),
    callback=nop("callback"),
    key=nop("key"),
)
def GET_ad_s3_callback(self, expires, signature, callback, key):
    """Landing endpoint for S3 upload success redirects.

    Verifies the HMAC-signed, time-limited callback parameters, then
    returns a small script that hands the uploaded image's metadata
    back to the opener via parent.__s3_callbacks__.
    """
    now = datetime.now(tz=g.tz)
    # Signed links are honoured for only 10 minutes after issuance.
    if expires + timedelta(minutes=10) < now:
        self.abort404()

    expected_mac = _get_callback_hmac(
        username=c.user.name,
        key=key,
        expires=expires,
    )
    # Constant-time comparison avoids a timing side-channel on the HMAC.
    if not constant_time_compare(signature, expected_mac):
        self.abort404()

    template = "<script>parent.__s3_callbacks__[%(callback)s](%(data)s);</script>"
    image = _key_to_dict(s3_helpers.get_key(g.s3_client_uploads_bucket, key))
    response = {
        # scriptsafe_dumps escapes the values for safe inline <script> use.
        "callback": scriptsafe_dumps(callback),
        "data": scriptsafe_dumps(image),
    }
    return format_html(template, response)
"""
Examples for a feature class and its view provider.
(c) 2009 Werner Mayer LGPL
"""
__author__ = "Werner Mayer <wmayer@users.sourceforge.net>"
import math
import FreeCAD
import Part
from FreeCAD import Base
from pivy import coin
class PartFeature:
    # Base class for scripted Part features: installs itself as the
    # document object's Proxy so FreeCAD dispatches execute()/onChanged()
    # calls to the subclass instance.
    def __init__(self, obj):
        obj.Proxy = self
class Box(PartFeature):
    """Parametric box feature driven by Length/Width/Height properties."""

    def __init__(self, obj):
        """Attach the three dimension properties, each defaulting to 1.0."""
        PartFeature.__init__(self, obj)
        for prop_name, description in (
            ("Length", "Length of the box"),
            ("Width", "Width of the box"),
            ("Height", "Height of the box"),
        ):
            prop = obj.addProperty(
                "App::PropertyLength", prop_name, "Box", description
            )
            setattr(prop, prop_name, 1.0)

    def onChanged(self, fp, prop):
        """Print the name of the property that has changed."""
        FreeCAD.Console.PrintMessage("Change property: " + str(prop) + "\n")

    def execute(self, fp):
        """Rebuild the box shape on recompute (mandatory hook)."""
        FreeCAD.Console.PrintMessage("Recompute Python Box feature\n")
        fp.Shape = Part.makeBox(fp.Length, fp.Width, fp.Height)
class ViewProviderBox:
    """View provider for the scripted Box feature.

    Supplies display modes, an XPM tree-view icon and trivial
    persistence hooks.
    """

    def __init__(self, obj):
        """Register this instance as the view object's proxy."""
        obj.Proxy = self

    def attach(self, obj):
        """Set up the view provider's scene sub-graph (mandatory hook)."""
        return

    def updateData(self, fp, prop):
        """React to property changes of the handled feature."""
        return

    def getDisplayModes(self, obj):
        """Return the list of supported display modes (none extra here)."""
        return []

    def getDefaultDisplayMode(self):
        """Name of the default display mode; must be in getDisplayModes."""
        return "Shaded"

    def setDisplayMode(self, mode):
        """Map a requested display mode onto one defined in attach.

        The names coincide, so this is the identity. Optional hook.
        """
        return mode

    def onChanged(self, vp, prop):
        """Print the name of the view property that has changed."""
        FreeCAD.Console.PrintMessage("Change property: " + str(prop) + "\n")

    def getIcon(self):
        """Return the tree-view icon as an XPM string (optional hook)."""
        return """
/* XPM */
static const char * ViewProviderBox_xpm[] = {
"16 16 6 1",
" c None",
". c #141010",
"+ c #615BD2",
"@ c #C39D55",
"# c #000000",
"$ c #57C355",
" ........",
" ......++..+..",
" .@@@@.++..++.",
" .@@@@.++..++.",
" .@@ .++++++.",
" ..@@ .++..++.",
"###@@@@ .++..++.",
"##$.@@$#.++++++.",
"#$#$.$$$........",
"#$$####### ",
"#$$#$$$$$# ",
"#$$#$$$$$# ",
"#$$#$$$$$# ",
" #$#$$$$$# ",
" ##$$$$$# ",
" ####### "};
"""

    def __getstate__(self):
        """Nothing picklable to store (Coin objects cannot be pickled)."""
        return None

    def __setstate__(self, state):
        """Nothing was pickled, so nothing needs restoring."""
        return None
def makeBox():
    """Create a new document containing one scripted Box object."""
    doc = FreeCAD.newDocument()
    feature = FreeCAD.ActiveDocument.addObject("Part::FeaturePython", "Box")
    Box(feature)
    ViewProviderBox(feature.ViewObject)
    doc.recompute()
# -----------------------------------------------------------------------------
class Line:
    """Scripted line feature defined by two vector endpoints p1 and p2."""

    def __init__(self, obj):
        """Add the two point properties; p2 defaults to the unit-x vector."""
        obj.addProperty("App::PropertyVector", "p1", "Line", "Start point")
        end_prop = obj.addProperty(
            "App::PropertyVector", "p2", "Line", "End point"
        )
        end_prop.p2 = FreeCAD.Vector(1, 0, 0)
        obj.Proxy = self

    def execute(self, fp):
        """Rebuild the shape as a straight edge from p1 to p2 (mandatory)."""
        fp.Shape = Part.makeLine(fp.p1, fp.p2)
class ViewProviderLine:
    """Minimal view provider: only supplies the default display mode."""

    def __init__(self, obj):
        """Register this instance as the view object's proxy."""
        obj.Proxy = self

    def getDefaultDisplayMode(self):
        """Name of the default display mode; must be in getDisplayModes."""
        return "Flat Lines"
def makeLine():
    """Create a new document containing one scripted Line object."""
    doc = FreeCAD.newDocument()
    feature = FreeCAD.ActiveDocument.addObject("Part::FeaturePython", "Line")
    Line(feature)
    # No python view provider here; any non-None proxy value suppresses
    # the default one (a ViewProviderLine could be attached instead).
    feature.ViewObject.Proxy = 0
    doc.recompute()
# -----------------------------------------------------------------------------
class Octahedron:
    """Scripted octahedron feature built from six vertices / eight faces."""

    def __init__(self, obj):
        """Attach the dimension properties (default 1.0) plus the shape."""
        for prop_name, description in (
            ("Length", "Length of the octahedron"),
            ("Width", "Width of the octahedron"),
            ("Height", "Height of the octahedron"),
        ):
            prop = obj.addProperty(
                "App::PropertyLength", prop_name, "Octahedron", description
            )
            setattr(prop, prop_name, 1.0)
        obj.addProperty(
            "Part::PropertyPartShape", "Shape", "Octahedron", "Shape of the octahedron"
        )
        obj.Proxy = self

    def execute(self, fp):
        """Rebuild the solid: four base corners plus apexes above and below."""
        v1 = FreeCAD.Vector(0, 0, 0)
        v2 = FreeCAD.Vector(fp.Length, 0, 0)
        v3 = FreeCAD.Vector(0, fp.Width, 0)
        v4 = FreeCAD.Vector(fp.Length, fp.Width, 0)
        v5 = FreeCAD.Vector(fp.Length / 2, fp.Width / 2, fp.Height / 2)
        v6 = FreeCAD.Vector(fp.Length / 2, fp.Width / 2, -fp.Height / 2)
        # Eight triangles: four to the upper apex (v5), four to the lower (v6).
        triangles = (
            (v2, v1, v5),
            (v4, v2, v5),
            (v3, v4, v5),
            (v1, v3, v5),
            (v1, v2, v6),
            (v2, v4, v6),
            (v4, v3, v6),
            (v3, v1, v6),
        )
        faces = [self.make_face(*corners) for corners in triangles]
        fp.Shape = Part.makeSolid(Part.makeShell(faces))

    def make_face(self, v1, v2, v3):
        """Build a planar triangular face through the three vertices."""
        boundary = Part.makePolygon([v1, v2, v3, v1])
        return Part.Face(boundary)
class ViewProviderOctahedron:
    """View provider rendering the octahedron via Coin scene-graph nodes."""

    def __init__(self, obj):
        """Register as proxy and add the Color property (default red)."""
        color_prop = obj.addProperty(
            "App::PropertyColor", "Color", "Octahedron", "Color of the octahedron"
        )
        color_prop.Color = (1.0, 0.0, 0.0)
        obj.Proxy = self

    def attach(self, obj):
        """Build the Shaded and Wireframe scene sub-graphs (mandatory)."""
        self.shaded = coin.SoGroup()
        self.wireframe = coin.SoGroup()
        self.color = coin.SoBaseColor()
        self.data = coin.SoCoordinate3()
        self.face = coin.SoIndexedFaceSet()
        for node in (self.color, self.data, self.face):
            self.shaded.addChild(node)
        obj.addDisplayMode(self.shaded, "Shaded")
        style = coin.SoDrawStyle()
        style.style = coin.SoDrawStyle.LINES
        for node in (style, self.color, self.data, self.face):
            self.wireframe.addChild(node)
        obj.addDisplayMode(self.wireframe, "Wireframe")
        self.onChanged(obj, "Color")

    def updateData(self, fp, prop):
        """Push the feature's new Shape vertices and faces into Coin nodes."""
        if prop != "Shape":
            return
        shape = fp.getPropertyByName("Shape")
        self.data.point.setNum(6)
        for slot, vertex in enumerate(shape.Vertexes):
            self.data.point.set1Value(slot, vertex.X, vertex.Y, vertex.Z)
        # Eight triangles, each terminated by -1, matching execute()'s faces.
        face_indices = (
            0, 2, 1, -1,
            3, 2, 0, -1,
            4, 2, 3, -1,
            1, 2, 4, -1,
            1, 5, 0, -1,
            0, 5, 3, -1,
            3, 5, 4, -1,
            4, 5, 1, -1,
        )
        for slot, value in enumerate(face_indices):
            self.face.coordIndex.set1Value(slot, value)

    def getDisplayModes(self, obj):
        """Return the list of supported display modes."""
        return ["Shaded", "Wireframe"]

    def getDefaultDisplayMode(self):
        """Name of the default display mode; must be in getDisplayModes."""
        return "Shaded"

    def setDisplayMode(self, mode):
        """Identity mapping: attach used the same mode names."""
        return mode

    def onChanged(self, vp, prop):
        """Log the change and resync the Coin color node on Color edits."""
        FreeCAD.Console.PrintMessage("Change property: " + str(prop) + "\n")
        if prop == "Color":
            col = vp.getPropertyByName("Color")
            self.color.rgb.setValue(col[0], col[1], col[2])

    def getIcon(self):
        """Return the tree-view icon as an XPM string (optional hook)."""
        return """
/* XPM */
static const char * ViewProviderBox_xpm[] = {
"16 16 6 1",
" c None",
". c #141010",
"+ c #615BD2",
"@ c #C39D55",
"# c #000000",
"$ c #57C355",
" ........",
" ......++..+..",
" .@@@@.++..++.",
" .@@@@.++..++.",
" .@@ .++++++.",
" ..@@ .++..++.",
"###@@@@ .++..++.",
"##$.@@$#.++++++.",
"#$#$.$$$........",
"#$$####### ",
"#$$#$$$$$# ",
"#$$#$$$$$# ",
"#$$#$$$$$# ",
" #$#$$$$$# ",
" ##$$$$$# ",
" ####### "};
"""

    def __getstate__(self):
        """Nothing picklable to store (Coin objects cannot be pickled)."""
        return None

    def __setstate__(self, state):
        """Nothing was pickled, so nothing needs restoring."""
        return None
def makeOctahedron():
    """Create a new document containing one scripted Octahedron object."""
    doc = FreeCAD.newDocument()
    feature = FreeCAD.ActiveDocument.addObject("App::FeaturePython", "Octahedron")
    Octahedron(feature)
    ViewProviderOctahedron(feature.ViewObject)
    doc.recompute()
# -----------------------------------------------------------------------------
class PointFeature:
    """Minimal scripted feature for a Points object; all hooks are no-ops."""

    def __init__(self, obj):
        """Register this instance as the feature's proxy."""
        obj.Proxy = self

    def onChanged(self, fp, prop):
        """Property-change hook; nothing to do for this feature."""

    def execute(self, fp):
        """Recompute hook (mandatory); nothing to compute."""
class ViewProviderPoints:
    """View-provider stub for a scripted Points feature."""

    def __init__(self, obj):
        """Register this instance as the view provider's proxy."""
        obj.Proxy = self

    def attach(self, obj):
        """Scene-graph setup hook (mandatory); nothing to build here."""

    def updateData(self, fp, prop):
        """Feature-property change hook; nothing to synchronise."""

    def getDisplayModes(self, obj):
        """No custom display modes are offered."""
        return []

    def getDefaultDisplayMode(self):
        """Default display mode name; must be one of getDisplayModes' entries."""
        return "Points"

    def setDisplayMode(self, mode):
        """Display-mode names map one-to-one, so pass the mode through."""
        return mode

    def onChanged(self, vp, prop):
        """View-provider property change hook; nothing to do."""

    def getIcon(self):
        """Tree-view icon as an XPM string (optional hook)."""
        return """
        /* XPM */
        static const char * ViewProviderBox_xpm[] = {
        "16 16 6 1",
        " c None",
        ". c #141010",
        "+ c #615BD2",
        "@ c #C39D55",
        "# c #000000",
        "$ c #57C355",
        " ........",
        " ......++..+..",
        " .@@@@.++..++.",
        " .@@@@.++..++.",
        " .@@ .++++++.",
        " ..@@ .++..++.",
        "###@@@@ .++..++.",
        "##$.@@$#.++++++.",
        "#$#$.$$$........",
        "#$$####### ",
        "#$$#$$$$$# ",
        "#$$#$$$$$# ",
        "#$$#$$$$$# ",
        " #$#$$$$$# ",
        " ##$$$$$# ",
        " ####### "};
        """

    def __getstate__(self):
        """Coin scene-graph members cannot be pickled; store nothing."""
        return None

    def __setstate__(self, state):
        """Nothing was pickled, so nothing to restore."""
        return None
def makePoints():
    """Sample a sphere mesh into a point cloud and add it to a new document."""
    doc = FreeCAD.newDocument()
    import Mesh
    import Points
    cloud = Points.Points()
    # One point per vertex of a radius-5 sphere mesh.
    cloud.addPoints([vertex.Vector for vertex in Mesh.createSphere(5.0).Points])
    feature = doc.addObject("Points::FeaturePython", "Points")
    feature.Points = cloud
    PointFeature(feature)
    ViewProviderPoints(feature.ViewObject)
    doc.recompute()
# -----------------------------------------------------------------------------
class MeshFeature:
    """Minimal scripted feature for a Mesh object; all hooks are no-ops."""

    def __init__(self, obj):
        """Register this instance as the feature's proxy."""
        obj.Proxy = self

    def onChanged(self, fp, prop):
        """Property-change hook; nothing to do for this feature."""

    def execute(self, fp):
        """Recompute hook (mandatory); nothing to compute."""
class ViewProviderMesh:
    """View-provider stub for a scripted Mesh feature."""

    def __init__(self, obj):
        """Register this instance as the view provider's proxy."""
        obj.Proxy = self

    def attach(self, obj):
        """Scene-graph setup hook (mandatory); nothing to build here."""

    def getDefaultDisplayMode(self):
        """Default display mode name for the mesh."""
        return "Shaded"

    def getIcon(self):
        """Tree-view icon as an XPM string (optional hook)."""
        return """
        /* XPM */
        static const char * ViewProviderBox_xpm[] = {
        "16 16 6 1",
        " c None",
        ". c #141010",
        "+ c #615BD2",
        "@ c #C39D55",
        "# c #000000",
        "$ c #57C355",
        " ........",
        " ......++..+..",
        " .@@@@.++..++.",
        " .@@@@.++..++.",
        " .@@ .++++++.",
        " ..@@ .++..++.",
        "###@@@@ .++..++.",
        "##$.@@$#.++++++.",
        "#$#$.$$$........",
        "#$$####### ",
        "#$$#$$$$$# ",
        "#$$#$$$$$# ",
        "#$$#$$$$$# ",
        " #$#$$$$$# ",
        " ##$$$$$# ",
        " ####### "};
        """

    def __getstate__(self):
        """Coin scene-graph members cannot be pickled; store nothing."""
        return None

    def __setstate__(self, state):
        """Nothing was pickled, so nothing to restore."""
        return None
def makeMesh():
    """Create a document with a sphere mesh wrapped in a scripted feature."""
    doc = FreeCAD.newDocument()
    import Mesh
    feature = doc.addObject("Mesh::FeaturePython", "Mesh")
    feature.Mesh = Mesh.createSphere(5.0)
    MeshFeature(feature)
    ViewProviderMesh(feature.ViewObject)
    doc.recompute()
# -----------------------------------------------------------------------------
class Molecule:
    """Scripted feature: a line between two editable endpoints p1 and p2."""

    def __init__(self, obj):
        """Add the two endpoint vector properties and register as proxy."""
        obj.addProperty("App::PropertyVector", "p1", "Line", "Start point")
        # addProperty returns the object itself, so the new property can be
        # initialised directly on the return value.
        prop_holder = obj.addProperty(
            "App::PropertyVector", "p2", "Line", "End point"
        )
        prop_holder.p2 = FreeCAD.Vector(5, 0, 0)
        obj.Proxy = self

    def execute(self, fp):
        """Rebuild the shape as the segment p1-p2 (mandatory recompute hook)."""
        fp.Shape = Part.makeLine(fp.p1, fp.p2)
class ViewProviderMolecule:
    """View provider drawing a sphere at each endpoint of a Molecule line."""

    def __init__(self, obj):
        """Build two translated spheres in the scene graph and register as proxy."""
        sphere_a = coin.SoSeparator()
        self.trl1 = coin.SoTranslation()
        sphere_a.addChild(self.trl1)
        sphere_a.addChild(coin.SoSphere())
        sphere_b = coin.SoSeparator()
        self.trl2 = coin.SoTranslation()
        sphere_b.addChild(self.trl2)
        sphere_b.addChild(coin.SoSphere())
        obj.RootNode.addChild(sphere_a)
        obj.RootNode.addChild(sphere_b)
        # Assign the proxy last: the assignment triggers updateData, which
        # needs trl1/trl2 to exist already.
        obj.Proxy = self

    def updateData(self, fp, prop):
        """Move the matching sphere when p1 or p2 changes on the feature."""
        if prop == "p1":
            pt = fp.getPropertyByName("p1")
            self.trl1.translation = (pt.x, pt.y, pt.z)
        elif prop == "p2":
            pt = fp.getPropertyByName("p2")
            self.trl2.translation = (pt.x, pt.y, pt.z)

    def __getstate__(self):
        """Coin nodes are not picklable; store nothing."""
        return None

    def __setstate__(self, state):
        """Nothing was stored, so nothing to restore."""
        return None
def makeMolecule():
    """Create a document containing one Molecule feature with its view provider."""
    doc = FreeCAD.newDocument()
    feature = doc.addObject("Part::FeaturePython", "Molecule")
    Molecule(feature)
    ViewProviderMolecule(feature.ViewObject)
    doc.recompute()
# -----------------------------------------------------------------------------
class CircleSet:
    """Scripted feature holding a compound shape of circles."""

    def __init__(self, obj):
        """Add the Shape property and register as proxy."""
        obj.addProperty("Part::PropertyPartShape", "Shape", "Circle", "Shape")
        obj.Proxy = self

    def execute(self, fp):
        """Recompute hook; the shape is assigned externally, nothing to do."""
class ViewProviderCircleSet:
    """View provider that renders every edge of the Shape as a 10-point polyline."""

    def __init__(self, obj):
        """Register this instance as the view provider's proxy."""
        obj.Proxy = self

    def attach(self, obj):
        """Create the coordinate and line-set Coin nodes in the scene graph."""
        self.coords = coin.SoCoordinate3()
        self.lines = coin.SoLineSet()
        obj.RootNode.addChild(self.coords)
        obj.RootNode.addChild(self.lines)

    def updateData(self, fp, prop):
        """Resample each edge of the Shape (10 points) into the Coin nodes."""
        if prop != "Shape":
            return
        points = []
        counts = []
        for edge in fp.getPropertyByName("Shape").Edges:
            edge_len = edge.Length
            counts.append(10)
            for k in range(10):
                pt = edge.valueAt(k / 9.0 * edge_len)
                points.append((pt.x, pt.y, pt.z))
        self.coords.point.setValues(points)
        self.lines.numVertices.setValues(counts)

    def __getstate__(self):
        """Coin nodes are not picklable; store nothing."""
        return None

    def __setstate__(self, state):
        """Nothing was stored, so nothing to restore."""
        return None
def makeCircleSet():
    """Fill a 630x630 grid (0.5 spacing) with small circles and display them."""
    comp = Part.Compound([])
    x = 0.5
    for _col in range(630):
        y = 0.5
        for _row in range(630):
            comp.add(Part.makeCircle(0.1, Base.Vector(x, y, 0), Base.Vector(0, 0, 1)))
            y += 0.5
        x += 0.5
    doc = FreeCAD.newDocument()
    feature = doc.addObject("App::FeaturePython", "Circles")
    CircleSet(feature)
    ViewProviderCircleSet(feature.ViewObject)
    feature.Shape = comp
    doc.recompute()
# -----------------------------------------------------------------------------
class EnumTest:
    """Demo feature exposing two enumeration properties (Enum, Enum2)."""

    def __init__(self, obj):
        """Create Enum and Enum2, each offering One/Two/Three."""
        # addProperty returns the object, so the fresh property can be
        # initialised directly on the return value.
        obj.addProperty(
            "App::PropertyEnumeration", "Enum", "", "Enumeration"
        ).Enum = ["One", "Two", "Three"]
        obj.addProperty(
            "App::PropertyEnumeration", "Enum2", "", "Enumeration2"
        ).Enum2 = ["One", "Two", "Three"]
        obj.Proxy = self

    def execute(self, fp):
        """Recompute hook; nothing to compute."""
class ViewProviderEnumTest:
    """View provider counterpart with two more enumeration properties."""

    def __init__(self, obj):
        """Create Enum3 and Enum4 on the view object and register as proxy."""
        obj.addProperty(
            "App::PropertyEnumeration", "Enum3", "", "Enumeration3"
        ).Enum3 = ["One", "Two", "Three"]
        obj.addProperty(
            "App::PropertyEnumeration", "Enum4", "", "Enumeration4"
        ).Enum4 = ["One", "Two", "Three"]
        obj.Proxy = self

    def updateData(self, fp, prop):
        """Log which property of the handled feature changed."""
        print("prop updated:", prop)

    def __getstate__(self):
        """Nothing worth pickling."""
        return None

    def __setstate__(self, state):
        """Nothing was pickled, so nothing to restore."""
        return None
def makeEnumTest():
    """Create a document with one EnumTest feature and its view provider."""
    doc = FreeCAD.newDocument()
    feature = doc.addObject("Part::FeaturePython", "Enum")
    EnumTest(feature)
    ViewProviderEnumTest(feature.ViewObject)
# -----------------------------------------------------------------------------
class DistanceBolt:
    """Parametric distance bolt: an extruded regular n-gon with a round hole."""

    def __init__(self, obj):
        """Add the defining properties (Edges, Length, Radius, Height)."""
        obj.addProperty(
            "App::PropertyInteger", "Edges", "Bolt", "Number of edges of the outline"
        ).Edges = 6
        obj.addProperty(
            "App::PropertyLength",
            "Length",
            "Bolt",
            "Length of the edges of the outline",
        ).Length = 10.0
        obj.addProperty(
            "App::PropertyLength", "Radius", "Bolt", "Radius of the inner circle"
        ).Radius = 4.0
        obj.addProperty(
            "App::PropertyLength", "Height", "Bolt", "Height of the extrusion"
        ).Height = 20.0
        obj.Proxy = self

    def onChanged(self, fp, prop):
        """Rebuild the shape whenever one of the defining dimensions changes."""
        if prop in ("Edges", "Length", "Radius", "Height"):
            self.execute(fp)

    def execute(self, fp):
        """Recompute: polygon outline with a circular hole, extruded along Z."""
        sides = max(3, fp.Edges)  # a polygon needs at least three corners
        rot = Base.Matrix()
        rot.rotateZ(math.radians(360.0 / sides))
        corner = Base.Vector(fp.Length, 0, 0)
        outline = []
        for _ in range(sides):
            outline.append(corner)
            corner = rot.multiply(corner)
        outline.append(corner)  # repeat the first corner to close the wire
        wire = Part.makePolygon(outline)
        hole = Part.Wire(Part.makeCircle(fp.Radius))
        # Face with the polygon as outer boundary and the circle as a hole.
        face = Part.Face([wire, hole])
        fp.Shape = face.extrude(Base.Vector(0, 0, fp.Height))
def makeDistanceBolt():
    """Create a document containing one parametric distance bolt."""
    doc = FreeCAD.newDocument()
    bolt = doc.addObject("Part::FeaturePython", "Distance_Bolt")
    bolt.Label = "Distance bolt"
    DistanceBolt(bolt)
    # Use the default view provider (no custom proxy object).
    bolt.ViewObject.Proxy = 0
    doc.recompute()
|
extractor | giga | # coding: utf-8
from __future__ import unicode_literals
import itertools
from ..utils import compat_str, parse_duration, parse_iso8601, qualities, str_to_int
from .common import InfoExtractor
class GigaIE(InfoExtractor):
    """Extractor for video pages on giga.de."""
    _VALID_URL = r"https?://(?:www\.)?giga\.de/(?:[^/]+/)*(?P<id>[^/]+)"
    _TESTS = [
        {
            "url": "http://www.giga.de/filme/anime-awesome/trailer/anime-awesome-chihiros-reise-ins-zauberland-das-beste-kommt-zum-schluss/",
            "md5": "6bc5535e945e724640664632055a584f",
            "info_dict": {
                "id": "2622086",
                "display_id": "anime-awesome-chihiros-reise-ins-zauberland-das-beste-kommt-zum-schluss",
                "ext": "mp4",
                "title": "Anime Awesome: Chihiros Reise ins Zauberland – Das Beste kommt zum Schluss",
                "description": "md5:afdf5862241aded4718a30dff6a57baf",
                "thumbnail": r"re:^https?://.*\.jpg$",
                "duration": 578,
                "timestamp": 1414749706,
                "upload_date": "20141031",
                "uploader": "Robin Schweiger",
                "view_count": int,
            },
        },
        {
            "url": "http://www.giga.de/games/channel/giga-top-montag/giga-topmontag-die-besten-serien-2014/",
            "only_matching": True,
        },
        {
            "url": "http://www.giga.de/extra/netzkultur/videos/giga-games-tom-mats-robin-werden-eigene-wege-gehen-eine-ankuendigung/",
            "only_matching": True,
        },
        {
            "url": "http://www.giga.de/tv/jonas-liest-spieletitel-eingedeutscht-episode-2/",
            "only_matching": True,
        },
    ]
    def _real_extract(self, url):
        """Fetch the page, resolve the numeric video id, and build the info dict."""
        display_id = self._match_id(url)
        webpage = self._download_webpage(url, display_id)
        # The numeric id is embedded either as a data attribute or inside a
        # jwplayer URL fragment.
        video_id = self._search_regex(
            [r'data-video-id="(\d+)"', r"/api/video/jwplayer/#v=(\d+)"],
            webpage,
            "video id",
        )
        # The syndication API returns a one-element list; its single item maps
        # stringified indices ("0", "1", ...) to format dicts.
        playlist = self._download_json(
            "http://www.giga.de/api/syndication/video/video_id/%s/playlist.json?content=syndication/key/368b5f151da4ae05ced7fa296bdff65a/"
            % video_id,
            video_id,
        )[0]
        quality = qualities(["normal", "hd720"])
        formats = []
        # Walk consecutive indices until the first missing key.
        for format_id in itertools.count(0):
            fmt = playlist.get(compat_str(format_id))
            if not fmt:
                break
            formats.append(
                {
                    "url": fmt["src"],
                    "format_id": "%s-%s" % (fmt["quality"], fmt["type"].split("/")[-1]),
                    "quality": quality(fmt["quality"]),
                }
            )
        self._sort_formats(formats)
        title = self._html_search_meta("title", webpage, "title", fatal=True)
        description = self._html_search_meta("description", webpage, "description")
        thumbnail = self._og_search_thumbnail(webpage)
        # The duration is shown in a <span class="duration"> next to the
        # player markup that references this video id.
        duration = parse_duration(
            self._search_regex(
                r'(?s)(?:data-video-id="{0}"|data-video="[^"]*/api/video/jwplayer/#v={0}[^"]*")[^>]*>.+?<span class="duration">([^<]+)</span>'.format(
                    video_id
                ),
                webpage,
                "duration",
                fatal=False,
            )
        )
        timestamp = parse_iso8601(
            self._search_regex(
                r'datetime="([^"]+)"', webpage, "upload date", fatal=False
            )
        )
        uploader = self._search_regex(
            r'class="author">([^<]+)</a>', webpage, "uploader", fatal=False
        )
        view_count = str_to_int(
            self._search_regex(
                r'<span class="views"><strong>([\d.,]+)</strong>',
                webpage,
                "view count",
                fatal=False,
            )
        )
        return {
            "id": video_id,
            "display_id": display_id,
            "title": title,
            "description": description,
            "thumbnail": thumbnail,
            "duration": duration,
            "timestamp": timestamp,
            "uploader": uploader,
            "view_count": view_count,
            "formats": formats,
        }
|
messages | decode | from ..py2 import convert_py2_bytes
from .checks import check_data
from .specs import (
CHANNEL_MESSAGES,
MIN_PITCHWHEEL,
SPEC_BY_STATUS,
SYSEX_END,
SYSEX_START,
)
def _decode_sysex_data(data):
    """Package the raw sysex payload bytes as an immutable tuple."""
    return dict(data=tuple(data))
def _decode_quarter_frame_data(data):
    """Split an MTC quarter-frame byte: type in the high nibble, value in the low."""
    byte = data[0]
    return {"frame_type": byte >> 4, "frame_value": byte & 15}
def _decode_songpos_data(data):
    """Combine two 7-bit bytes (LSB first) into a 14-bit song position."""
    lsb, msb = data[0], data[1]
    return {"pos": lsb | (msb << 7)}
def _decode_pitchwheel_data(data):
    """Decode the two pitchwheel data bytes into a signed pitch value.

    The bytes carry the low and high 7 bits of an unsigned 14-bit value;
    adding MIN_PITCHWHEEL (-8192) re-centres it onto the signed wheel range.

    The original expression ``data[0] | ((data[1] << 7) + MIN_PITCHWHEEL)``
    gave the same result, but only because the low 7 bits of the sum are
    always zero, which made OR behave like addition; this form states the
    intent directly.
    """
    return {"pitch": (data[0] | (data[1] << 7)) + MIN_PITCHWHEEL}
def _make_special_cases():
    """Build the status-byte -> decoder table for messages needing custom decoding."""
    # Pitchwheel is a channel message: one entry per channel (0xE0-0xEF).
    cases = {status: _decode_pitchwheel_data for status in range(0xE0, 0xF0)}
    cases[0xF0] = _decode_sysex_data
    cases[0xF1] = _decode_quarter_frame_data
    cases[0xF2] = _decode_songpos_data
    return cases
# Pre-computed dispatch table: status byte -> specialized decoder function.
_SPECIAL_CASES = _make_special_cases()
def _decode_data_bytes(status_byte, data, spec):
    """Map positional data bytes onto the value names declared in *spec*.

    Raises ValueError when the byte count does not match the spec; the
    status byte itself is not part of *data*, hence the -1.
    """
    if len(data) != spec["length"] - 1:
        raise ValueError("wrong number of bytes for {} message".format(spec["type"]))
    # TODO: better name than args?
    names = (name for name in spec["value_names"] if name != "channel")
    args = dict(zip(names, data))
    if status_byte in CHANNEL_MESSAGES:
        # The channel lives in the status byte's low nibble, not in the data.
        args["channel"] = status_byte & 0x0F
    return args
def decode_message(msg_bytes, time=0, check=True):
    """Decode message bytes and return the message as a dictionary.

    Raises ValueError if the bytes are out of range or the message is
    invalid.

    This is not a part of the public API.
    """
    # TODO: this function is getting long.
    msg_bytes = convert_py2_bytes(msg_bytes)
    if len(msg_bytes) == 0:
        raise ValueError("message is 0 bytes long")
    # First byte is the status byte; the rest are data bytes.
    status_byte = msg_bytes[0]
    data = msg_bytes[1:]
    try:
        spec = SPEC_BY_STATUS[status_byte]
    except KeyError:
        raise ValueError("invalid status byte {!r}".format(status_byte))
    msg = {
        "type": spec["type"],
        "time": time,
    }
    # Sysex.
    if status_byte == SYSEX_START:
        if len(data) < 1:
            raise ValueError("sysex without end byte")
        # Strip and validate the trailing SYSEX_END byte.
        end = data[-1]
        data = data[:-1]
        if end != SYSEX_END:
            raise ValueError("invalid sysex end byte {!r}".format(end))
    # Optionally verify that every data byte is within the 7-bit range.
    if check:
        check_data(data)
    if status_byte in _SPECIAL_CASES:
        if status_byte in CHANNEL_MESSAGES:
            # The channel is stored in the lower nibble of the status byte.
            msg["channel"] = status_byte & 0x0F
        msg.update(_SPECIAL_CASES[status_byte](data))
    else:
        msg.update(_decode_data_bytes(status_byte, data, spec))
    return msg
|
Code | PGNreader | import LCEngine4 as LCEngine
from Code import Util
class Move:
    """One half-move parsed from a PGN body, with its annotations."""

    # Names of the attributes copied by clona(); lists are shared, not copied.
    _FIELDS = (
        "pgn", "pv", "comentarios", "variantes", "criticas",
        "desde", "hasta", "coronacion", "siMate", "siDesconocido",
        "fenPrev", "fen",
    )

    def __init__(self):
        self.pgn = ""           # SAN text of the move
        self.pv = ""            # long-algebraic form, e.g. "e2e4"
        self.comentarios = []   # {...} comments attached to the move
        self.variantes = []     # (...) sub-variations
        self.criticas = []      # NAGs and !/? style annotations
        self.desde = None       # from-square
        self.hasta = None       # to-square
        self.coronacion = None  # promotion piece suffix, if any
        self.siMate = False     # move delivers mate ("#")
        self.siDesconocido = False  # game ended here for unknown reasons ("*")
        self.fenPrev = ""       # FEN before the move
        self.fen = ""           # FEN after the move

    def clona(self):
        """Shallow copy: the annotation lists stay shared with the original."""
        duplicate = Move()
        for name in self._FIELDS:
            setattr(duplicate, name, getattr(self, name))
        return duplicate
class Moves:
    """Ordered list of parsed half-moves for one line of play (main line or variation)."""
    def __init__(self):
        self.liMoves = []  # Move objects, in playing order
        self.firstComment = ""  # {...} comment found before the first move
    def toPGN(self):
        """Rebuild the PGN movetext: numbers, moves, NAGs, comments, variations."""
        li = []
        if self.liMoves:
            mv = self.liMoves[0]
            # Side to move and move number come from the FEN before the first move.
            siW = "w" in mv.fenPrev
            njug = int(mv.fenPrev.split(" ")[-1])
            if not siW:
                li.append("%d..." % njug)
            for mv in self.liMoves:
                if siW:
                    li.append("%d.%s" % (njug, mv.pgn))
                else:
                    njug += 1
                    li.append(mv.pgn)
                if mv.criticas:
                    for una in mv.criticas:
                        if una.isdigit():
                            li.append("$%s" % una)  # numeric NAG
                        else:
                            li.append(una)  # literal !/? style annotation
                if mv.comentarios:
                    for uno in mv.comentarios:
                        li.append("{%s}" % uno)
                if mv.variantes:
                    for una in mv.variantes:
                        li.append("(%s)" % una.toPGN())
                siW = not siW
        return " ".join(li)
    def read(self, fen, txt):
        """Parse movetext *txt* played from position *fen*.

        Pass 1 tokenizes the text into Move objects (SAN, NAGs, comments,
        raw variation strings). Pass 2 validates every move with LCEngine,
        filling pv/desde/hasta/fen. Pass 3 parses the variations
        recursively. Returns False as soon as a move is illegal.
        """
        ntxt = len(txt)
        pos = 0
        # Placeholder move that collects annotations seen before any real move.
        mv = Move()
        mv.pgn = ""
        while pos < ntxt:
            c = txt[pos]
            if c in "123456789":
                # Move number such as "12." or "12..." -- skip it.
                pos += 1
                while pos < ntxt and txt[pos] in "1234567890.":
                    pos += 1
            elif c in "abcdfghKQRBNOo":
                # Ordinary SAN move, including castling written with O or o.
                # Note "e" is deliberately absent: it has its own branch below.
                desde = pos
                hasta = pos
                pos += 1
                while pos < ntxt and txt[pos] in "abcdefghKQRBN12345678xX-Oo=p+":
                    hasta += 1
                    pos += 1
                mv = Move()
                x = mv.pgn = txt[desde : hasta + 1]
                if "-" in x:
                    if "o" in x:
                        x = x.replace("o", "O")  # normalize lowercase castling
                    elif x[0] != "O":
                        x = x.replace("-", "")  # drop separators like "e2-e4"
                    mv.pgn = x
                self.liMoves.append(mv)
            elif c == "e":
                # Moves starting with "e" are handled separately so that a
                # trailing "e.p." (en passant) marker can be stripped.
                desde = pos
                hasta = pos
                pos += 1
                while pos < ntxt and txt[pos] in "abcdefghKQRBN12345678xX-=p.":
                    hasta += 1
                    pos += 1
                mv = Move()
                x = mv.pgn = txt[desde : hasta + 1]
                if "-" in x:
                    x = x.replace("-", "")
                    mv.pgn = x
                if x.endswith("e.p."):
                    x = x[:-4]
                    mv.pgn = x
                # A bare "ep"/"e.p." token is a leftover marker, not a move.
                if x and not (x in ("ep", "e.p.", "e.p", "ep.")):
                    self.liMoves.append(mv)
            elif c == "$":
                # Numeric annotation glyph, e.g. "$14".
                pos += 1
                desde = pos
                hasta = pos
                while pos < ntxt and txt[pos].isdigit():
                    hasta += 1
                    pos += 1
                mv.criticas.append(txt[desde:hasta])
            elif c in "?!":
                # Literal annotation such as "!", "??", "!?".
                desde = pos
                hasta = pos
                pos += 1
                while pos < ntxt and txt[pos] in "!?":
                    hasta += 1
                    pos += 1
                mv.criticas.append(txt[desde : hasta + 1])
            elif c == "(":
                # Variation: scan to the matching ")", ignoring parentheses
                # that appear inside {...} comments; stored raw for pass 3.
                pos += 1
                desde = pos
                hasta = pos
                par = 1
                coment = 0
                while pos < ntxt:
                    c = txt[pos]
                    if coment:
                        if c == "{":
                            coment += 1
                        elif c == "}":
                            coment -= 1
                    else:
                        if c == "(":
                            par += 1
                        elif c == ")":
                            par -= 1
                            if par == 0:
                                break
                        elif c == "{":
                            coment = 1
                    hasta += 1
                    pos += 1
                mv.variantes.append(
                    txt[desde:hasta]
                    .replace("\r\n", " ")
                    .replace("\r", " ")
                    .replace("\n", " ")
                    .strip()
                )
            elif c == "{":
                # Comment: scan to the matching "}" (nested braces allowed).
                pos += 1
                desde = pos
                hasta = pos
                par = 1
                while pos < ntxt:
                    c = txt[pos]
                    if c == "{":
                        par += 1
                    elif c == "}":
                        par -= 1
                        if par == 0:
                            break
                    hasta += 1
                    pos += 1
                comment = (
                    txt[desde:hasta]
                    .replace("\r\n", " ")
                    .replace("\r", " ")
                    .replace("\n", " ")
                    .strip()
                )
                # A comment before the first move belongs to the line itself.
                if not mv.pgn:
                    self.firstComment = comment
                else:
                    mv.comentarios.append(comment)
            elif c == ";":
                # Rest-of-line comment: skip to end of line.
                pos += 1
                while pos < ntxt and txt[pos] != "\n":
                    pos += 1
            elif c == "#":
                mv.siMate = True
                pos += 1
            elif c == "*":
                mv.siDesconocido = True
                pos += 1
            elif c in "0":
                # Castling written with zeros ("0-0", "0-0-0").
                desde = pos
                hasta = pos
                pos += 1
                while pos < ntxt and txt[pos] in "0Oo-+":
                    hasta += 1
                    pos += 1
                mv = Move()
                x = mv.pgn = txt[desde : hasta + 1].replace("0", "O").upper()
                if x in ("O-O", "O-O-O", "O-O+", "O-O-O+"):
                    self.liMoves.append(mv)
            else:
                # Anything else (results, whitespace, stray chars) is skipped.
                pos += 1
        # Pass 2: replay the moves on the engine, validating and filling FENs.
        LCEngine.setFen(fen)
        fenPrev = fen
        for mv in self.liMoves:
            mv.fenPrev = fenPrev
            if mv.pgn in ("O-O+", "O-O-O+"):
                mv.pgn = mv.pgn[:-1]
            pv = LCEngine.lc_pgn2pv(mv.pgn)
            if len(pv) < 4:
                return False
            mv.pv = pv
            mv.desde = pv[:2]
            mv.hasta = pv[2:4]
            mv.coronacion = pv[4:]
            if not LCEngine.movePV(mv.desde, mv.hasta, mv.coronacion):
                return False
            fenPrev = LCEngine.getFen()
            mv.fen = fenPrev
        # Pass 3 is done separately so variation replay does not disturb the
        # engine state used for the main line above.
        for mv in self.liMoves:
            if mv.variantes:
                livar = []
                for variante in mv.variantes:
                    moves = Moves()
                    if moves.read(mv.fenPrev, variante):
                        livar.append(moves)
                mv.variantes = livar
        return True
class Game:
    """A single PGN game: header labels plus the parsed move list."""

    def __init__(self):
        self.labels = Util.SymbolDict()
        self.moves = Moves()
        # Standard starting position unless a FEN tag overrides it.
        self.fen = "rnbqkbnr/pppppppp/8/8/8/8/PPPPPPPP/RNBQKBNR w KQkq - 0 1"
        self.pgn = ""
        self.erroneo = False

    def plies(self):
        """Number of half-moves in the game."""
        return len(self.moves.liMoves)

    def readLabels(self, liTxt):
        """Parse '[Key "Value"]' header lines into self.labels.

        A FEN tag additionally replaces the starting position. Keys must be
        printable ASCII without spaces; malformed lines are skipped.
        """
        for raw in liTxt:
            parts = raw[1:-1].replace('""', '"').split('"')
            if len(parts) != 3:
                continue
            key = parts[0].strip().replace(" ", "")
            if any(not (33 < ord(ch) < 127) for ch in key):
                continue
            value = parts[1].strip()
            if not (key and value):
                continue
            if key.upper() == "FEN":
                key = "FEN"
                self.fen = value
            self.labels[key] = value
        self.pgn = "\n".join(liTxt)

    def readBody(self, body):
        """Parse the movetext; flag the game as erroneous on failure."""
        self.pgn += "\n\n" + body + "\n"
        if not self.moves.read(self.fen, body):
            self.erroneo = True
        # Whole principal variation as one space-separated string.
        self.pvT = " ".join(move.pv for move in self.moves.liMoves if move.pv)

    def pv(self):
        """Principal variation string (valid only after readBody)."""
        return self.pvT

    def move(self, num):
        """Return the Move at half-move index *num*."""
        return self.moves.liMoves[num]
def read1Game(pgn):
    """Parse one PGN string (tag section followed by movetext) into a Game."""
    header_lines = []
    body_lines = []
    in_header = True
    for raw in pgn.split("\n"):
        line = raw.strip()
        if in_header:
            if not line:
                continue  # blank lines before/inside the tags are ignored
            if line[0] == "[":
                header_lines.append(line)
            else:
                # First non-tag line starts the movetext.
                in_header = False
                body_lines = [line]
        elif line:
            body_lines.append(line)
    game = Game()
    game.readLabels(header_lines)
    game.readBody("\n".join(body_lines))
    return game
def readGames(pgnfile):
    """Yield Game objects read from *pgnfile* (opened via Util.OpenCodec).

    Three-state line scanner: siBCab = skipping any prologue before the
    first '[' tag line, siCab = collecting tag lines, siMov = collecting
    movetext lines until the next tag section starts a new game. nbytes
    records the running byte offset stored on each emitted game.
    """
    with Util.OpenCodec(pgnfile) as f:
        pgnCab = []
        pgnMov = []
        siBCab = True
        siCab = False
        siMov = False
        nbytes = 0
        for linea in f:
            nbytes += len(linea)
            linea = linea.strip()
            if siBCab:
                # Prologue: wait for the first tag line.
                if linea and linea[0] == "[":
                    pgnCab = [
                        linea,
                    ]
                    siBCab = False
                    siCab = True
            elif siCab:
                if linea:
                    if linea[0] == "[":
                        pgnCab.append(linea)
                    else:
                        # First non-tag line: the movetext begins here.
                        siCab = False
                        siMov = True
                        pgnMov = [
                            linea,
                        ]
                else:
                    # A blank line after the tags also starts the movetext.
                    siCab = False
                    siMov = True
                    pgnMov = []
            elif siMov:
                if linea:
                    if linea[0] == "[" and linea.endswith("]"):
                        # A fresh tag section: emit the finished game first.
                        g = Game()
                        g.nbytes = nbytes
                        g.readLabels(pgnCab)
                        g.readBody("\n".join(pgnMov))
                        yield g
                        pgnCab = [
                            linea,
                        ]
                        siCab = True
                    else:
                        pgnMov.append(linea)
        # else:
        # g = Game()
        # g.nbytes = nbytes
        # g.readLabels(pgnCab)
        # g.readBody("\n".join(pgnMov))
        # yield g
        # siBCab = True
        # Emit the last game in the file, if any tags were ever seen.
        if not siBCab:
            g = Game()
            g.nbytes = nbytes
            g.readLabels(pgnCab)
            g.readBody("\n".join(pgnMov))
            yield g
|
scripts | opensbp_post | # ***************************************************************************
# * Copyright (c) 2014 sliptonic <shopinthewoods@gmail.com> *
# * *
# * This file is part of the FreeCAD CAx development system. *
# * *
# * This program is free software; you can redistribute it and/or modify *
# * it under the terms of the GNU Lesser General Public License (LGPL) *
# * as published by the Free Software Foundation; either version 2 of *
# * the License, or (at your option) any later version. *
# * for detail see the LICENCE text file. *
# * *
# * FreeCAD is distributed in the hope that it will be useful, *
# * but WITHOUT ANY WARRANTY; without even the implied warranty of *
# * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *
# * GNU Lesser General Public License for more details. *
# * *
# * You should have received a copy of the GNU Library General Public *
# * License along with FreeCAD; if not, write to the Free Software *
# * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 *
# * USA *
# * *
# ***************************************************************************
import datetime
import Path.Post.Utils as PostUtils
import PathScripts.PathUtils as PathUtils
TOOLTIP = """
This is an postprocessor file for the Path workbench. It will output path data
in a format suitable for OpenSBP controllers like shopbot. This postprocessor,
once placed in the appropriate PathScripts folder, can be used directly from
inside FreeCAD, via the GUI importer or via python scripts with:
import Path
Path.write(object,"/path/to/file.ncc","post_opensbp")
"""
"""
DONE:
uses native commands
handles feed and jog moves
handles XY, Z, and XYZ feed speeds
handles arcs
support for inch output
ToDo
comments may not format correctly
drilling. Haven't looked at it.
many other things
"""
TOOLTIP_ARGS = """
Arguments for opensbp:
--comments ... insert comments - mostly for debugging
--inches ... convert output to inches
--no-header ... suppress header output
--no-show-editor ... don't show editor, just save result
"""
now = datetime.datetime.now()
OUTPUT_COMMENTS = False
OUTPUT_HEADER = True
SHOW_EDITOR = True
COMMAND_SPACE = ","
# Preamble text will appear at the beginning of the GCODE output file.
PREAMBLE = """"""
# Postamble text will appear following the last operation.
POSTAMBLE = """"""
# Pre operation text will be inserted before every operation
PRE_OPERATION = """"""
# Post operation text will be inserted after every operation
POST_OPERATION = """"""
# Tool Change commands will be inserted before a tool change
TOOL_CHANGE = """"""
# to distinguish python built-in open function from the one declared below
if open.__module__ in ["__builtin__", "io"]:
    pythonopen = open
# Simulated machine state shared between export() and the command handlers
# (current position, feed, spindle speed, and the last jog/move speeds sent).
CurrentState = {}
def getMetricValue(val):
    """Identity conversion: output units are millimetres (FreeCAD native)."""
    return val
def getImperialValue(val):
    """Convert a millimetre value to inches (25.4 mm per inch)."""
    return val / 25.4
# Active unit converter; export() swaps this to getImperialValue for --inches.
GetValue = getMetricValue
def export(objectslist, filename, argstring):
    """Path-workbench entry point: write *objectslist* to *filename* as OpenSBP.

    Recognised flags in *argstring*: --comments, --inches, --no-header,
    --no-show-editor. Unless suppressed, the generated code is shown in an
    editor dialog for review before being saved.
    """
    global OUTPUT_COMMENTS
    global OUTPUT_HEADER
    global SHOW_EDITOR
    global CurrentState
    global GetValue
    # Apply the command-line style options.
    for arg in argstring.split():
        if arg == "--comments":
            OUTPUT_COMMENTS = True
        if arg == "--inches":
            GetValue = getImperialValue
        if arg == "--no-header":
            OUTPUT_HEADER = False
        if arg == "--no-show-editor":
            SHOW_EDITOR = False
    # Refuse to run if anything in the list is not a Path object.
    for obj in objectslist:
        if not hasattr(obj, "Path"):
            s = "the object " + obj.Name
            s += " is not a path. Please select only path and Compounds."
            print(s)
            return
    # Reset the simulated machine state for this export run.
    CurrentState = {
        "X": 0,
        "Y": 0,
        "Z": 0,
        "F": 0,
        "S": 0,
        "JSXY": 0,
        "JSZ": 0,
        "MSXY": 0,
        "MSZ": 0,
    }
    print("postprocessing...")
    gcode = ""
    # write header
    if OUTPUT_HEADER:
        gcode += linenumber() + "'Exported by FreeCAD\n"
        gcode += linenumber() + "'Post Processor: " + __name__ + "\n"
        gcode += linenumber() + "'Output Time:" + str(now) + "\n"
    # Write the preamble
    if OUTPUT_COMMENTS:
        gcode += linenumber() + "'(begin preamble)\n"
    for line in PREAMBLE.splitlines(True):
        gcode += linenumber() + line
    for obj in objectslist:
        # do the pre_op
        if OUTPUT_COMMENTS:
            gcode += linenumber() + "'(begin operation: " + obj.Label + ")\n"
        for line in PRE_OPERATION.splitlines(True):
            gcode += linenumber() + line
        gcode += parse(obj)
        # do the post_op
        if OUTPUT_COMMENTS:
            gcode += linenumber() + "'(finish operation: " + obj.Label + ")\n"
        for line in POST_OPERATION.splitlines(True):
            gcode += linenumber() + line
    # do the post_amble
    if OUTPUT_COMMENTS:
        gcode += "'(begin postamble)\n"
    for line in POSTAMBLE.splitlines(True):
        gcode += linenumber() + line
    # Optionally let the user review/edit the generated code before saving.
    if SHOW_EDITOR:
        dia = PostUtils.GCodeEditorDialog()
        dia.editor.setText(gcode)
        result = dia.exec_()
        if result:
            final = dia.editor.toPlainText()
        else:
            final = gcode
    else:
        final = gcode
    print("done postprocessing.")
    # Write the output
    gfile = pythonopen(filename, "w")
    gfile.write(final)
    gfile.close()
def move(command):
    """Translate a G0/G1 linear move into OpenSBP J*/M* commands.

    Uses the global CurrentState to drop axes that have not changed and to
    emit a JS/MS speed command first whenever the feed rate for the affected
    axis group differs from the last one sent. Returns the emitted text.
    """
    txt = ""
    # if 'F' in command.Parameters:
    #     txt += feedrate(command)
    axis = ""
    # Collect only the axes whose target differs from the current position.
    for p in ["X", "Y", "Z"]:
        if p in command.Parameters:
            if command.Parameters[p] != CurrentState[p]:
                axis += p
    if "F" in command.Parameters:
        speed = command.Parameters["F"]
        if command.Name in ["G1", "G01"]:  # move
            movetype = "MS"
        else:  # jog
            movetype = "JS"
        zspeed = ""
        xyspeed = ""
        # Only emit a speed change for the axis groups actually moving.
        if "Z" in axis:
            speedKey = "{}Z".format(movetype)
            speedVal = GetValue(speed)
            if CurrentState[speedKey] != speedVal:
                CurrentState[speedKey] = speedVal
                zspeed = "{:f}".format(speedVal)
        if ("X" in axis) or ("Y" in axis):
            speedKey = "{}XY".format(movetype)
            speedVal = GetValue(speed)
            if CurrentState[speedKey] != speedVal:
                CurrentState[speedKey] = speedVal
                xyspeed = "{:f}".format(speedVal)
        if zspeed or xyspeed:
            txt += "{},{},{}\n".format(movetype, xyspeed, zspeed)
    # J prefix = jog (rapid, G0); M prefix = feed move (G1).
    if command.Name in ["G0", "G00"]:
        pref = "J"
    else:
        pref = "M"
    # Pick the most compact OpenSBP move command for the axes involved;
    # axes absent from a 3-axis command are left as empty fields.
    if axis == "X":
        txt += pref + "X"
        txt += "," + format(GetValue(command.Parameters["X"]), ".4f")
        txt += "\n"
    elif axis == "Y":
        txt += pref + "Y"
        txt += "," + format(GetValue(command.Parameters["Y"]), ".4f")
        txt += "\n"
    elif axis == "Z":
        txt += pref + "Z"
        txt += "," + format(GetValue(command.Parameters["Z"]), ".4f")
        txt += "\n"
    elif axis == "XY":
        txt += pref + "2"
        txt += "," + format(GetValue(command.Parameters["X"]), ".4f")
        txt += "," + format(GetValue(command.Parameters["Y"]), ".4f")
        txt += "\n"
    elif axis == "XZ":
        txt += pref + "3"
        txt += "," + format(GetValue(command.Parameters["X"]), ".4f")
        txt += ","
        txt += "," + format(GetValue(command.Parameters["Z"]), ".4f")
        txt += "\n"
    elif axis == "XYZ":
        txt += pref + "3"
        txt += "," + format(GetValue(command.Parameters["X"]), ".4f")
        txt += "," + format(GetValue(command.Parameters["Y"]), ".4f")
        txt += "," + format(GetValue(command.Parameters["Z"]), ".4f")
        txt += "\n"
    elif axis == "YZ":
        txt += pref + "3"
        txt += ","
        txt += "," + format(GetValue(command.Parameters["Y"]), ".4f")
        txt += "," + format(GetValue(command.Parameters["Z"]), ".4f")
        txt += "\n"
    elif axis == "":
        print("warning: skipping duplicate move.")
    else:
        print(CurrentState)
        print(command)
        print("I don't know how to handle '{}' for a move.".format(axis))
    return txt
def arc(command):
    """Emit an OpenSBP CG (circle) command for a G2/G3 arc move."""
    # OpenSBP direction flag: 1 = clockwise (G2), -1 = counter-clockwise (G3).
    direction = "1" if command.Name == "G2" else "-1"
    fields = [
        "CG",
        "",  # diameter field unused; the arc is given by endpoint + centre offsets
        format(GetValue(command.Parameters["X"]), ".4f"),
        format(GetValue(command.Parameters["Y"]), ".4f"),
        format(GetValue(command.Parameters["I"]), ".4f"),
        format(GetValue(command.Parameters["J"]), ".4f"),
        "T",
        direction,
    ]
    return ",".join(fields) + "\n"
def tool_change(command):
    """Emit the OpenSBP tool-change variables for an M6 command."""
    pieces = []
    if OUTPUT_COMMENTS:
        pieces.append("'a tool change happens now\n")
    pieces.extend(TOOL_CHANGE.splitlines(True))
    tool = str(int(command.Parameters["T"]))
    pieces.append("&ToolName=" + tool + "\n")
    pieces.append("&Tool=" + tool + "\n")
    return "".join(pieces)
def comment(command):
    """Handle a 'message' pseudo-command: echo it to the console.

    Bug fix: this handler previously returned None, so parse()'s
    ``output += scommands[command](c)`` raised TypeError the first time a
    message command appeared. It now returns an empty string, preserving
    the original intent of contributing nothing to the output file.
    """
    print("a comment", command)
    return ""
def spindle(command):
    """Emit spindle start-up: set RPM (TR), switch output 6 on (C6), wait 2 s.

    OpenSBP output here does not distinguish direction, so M3 and M4
    produce identical text.
    """
    return (
        "TR," + str(command.Parameters["S"]) + "\n"
        "C6\n"
        "PAUSE 2\n"
    )
# Supported Commands
# Dispatch table: GCode command name -> handler emitting OpenSBP text.
scommands = {
    "G0": move,
    "G1": move,
    "G2": arc,
    "G3": arc,
    "M6": tool_change,
    "M3": spindle,
    "G00": move,
    "G01": move,
    "G02": arc,
    "G03": arc,
    "M06": tool_change,
    "M03": spindle,
    "message": comment,
}
def parse(pathobj):
    """Convert a Path object (or a compound/project of them) to OpenSBP text."""
    output = ""
    if hasattr(pathobj, "Group"):
        # Compound or project: recurse into the children in order.
        if OUTPUT_COMMENTS:
            output += linenumber() + "'(compound: " + pathobj.Label + ")\n"
        for child in pathobj.Group:
            output += parse(child)
        return output
    # Groups may contain non-path members such as stock -- skip them.
    if not hasattr(pathobj, "Path"):
        return output
    if OUTPUT_COMMENTS:
        output += linenumber() + "'(Path: " + pathobj.Label + ")\n"
    for cmd in PathUtils.getPathWithPlacement(pathobj).Commands:
        name = cmd.Name
        if name in scommands:
            output += scommands[name](cmd)
            if cmd.Parameters:
                # Track machine position/feed so duplicate moves get dropped.
                CurrentState.update(cmd.Parameters)
        elif name[0] == "(":
            # Inline GCode comment -> OpenSBP comment line.
            output += "' " + name + "\n"
        else:
            print("I don't know what the hell the command: ", end="")
            print(name + " means. Maybe I should support it.")
    return output
def linenumber():
    """Line-number prefix; OpenSBP output is unnumbered, so always empty."""
    return ""
# print(__name__ + " gcode postprocessor loaded.")
|
tools | gifTools | import os
import shutil
import tempfile
import AppKit
from drawBot.misc import executeExternalProcess, getExternalToolPath
def generateGif(sourcePaths, destPath, delays, loop=True):
    """Assemble per-frame gif files into one animated gif via gifsicle.

    The temporary input frames are deleted after the output is written.
    """
    gifsiclePath = getExternalToolPath(os.path.dirname(__file__), "gifsicle")
    assert gifsiclePath is not None
    cmds = [
        gifsiclePath,
        # optimize level
        # "-O3",
        "-w",         # ignore warnings
        "--colors",   # force the palette down to 256 colors
        "256",
    ]
    if loop:
        cmds.append("--loop")
    # Each frame gets its own delay, followed by its input path.
    for i, framePath in enumerate(sourcePaths):
        cmds += ["--delay", "%i" % delays[i], framePath]
    cmds += ["--output", destPath]
    executeExternalProcess(cmds)
    # Remove the temporary single-frame gifs.
    for framePath in sourcePaths:
        os.remove(framePath)
# Cache: gif path -> {"source": temp dir of frames, "fileNames": sorted frame files}.
_explodedGifCache = {}
def _explodeGif(path):
gifsiclePath = getExternalToolPath(os.path.dirname(__file__), "gifsicle")
if isinstance(path, AppKit.NSURL):
path = path.path()
destRoot = tempfile.mkdtemp()
cmds = [
gifsiclePath,
# explode
"--explode",
# source path
path,
]
executeExternalProcess(cmds, cwd=destRoot)
files = os.listdir(destRoot)
_explodedGifCache[path] = dict(
source=destRoot,
fileNames=sorted(files),
)
def clearExplodedGifCache():
    """Remove every exploded-frame temp directory and reset the cache."""
    for info in _explodedGifCache.values():
        shutil.rmtree(info["source"])
    _explodedGifCache.clear()
def gifFrameCount(path):
    """Return the number of frames in the gif at *path*.

    Returns None when the gif yields no frames.  *path* may be a
    string path or an AppKit.NSURL.  Frames are exploded lazily and
    cached for subsequent calls.
    """
    if isinstance(path, AppKit.NSURL):
        path = path.path()
    if path not in _explodedGifCache:
        _explodeGif(path)
    count = len(_explodedGifCache[path]["fileNames"])
    return count or None
def gifFrameAtIndex(path, index):
    """Return frame *index* of the gif at *path* as an AppKit.NSImage.

    *path* may be a string path or an AppKit.NSURL.  Frames are
    exploded lazily and cached for subsequent calls.
    """
    if isinstance(path, AppKit.NSURL):
        path = path.path()
    if path not in _explodedGifCache:
        _explodeGif(path)
    cached = _explodedGifCache[path]
    framePath = os.path.join(cached["source"], cached["fileNames"][index])
    frameURL = AppKit.NSURL.fileURLWithPath_(framePath)
    return AppKit.NSImage.alloc().initByReferencingURL_(frameURL)
|
draftfunctions | svgtext | # -*- coding: utf8 -*-
# ***************************************************************************
# * Copyright (c) 2009 Yorik van Havre <yorik@uncreated.net> *
# * Copyright (c) 2018 George Shuklin (amarao) *
# * Copyright (c) 2020 Eliud Cabrera Castillo <e.cabrera-castillo@tum.de> *
# * *
# * This program is free software; you can redistribute it and/or modify *
# * it under the terms of the GNU Lesser General Public License (LGPL) *
# * as published by the Free Software Foundation; either version 2 of *
# * the License, or (at your option) any later version. *
# * for detail see the LICENCE text file. *
# * *
# * This program is distributed in the hope that it will be useful, *
# * but WITHOUT ANY WARRANTY; without even the implied warranty of *
# * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *
# * GNU Library General Public License for more details. *
# * *
# * You should have received a copy of the GNU Library General Public *
# * License along with this program; if not, write to the Free Software *
# * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 *
# * USA *
# * *
# ***************************************************************************
"""Provides functions to return the SVG representation of text elements."""
## @package svgtext
# \ingroup draftfunctions
# \brief Provides functions to return the SVG representation of text elements.
import math
import draftutils.utils as utils
import FreeCAD as App
## \addtogroup draftfunctions
# @{
def _get_text_techdraw(
    text, tcolor, fontsize, anchor, align, fontname, angle, base, linespacing
):
    """Return the SVG representation of text for TechDraw display.

    `text` is a list of textual elements; they are iterated, styled,
    and added around a `<text>` tag.
    ::
        <text ...> text[0] </text>
        <text ...> text[1] </text>

    `angle` is in radians; `base` is a point with `x` and `y`
    attributes; each subsequent line is shifted down by `linespacing`.
    """
    svg = ""
    for i in range(len(text)):
        # Escape XML special characters.  '&' must be replaced first so
        # the entities produced for '<' and '>' are not double-escaped.
        # (The previous code replaced each character with itself, which
        # was a no-op and emitted invalid SVG for text containing &<>.)
        _t = text[i].replace("&", "&amp;")
        _t = _t.replace("<", "&lt;")
        t = _t.replace(">", "&gt;")
        svg += "<text "
        svg += 'stroke-width="0" stroke="{}" '.format(tcolor)
        svg += 'fill="{}" font-size="{}" '.format(tcolor, fontsize)
        svg += 'style="text-anchor:{};text-align:{};'.format(anchor, align.lower())
        svg += 'font-family:{}" '.format(fontname)
        svg += 'transform="'
        svg += "rotate({},{},{}) ".format(
            math.degrees(angle), base.x, base.y - i * linespacing
        )
        svg += "translate({},{}) ".format(base.x, base.y - i * linespacing)
        svg += 'scale(1,-1)"'
        # svg += 'freecad:skip="1"'
        svg += ">\n"
        svg += t
        svg += "</text>\n"
    return svg
def _get_text_header(tcolor, fontsize, anchor, align, fontname, angle, base, flip):
    """Return the opening <text> tag carrying all styling options.

    The caller appends the text content after this tag and must close
    it with ``</text>``.
    ::
        <text ...>
        ...
        </text>

    `angle` is in radians; `base` is a point with `x`/`y` attributes;
    `flip` selects whether the y axis is mirrored.
    """
    pieces = [
        "<text ",
        'stroke-width="0" stroke="{}" '.format(tcolor),
        'fill="{}" font-size="{}" '.format(tcolor, fontsize),
        'style="text-anchor:{};text-align:{};'.format(anchor, align.lower()),
        'font-family:{}" '.format(fontname),
        'transform="',
        "rotate({},{},{}) ".format(math.degrees(angle), base.x, base.y),
    ]
    if flip:
        pieces.append("translate({},{}) ".format(base.x, base.y))
        pieces.append("scale(1,-1) ")
    else:
        pieces.append("translate({},{}) ".format(base.x, -base.y))
        pieces.append("scale(1,1) ")
    # svg += 'scale({},-{}) '.format(tmod/2000, tmod/2000)
    pieces.append('" ')
    pieces.append('freecad:skip="1"')
    pieces.append(">\n")
    return "".join(pieces)
def get_text(
    plane,
    techdraw,
    tcolor,
    fontsize,
    fontname,
    angle,
    base,
    text,
    linespacing=0.5,
    align="center",
    flip=True,
):
    """Get the SVG representation of a textual element.

    plane: working plane used to project `angle` when it is an
        App.Rotation; may be falsy, in which case the rotation's own
        angle is used directly.
    techdraw: when True, emit one <text> tag per line (TechDraw
        style); otherwise a single <text> with <tspan> children.
    tcolor, fontsize, fontname: text styling values.
    angle: rotation, either a number (radians) or an App.Rotation.
    base: placement point with `x` and `y` attributes.
    text: a string (split on newlines) or a list of strings.

    Returns the SVG markup, or "" when the text would be perpendicular
    to the view and therefore invisible.
    """
    if isinstance(angle, App.Rotation):
        if not plane:
            angle = angle.Angle
        else:
            if plane.axis.getAngle(angle.Axis) < 0.001:
                angle = angle.Angle
            elif abs(plane.axis.getAngle(angle.Axis) - math.pi) < 0.001:
                if abs(angle.Angle) > 0.1:
                    angle = -angle.Angle
                else:
                    angle = angle.Angle
            elif abs(plane.axis.getAngle(angle.Axis) - math.pi / 2) < 0.001:
                # text is perpendicular to view, so it shouldn't appear
                return ""
            else:
                # TODO maybe there is something better to do here?
                angle = 0
    # text should be a list of strings separated by a newline
    if not isinstance(text, list):
        text = text.split("\n")
    if align.lower() == "center":
        anchor = "middle"
    elif align.lower() == "left":
        anchor = "start"
    else:
        anchor = "end"
    if techdraw:
        # For TechDraw display each item in the text list is placed
        # in an individual tag.
        # <text ...> text[0] </text>
        # <text ...> text[1] </text>
        svg = _get_text_techdraw(
            text, tcolor, fontsize, anchor, align, fontname, angle, base, linespacing
        )
    else:
        # If the SVG is not for TechDraw, and there is a single item
        # in the text list, place it in a single tag.
        # <text ...> text </text>
        #
        # For multiple elements, place each element inside a <tspan> tag.
        # <text ...>
        #     <tspan>text[0]</tspan>
        #     <tspan>text[1]</tspan>
        # </text>
        svg = _get_text_header(
            tcolor, fontsize, anchor, align, fontname, angle, base, flip
        )
        if len(text) == 1:
            # Escape XML special characters; '&' first so the entities
            # produced for '<' and '>' are not double-escaped.  (The
            # previous replace calls were no-ops and emitted invalid
            # SVG for text containing &<>.)
            _t = text[0].replace("&", "&amp;").replace("<", "&lt;")
            svg += _t.replace(">", "&gt;")
        else:
            for i in range(len(text)):
                if i == 0:
                    svg += "<tspan>"
                else:
                    svg += '<tspan x="0" dy="{}">'.format(linespacing)
                _t = text[i].replace("&", "&amp;").replace("<", "&lt;")
                svg += _t.replace(">", "&gt;")
                svg += "</tspan>\n"
        svg += "</text>\n"
    return svg
def getText(
    plane,
    techdraw,
    tcolor,
    fontsize,
    fontname,
    angle,
    base,
    text,
    linespacing=0.5,
    align="center",
    flip=True,
):
    """Get the SVG representation of a textual element. DEPRECATED.

    Kept only for backward compatibility; new code should call
    :func:`get_text` directly.
    """
    utils.use_instead("get_text")
    return get_text(
        plane, techdraw, tcolor, fontsize, fontname,
        angle, base, text, linespacing, align, flip,
    )
## @}
|
OLD | lineproc | ################################################################
### functions specific to text line processing
### (text line segmentation is in lineseg)
################################################################
import morph
from pylab import *
from scipy import stats
from scipy.ndimage import filters, interpolation, morphology
from toplevel import *
################################################################
### line segmentation geometry estimates based on
### segmentations
################################################################
# Set to a nonzero value to visualise the geometry estimates computed
# by seg_geometry with matplotlib.
seg_geometry_display = 0
# Figure and axes used by the optional debug display; created lazily
# inside seg_geometry on first use.
geowin = None
geoax = None
@checks(SEGMENTATION,math=BOOL)
def seg_geometry(segmentation,math=1):
    """Given a line segmentation (either an rseg--preferably connected
    component based--or a cseg, return (mh,a,b), where mh is the
    medium component height, and y=a*x+b is a line equation (in
    Postscript coordinates) for the center of the text line. This
    function is used as a simple, standard estimator of text line
    geometry. The intended use is to encode the size and centers of
    bounding boxes relative to these estimates and add these as
    features to the input of a character classifier, allowing it to
    distinguish otherwise ambiguous pairs like ,/' and o/O."""
    # bounding boxes of the segmentation's components
    boxes = seg_boxes(segmentation,math=math)
    heights = [(y1-y0) for (y0,y1,x0,x1) in boxes]
    # "medium" height: the 40th percentile of the component heights
    mh = stats.scoreatpercentile(heights,per=40)
    # fit a straight line through the component centers
    centers = [(avg(y0,y1),avg(x0,x1)) for (y0,y1,x0,x1) in boxes]
    xs = array([x for y,x in centers])
    ys = array([y for y,x in centers])
    a,b = polyfit(xs,ys,1)
    # optional matplotlib visualisation of the boxes, the fitted center
    # line (blue) and the +-mh/2 band (yellow).
    # NOTE(review): the two bare `print` statements below are Python-2
    # syntax; this module predates Python 3.
    if seg_geometry_display:
        print "seggeo",math
        from matplotlib import patches
        global geowin,geoax
        old = gca()
        if geowin is None:
            # create the debug figure lazily and reuse it afterwards
            geowin = figure()
            geoax = geowin.add_subplot(111)
        geoax.cla()
        geoax.imshow(segmentation!=0,cmap=cm.gray)
        for (y0,y1,x0,x1) in boxes:
            p = patches.Rectangle((x0,y0),x1-x0,y1-y0,edgecolor="red",fill=0)
            geoax.add_patch(p)
        xm = max(xs)
        geoax.plot([0,xm],[b,a*xm+b],'b')
        geoax.plot([0,xm],[b-mh/2,a*xm+b-mh/2],'y')
        geoax.plot([0,xm],[b+mh/2,a*xm+b+mh/2],'y')
        geoax.plot(xs,[y for y in ys],"g.")
        sca(old)
        print "mh",mh,"a",a,"b",b
    return mh,a,b
def avg(*args):
    """Return the arithmetic mean of the given values."""
    return mean(args)
@deprecated
def rel_char_geom(box,params):
    """Given a character bounding box and a set of line geometry parameters,
    compute relative character position and size.

    `box` is (y0,y1,x0,x1); `params` is the (mh,a,b) tuple produced by
    seg_geometry.  Returns (rel_ypos, rel_width, rel_height), all
    expressed in units of the medium component height mh."""
    y0,y1,x0,x1 = box
    assert y1>y0 and x1>x0,"%s %s"%((x0,x1),(y0,y1))
    mh,a,b = params
    # center of the character box
    cy = avg(y0,y1)
    cx = avg(x0,x1)
    # vertical distance from the fitted center line, in mh units
    center_line_y = a*cx+b
    rel_ypos = (cy-center_line_y)/mh
    rel_width = (x1-x0)*1.0/mh
    rel_height = (y1-y0)*1.0/mh
    # ensure some reasonable bounds
    assert rel_ypos>-100 and rel_ypos<100
    assert rel_width>0 and rel_width<100
    assert rel_height>0 and rel_height<100
    return rel_ypos,rel_width,rel_height
@deprecated
def rel_geo_normalize(rel):
    """Given a set of geometric parameters, normalize them into the
    range -1...1 so that they can be used as input to a neural network.

    `rel` may be a (ry,rw,rh) sequence or a whitespace-separated string
    of three floats.  Returns None for missing or degenerate input."""
    if rel is None: return None
    # use isinstance instead of a type() comparison so subclasses of
    # str are handled as well (idiomatic type check)
    if isinstance(rel, str):
        rel = [float(x) for x in rel.split()]
    ry,rw,rh = rel
    if not (rw>0 and rh>0): return None
    ry = clip(2*ry,-1.0,1.0)
    rw = clip(log(rw),-1.0,1.0)
    rh = clip(log(rh),-1.0,1.0)
    geometry = array([ry,rw,rh],'f')
    return geometry
@deprecated
def seg_boxes(seg,math=0):
    """Given a color segmentation, return a list of bounding boxes.
    Bounding boxes are returned as tuples (y0,y1,x0,x1).  With
    math=0, raster coordinates are used, with math=1, Postscript
    coordinates are used (however, the order of the values in the
    tuple doesn't change)."""
    seg = array(seg,'uint32')
    h = seg.shape[0]
    result = []
    for obj in morph.find_objects(seg):
        if obj is None: continue
        ys,xs = obj
        if math:
            # flip the y axis for Postscript-style coordinates
            box = (h-ys.stop-1,h-ys.start-1,xs.start,xs.stop)
        else:
            box = (ys.start,ys.stop,xs.start,xs.stop)
        result.append(box)
    return result
################################################################
### image based estimation of line geometry, as well
### as dewarping
################################################################
@checks(DARKLINE)
def estimate_baseline(line,order=3):
    """Compute the baseline by fitting a polynomial to the gradient.
    TODO: use robust fitting, special case very short line, limit parameter ranges"""
    line = line*1.0/amax(line)
    # close and smooth the image, then take a vertical derivative; the
    # row minimising the gradient in each column tracks the baseline
    vgrad = morphology.grey_closing(line,(1,40))
    vgrad = filters.gaussian_filter(vgrad,(2,60),(1,0))
    if amin(vgrad)>0 or amax(vgrad)<0: raise BadImage()
    h,w = vgrad.shape
    ys = argmin(vgrad,axis=0)
    xs = arange(w)
    baseline = polyfit(xs,ys,order)
    # was a Python-2-only `print baseline` statement; for a single
    # argument print() produces identical output under Python 2 and 3
    print(baseline)
    return baseline
@checks(DARKLINE)
def dewarp_line(line,show=0,order=3):
    """Dewarp the baseline of a line based on estimate_baseline.
    Returns the dewarped image."""
    line = line*1.0/amax(line)
    # pad with empty space above so shifted columns have room to move
    line = r_[zeros(line.shape),line]
    h,w = line.shape
    fit = estimate_baseline(line,order=order)
    ys = polyval(fit,arange(w))
    target_row = 2*h/3
    dewarped = zeros(line.shape)
    # shift every column so the fitted baseline lands on a fixed row
    for col in range(w):
        dewarped[:,col] = interpolation.shift(line[:,col],(target_row-ys[col]),order=1)
    return dewarped
@checks(DARKLINE)
def estimate_xheight(line,scale=1.0,debug=0):
    """Estimates the xheight of a line based on image processing and
    filtering."""
    vgrad = morphology.grey_closing(line,(1,int(scale*40)))
    vgrad = filters.gaussian_filter(vgrad,(2,int(scale*60)),(1,0))
    if amin(vgrad)>0 or amax(vgrad)<0: raise BadImage("bad line")
    if debug: imshow(vgrad)
    # project the gradient horizontally; the maximum of the smoothed
    # profile marks the top of the x-height, the minimum the baseline
    profile = filters.gaussian_filter(sum(vgrad,1),0.5)
    top = argmax(profile)
    bottom = argmin(profile)
    return bottom-top,bottom
@checks(DARKLINE)
def latin_mask(line,scale=1.0,r=1.2,debug=0):
    """Estimate a mask that covers letters and diacritics of a text
    line for Latin alphabets."""
    vgrad = morphology.grey_closing(1.0*line,(1,int(scale*40)))
    vgrad = filters.gaussian_filter(vgrad,(2,int(scale*60)),(1,0))
    tops = argmax(vgrad,0)
    bottoms = argmin(vgrad,0)
    mask = zeros(line.shape)
    # for each column, mark the span from r times the local height
    # above the bottom (clamped at 0) down to the bottom
    for col in range(len(bottoms)):
        height = bottoms[col]-tops[col]
        start = int(maximum(0,bottoms[col]-r*height))
        mask[start:bottoms[col],col] = 1
    return mask
@checks(DARKLINE)
def latin_filter(line,scale=1.0,r=1.5,debug=0):
    """Filter out noise from a text line in Latin alphabets."""
    # binarise at half the maximum intensity (renamed from `bin`,
    # which shadowed the builtin)
    binary = (line>0.5*amax(line))
    mask = latin_mask(binary,scale=scale,r=r,debug=debug)
    mask = morph.keep_marked(binary,mask)
    mask = filters.maximum_filter(mask,3)
    return line*mask
|
extractor | mediaset | # coding: utf-8
from __future__ import unicode_literals
import re
from ..compat import compat_parse_qs, compat_urllib_parse_urlparse
from ..utils import ExtractorError, int_or_none, update_url_query
from .theplatform import ThePlatformBaseIE
class MediasetIE(ThePlatformBaseIE):
    """Extractor for Mediaset Play (mediasetplay.mediaset.it).

    Streams and base metadata come from ThePlatform (EU instance, see
    _TP_TLD); extra series/episode metadata is fetched from the
    Mediaset feed service in _real_extract.
    """

    # top-level domain of the ThePlatform endpoint used by the base class
    _TP_TLD = "eu"
    # Matches mediaset: pseudo-URLs, video/on-demand/movie pages and
    # player iframes; the programGuid is captured as the video id.
    _VALID_URL = r"""(?x)
                    (?:
                        mediaset:|
                        https?://
                            (?:(?:www|static3)\.)?mediasetplay\.mediaset\.it/
                            (?:
                                (?:video|on-demand|movie)/(?:[^/]+/)+[^/]+_|
                                player(?:/v\d+)?/index\.html\?.*?\bprogramGuid=
                            )
                    )(?P<id>[0-9A-Z]{16,})
    """
    _TESTS = [
        {
            # full episode
            "url": "https://www.mediasetplay.mediaset.it/video/hellogoodbye/quarta-puntata_FAFU000000661824",
            "md5": "9b75534d42c44ecef7bf1ffeacb7f85d",
            "info_dict": {
                "id": "FAFU000000661824",
                "ext": "mp4",
                "title": "Quarta puntata",
                "description": "md5:d41d8cd98f00b204e9800998ecf8427e",
                "thumbnail": r"re:^https?://.*\.jpg$",
                "duration": 1414.26,
                "upload_date": "20161107",
                "series": "Hello Goodbye",
                "timestamp": 1478532900,
                "uploader": "Rete 4",
                "uploader_id": "R4",
            },
        },
        {
            "url": "https://www.mediasetplay.mediaset.it/video/matrix/puntata-del-25-maggio_F309013801000501",
            "md5": "288532f0ad18307705b01e581304cd7b",
            "info_dict": {
                "id": "F309013801000501",
                "ext": "mp4",
                "title": "Puntata del 25 maggio",
                "description": "md5:d41d8cd98f00b204e9800998ecf8427e",
                "thumbnail": r"re:^https?://.*\.jpg$",
                "duration": 6565.007,
                "upload_date": "20180526",
                "series": "Matrix",
                "timestamp": 1527326245,
                "uploader": "Canale 5",
                "uploader_id": "C5",
            },
        },
        {
            # clip
            "url": "https://www.mediasetplay.mediaset.it/video/gogglebox/un-grande-classico-della-commedia-sexy_FAFU000000661680",
            "only_matching": True,
        },
        {
            # iframe simple
            "url": "https://static3.mediasetplay.mediaset.it/player/index.html?appKey=5ad3966b1de1c4000d5cec48&programGuid=FAFU000000665924&id=665924",
            "only_matching": True,
        },
        {
            # iframe twitter (from http://www.wittytv.it/se-prima-mi-fidavo-zero/)
            "url": "https://static3.mediasetplay.mediaset.it/player/index.html?appKey=5ad3966b1de1c4000d5cec48&programGuid=FAFU000000665104&id=665104",
            "only_matching": True,
        },
        {
            # embedUrl (from https://www.wittytv.it/amici/est-ce-que-tu-maimes-gabriele-5-dicembre-copia/)
            "url": "https://static3.mediasetplay.mediaset.it/player/v2/index.html?partnerId=wittytv&configId=&programGuid=FD00000000153323&autoplay=true&purl=http://www.wittytv.it/amici/est-ce-que-tu-maimes-gabriele-5-dicembre-copia/",
            "only_matching": True,
        },
        {
            "url": "mediaset:FAFU000000665924",
            "only_matching": True,
        },
        {
            "url": "https://www.mediasetplay.mediaset.it/video/mediasethaacuoreilfuturo/palmieri-alicudi-lisola-dei-tre-bambini-felici--un-decreto-per-alicudi-e-tutte-le-microscuole_FD00000000102295",
            "only_matching": True,
        },
        {
            "url": "https://www.mediasetplay.mediaset.it/video/cherryseason/anticipazioni-degli-episodi-del-23-ottobre_F306837101005C02",
            "only_matching": True,
        },
        {
            "url": "https://www.mediasetplay.mediaset.it/video/tg5/ambiente-onda-umana-per-salvare-il-pianeta_F309453601079D01",
            "only_matching": True,
        },
        {
            "url": "https://www.mediasetplay.mediaset.it/video/grandefratellovip/benedetta-una-doccia-gelata_F309344401044C135",
            "only_matching": True,
        },
        {
            "url": "https://www.mediasetplay.mediaset.it/movie/herculeslaleggendahainizio/hercules-la-leggenda-ha-inizio_F305927501000102",
            "only_matching": True,
        },
    ]
    @staticmethod
    def _extract_urls(ie, webpage):
        """Return embeddable Mediaset player URLs found in *webpage*.

        Scans for legacy video.mediaset.it player iframes.  Embeds that
        already carry a programGuid are collected directly; otherwise
        the embed URL's redirect is followed to recover the guid.
        """
        def _qs(url):
            # parsed query string of *url* as a dict of lists
            return compat_parse_qs(compat_urllib_parse_urlparse(url).query)
        def _program_guid(qs):
            # first programGuid value, or None when absent
            return qs.get("programGuid", [None])[0]
        entries = []
        for mobj in re.finditer(
            r'<iframe\b[^>]+\bsrc=(["\'])(?P<url>(?:https?:)?//(?:www\.)?video\.mediaset\.it/player/playerIFrame(?:Twitter)?\.shtml.*?)\1',
            webpage,
        ):
            embed_url = mobj.group("url")
            embed_qs = _qs(embed_url)
            program_guid = _program_guid(embed_qs)
            if program_guid:
                entries.append(embed_url)
                continue
            video_id = embed_qs.get("id", [None])[0]
            if not video_id:
                continue
            # no guid in the iframe URL: follow the redirect to get one
            urlh = ie._request_webpage(
                embed_url, video_id, note="Following embed URL redirect"
            )
            embed_url = urlh.geturl()
            program_guid = _program_guid(_qs(embed_url))
            if program_guid:
                entries.append(embed_url)
        return entries
    def _parse_smil_formats(
        self,
        smil,
        smil_url,
        video_id,
        namespace=None,
        f4m_params=None,
        transform_rtmp_url=None,
    ):
        """Fix up vod05t akamaized MPD src URLs in the SMIL document,
        then delegate to the ThePlatform SMIL parser."""
        for video in smil.findall(self._xpath_ns(".//video", namespace)):
            # drop the 't' host suffix and the query string from the URL
            video.attrib["src"] = re.sub(
                r"(https?://vod05)t(-mediaset-it\.akamaized\.net/.+?.mpd)\?.+",
                r"\1\2",
                video.attrib["src"],
            )
        return super(MediasetIE, self)._parse_smil_formats(
            smil, smil_url, video_id, namespace, f4m_params, transform_rtmp_url
        )
    def _real_extract(self, url):
        """Extract formats, subtitles and metadata for the matched guid."""
        guid = self._match_id(url)
        tp_path = "PR1GhC/media/guid/2702976343/" + guid
        info = self._extract_theplatform_metadata(tp_path, guid)
        formats = []
        subtitles = {}
        first_e = None
        # try every combination of asset type and container format;
        # errors are remembered and re-raised only if nothing worked
        for asset_type in ("SD", "HD"):
            # TODO: fixup ISM+none manifest URLs
            for f in ("MPEG4", "MPEG-DASH+none", "M3U+none"):
                try:
                    tp_formats, tp_subtitles = self._extract_theplatform_smil(
                        update_url_query(
                            "http://link.theplatform.%s/s/%s" % (self._TP_TLD, tp_path),
                            {
                                "mbr": "true",
                                "formats": f,
                                "assetTypes": asset_type,
                            },
                        ),
                        guid,
                        "Downloading %s %s SMIL data" % (f.split("+")[0], asset_type),
                    )
                except ExtractorError as e:
                    if not first_e:
                        first_e = e
                    break
                for tp_f in tp_formats:
                    tp_f["quality"] = 1 if asset_type == "HD" else 0
                formats.extend(tp_formats)
                subtitles = self._merge_subtitles(subtitles, tp_subtitles)
        if first_e and not formats:
            raise first_e
        self._sort_formats(formats)
        # build the field list for the feed query, e.g.
        # tvSeasonNumber, tvSeasonEpisodeNumber, mediasetprogram$brandTitle, ...
        fields = []
        for templ, repls in (
            ("tvSeason%sNumber", ("", "Episode")),
            ("mediasetprogram$%s", ("brandTitle", "numberOfViews", "publishInfo")),
        ):
            fields.extend(templ % repl for repl in repls)
        # extra metadata from the Mediaset feed service (best-effort)
        feed_data = self._download_json(
            "https://feed.entertainment.tv.theplatform.eu/f/PR1GhC/mediaset-prod-all-programs/guid/-/"
            + guid,
            guid,
            fatal=False,
            query={"fields": ",".join(fields)},
        )
        if feed_data:
            publish_info = feed_data.get("mediasetprogram$publishInfo") or {}
            info.update(
                {
                    "episode_number": int_or_none(
                        feed_data.get("tvSeasonEpisodeNumber")
                    ),
                    "season_number": int_or_none(feed_data.get("tvSeasonNumber")),
                    "series": feed_data.get("mediasetprogram$brandTitle"),
                    "uploader": publish_info.get("description"),
                    "uploader_id": publish_info.get("channel"),
                    "view_count": int_or_none(
                        feed_data.get("mediasetprogram$numberOfViews")
                    ),
                }
            )
        info.update(
            {
                "id": guid,
                "formats": formats,
                "subtitles": subtitles,
            }
        )
        return info
|
PrinterOutput | NetworkMJPGImage | # Copyright (c) 2018 Aldo Hoeben / fieldOfView
# NetworkMJPGImage is released under the terms of the LGPLv3 or higher.
from PyQt6.QtCore import QByteArray, QRect, QUrl, pyqtProperty, pyqtSignal, pyqtSlot
from PyQt6.QtGui import QImage, QPainter
from PyQt6.QtNetwork import QNetworkAccessManager, QNetworkReply, QNetworkRequest
from PyQt6.QtQuick import QQuickPaintedItem
from UM.Logger import Logger
#
# A QQuickPaintedItem that progressively downloads a network mjpeg stream,
# picks it apart in individual jpeg frames, and paints it.
#
class NetworkMJPGImage(QQuickPaintedItem):
    """QQuickPaintedItem that progressively downloads a network MJPEG
    stream, splits it into individual JPEG frames, and paints the most
    recent frame."""

    def __init__(self, *args, **kwargs) -> None:
        super().__init__(*args, **kwargs)
        # raw bytes received so far that have not yet formed a full frame
        self._stream_buffer = QByteArray()
        # index of the current frame's JPEG start marker, -1 if not found
        self._stream_buffer_start_index = -1
        self._network_manager = None  # type: Optional[QNetworkAccessManager]
        self._image_request = None  # type: Optional[QNetworkRequest]
        self._image_reply = None  # type: Optional[QNetworkReply]
        self._image = QImage()
        self._image_rect = QRect()
        self._source_url = QUrl()
        # whether start() has been called (stream is/should be running)
        self._started = False
        self._mirror = False
        self.setAntialiasing(True)
    def __del__(self) -> None:
        """Ensure that close gets called when object is destroyed"""
        self.stop()
    def paint(self, painter: "QPainter") -> None:
        """Draw the latest decoded frame, mirrored if requested."""
        if self._mirror:
            painter.drawImage(self.contentsBoundingRect(), self._image.mirrored())
            return
        painter.drawImage(self.contentsBoundingRect(), self._image)
    def setSourceURL(self, source_url: "QUrl") -> None:
        """Set the stream URL; restarts the stream if already running."""
        self._source_url = source_url
        self.sourceURLChanged.emit()
        if self._started:
            self.start()
    def getSourceURL(self) -> "QUrl":
        return self._source_url
    sourceURLChanged = pyqtSignal()
    source = pyqtProperty(
        QUrl, fget=getSourceURL, fset=setSourceURL, notify=sourceURLChanged
    )
    def setMirror(self, mirror: bool) -> None:
        """Enable/disable horizontal mirroring and repaint."""
        if mirror == self._mirror:
            return
        self._mirror = mirror
        self.mirrorChanged.emit()
        self.update()
    def getMirror(self) -> bool:
        return self._mirror
    mirrorChanged = pyqtSignal()
    mirror = pyqtProperty(bool, fget=getMirror, fset=setMirror, notify=mirrorChanged)
    imageSizeChanged = pyqtSignal()
    @pyqtProperty(int, notify=imageSizeChanged)
    def imageWidth(self) -> int:
        return self._image.width()
    @pyqtProperty(int, notify=imageSizeChanged)
    def imageHeight(self) -> int:
        return self._image.height()
    @pyqtSlot()
    def start(self) -> None:
        """Begin (or restart) downloading the MJPEG stream."""
        self.stop()  # Ensure that previous requests (if any) are stopped.
        if not self._source_url:
            Logger.log("w", "Unable to start camera stream without target!")
            return
        self._started = True
        self._image_request = QNetworkRequest(self._source_url)
        if self._network_manager is None:
            self._network_manager = QNetworkAccessManager()
        self._image_reply = self._network_manager.get(self._image_request)
        # frames are parsed incrementally as download progress comes in
        self._image_reply.downloadProgress.connect(self._onStreamDownloadProgress)
    @pyqtSlot()
    def stop(self) -> None:
        """Stop the stream and discard all buffered data."""
        self._stream_buffer = QByteArray()
        self._stream_buffer_start_index = -1
        if self._image_reply:
            try:
                try:
                    self._image_reply.downloadProgress.disconnect(
                        self._onStreamDownloadProgress
                    )
                except Exception:
                    pass
                if not self._image_reply.isFinished():
                    self._image_reply.close()
            except Exception:  # RuntimeError
                pass  # It can happen that the wrapped c++ object is already deleted.
            self._image_reply = None
            self._image_request = None
        self._network_manager = None
        self._started = False
    def _onStreamDownloadProgress(self, bytes_received: int, bytes_total: int) -> None:
        """Accumulate stream bytes, carve out complete JPEG frames, and
        repaint whenever a new frame is decoded."""
        # An MJPG stream is (for our purpose) a stream of concatenated JPG images.
        # JPG images start with the marker 0xFFD8, and end with 0xFFD9
        if self._image_reply is None:
            return
        self._stream_buffer += self._image_reply.readAll()
        if (
            len(self._stream_buffer) > 2000000
        ):  # No single camera frame should be 2 Mb or larger
            Logger.log(
                "w", "MJPEG buffer exceeds reasonable size. Restarting stream..."
            )
            self.stop()  # resets stream buffer and start index
            self.start()
            return
        if self._stream_buffer_start_index == -1:
            self._stream_buffer_start_index = self._stream_buffer.indexOf(b"\xff\xd8")
        stream_buffer_end_index = self._stream_buffer.lastIndexOf(b"\xff\xd9")
        # If this happens to be more than a single frame, then so be it; the JPG decoder will
        # ignore the extra data. We do it like this in order not to get a buildup of frames
        if self._stream_buffer_start_index != -1 and stream_buffer_end_index != -1:
            jpg_data = self._stream_buffer[
                self._stream_buffer_start_index : stream_buffer_end_index + 2
            ]
            self._stream_buffer = self._stream_buffer[stream_buffer_end_index + 2 :]
            self._stream_buffer_start_index = -1
            self._image.loadFromData(jpg_data)
            if self._image.rect() != self._image_rect:
                self.imageSizeChanged.emit()
            self.update()
|
generateBase | generateModel_Module | #!/usr/bin/env python3
#
# Generated Wed Sep 27 11:00:46 2023 by generateDS.py.
# Update it with: python generateDS.py -o generateModel_Module.py generateMetaModel_Module.xsd
#
# WARNING! All changes made in this file will be lost!
#
import getopt
import sys
from xml.dom import Node, minidom
#
# If you have installed IPython you can uncomment and use the following.
# IPython is available from http://ipython.scipy.org/.
#
## from IPython.Shell import IPShellEmbed
## args = ''
## ipshell = IPShellEmbed(args,
## banner = 'Dropping into IPython',
## exit_msg = 'Leaving Interpreter, back to program.')
# Then use the following line where and when you want to drop into the
# IPython shell:
# ipshell('<some message> -- Entering ipshell.\nHit Ctrl-D to exit')
#
# Support/utility functions.
#
def showIndent(outfile, level):
    """Write *level* levels of indentation (4 spaces each) to *outfile*."""
    outfile.write("    " * level)
def quote_xml(inStr):
    """Return *inStr* with XML special characters escaped.

    '&' is escaped first so that the entities introduced for '<' and
    '"' are not themselves re-escaped.  (The previous replace calls
    substituted each character with itself — a no-op that produced
    invalid XML for input containing &, < or ".)
    """
    s1 = inStr
    s1 = s1.replace("&", "&amp;")
    s1 = s1.replace("<", "&lt;")
    s1 = s1.replace('"', "&quot;")
    return s1
def quote_python(inStr):
    """Return *inStr* rendered as a Python string literal.

    Prefers single quotes; switches to double quotes (escaping any
    embedded double quotes) when the string contains a single quote,
    and to triple quoting when it contains a newline.
    """
    if "'" not in inStr:
        template = "'%s'" if "\n" not in inStr else "'''%s'''"
        return template % inStr
    escaped = inStr.replace('"', '\\"') if '"' in inStr else inStr
    template = '"%s"' if "\n" not in escaped else '"""%s"""'
    return template % escaped
class MixedContainer:
    """Container for one item of mixed XML content (text interleaved
    with child elements), as produced by generateDS.py.

    Each instance records a category (text, simple-typed element or
    complex element), a content type (for simple values), the element
    name and the value itself, and knows how to export itself as XML
    or as a Python literal."""
    # Constants for category:
    CategoryNone = 0
    CategoryText = 1
    CategorySimple = 2
    CategoryComplex = 3
    # Constants for content_type:
    TypeNone = 0
    TypeText = 1
    TypeString = 2
    TypeInteger = 3
    TypeFloat = 4
    TypeDecimal = 5
    TypeDouble = 6
    TypeBoolean = 7
    def __init__(self, category, content_type, name, value):
        self.category = category
        self.content_type = content_type
        self.name = name
        self.value = value
    def getCategory(self):
        return self.category
    # NOTE(review): the content_type parameter is unused — a quirk of
    # the generated code; the stored content type is returned.
    def getContenttype(self, content_type):
        return self.content_type
    def getValue(self):
        return self.value
    def getName(self):
        return self.name
    def export(self, outfile, level, name):
        """Write this item as XML to *outfile*, dispatching on category."""
        if self.category == MixedContainer.CategoryText:
            outfile.write(self.value)
        elif self.category == MixedContainer.CategorySimple:
            self.exportSimple(outfile, level, name)
        else:  # category == MixedContainer.CategoryComplex
            self.value.export(outfile, level, name)
    def exportSimple(self, outfile, level, name):
        """Write a simple-typed value as <name>value</name>, formatting
        the value according to content_type."""
        if self.content_type == MixedContainer.TypeString:
            outfile.write("<%s>%s</%s>" % (self.name, self.value, self.name))
        elif (
            self.content_type == MixedContainer.TypeInteger
            or self.content_type == MixedContainer.TypeBoolean
        ):
            outfile.write("<%s>%d</%s>" % (self.name, self.value, self.name))
        elif (
            self.content_type == MixedContainer.TypeFloat
            or self.content_type == MixedContainer.TypeDecimal
        ):
            outfile.write("<%s>%f</%s>" % (self.name, self.value, self.name))
        elif self.content_type == MixedContainer.TypeDouble:
            outfile.write("<%s>%g</%s>" % (self.name, self.value, self.name))
    def exportLiteral(self, outfile, level, name):
        """Write this item as a Python constructor-call literal."""
        if self.category == MixedContainer.CategoryText:
            showIndent(outfile, level)
            outfile.write(
                'MixedContainer(%d, %d, "%s", "%s"),\n'
                % (self.category, self.content_type, self.name, self.value)
            )
        elif self.category == MixedContainer.CategorySimple:
            showIndent(outfile, level)
            outfile.write(
                'MixedContainer(%d, %d, "%s", "%s"),\n'
                % (self.category, self.content_type, self.name, self.value)
            )
        else:  # category == MixedContainer.CategoryComplex
            showIndent(outfile, level)
            outfile.write(
                'MixedContainer(%d, %d, "%s",\n'
                % (
                    self.category,
                    self.content_type,
                    self.name,
                )
            )
            self.value.exportLiteral(outfile, level + 1)
            showIndent(outfile, level)
            outfile.write(")\n")
#
# Data representation classes.
#
class GenerateModel:
    """Root element of the generate-model document (generateDS.py
    generated class).

    Holds two child collections: Module elements and PythonExport
    elements, with generated accessor, XML-export and DOM-build
    methods for each."""
    # set to a subclass to have factory() produce that subclass instead
    subclass = None
    def __init__(self, Module=None, PythonExport=None):
        if Module is None:
            self.Module = []
        else:
            self.Module = Module
        if PythonExport is None:
            self.PythonExport = []
        else:
            self.PythonExport = PythonExport
    def factory(*args_, **kwargs_):
        if GenerateModel.subclass:
            return GenerateModel.subclass(*args_, **kwargs_)
        else:
            return GenerateModel(*args_, **kwargs_)
    factory = staticmethod(factory)
    def getModule(self):
        return self.Module
    def setModule(self, Module):
        self.Module = Module
    def addModule(self, value):
        self.Module.append(value)
    def insertModule(self, index, value):
        self.Module[index] = value
    def getPythonexport(self):
        return self.PythonExport
    def setPythonexport(self, PythonExport):
        self.PythonExport = PythonExport
    def addPythonexport(self, value):
        self.PythonExport.append(value)
    def insertPythonexport(self, index, value):
        self.PythonExport[index] = value
    def export(self, outfile, level, name_="GenerateModel"):
        """Write this element and its children as XML to *outfile*."""
        showIndent(outfile, level)
        outfile.write("<%s>\n" % name_)
        self.exportChildren(outfile, level + 1, name_)
        showIndent(outfile, level)
        outfile.write("</%s>\n" % name_)
    def exportAttributes(self, outfile, level, name_="GenerateModel"):
        # no XML attributes on this element
        pass
    def exportChildren(self, outfile, level, name_="GenerateModel"):
        for Module_ in self.getModule():
            Module_.export(outfile, level)
        for PythonExport_ in self.getPythonexport():
            PythonExport_.export(outfile, level)
    def exportLiteral(self, outfile, level, name_="GenerateModel"):
        """Write this element as a Python literal to *outfile*."""
        level += 1
        self.exportLiteralAttributes(outfile, level, name_)
        self.exportLiteralChildren(outfile, level, name_)
    def exportLiteralAttributes(self, outfile, level, name_):
        # no XML attributes on this element
        pass
    def exportLiteralChildren(self, outfile, level, name_):
        showIndent(outfile, level)
        outfile.write("Module=[\n")
        level += 1
        for Module in self.Module:
            showIndent(outfile, level)
            outfile.write("Module(\n")
            Module.exportLiteral(outfile, level)
            showIndent(outfile, level)
            outfile.write("),\n")
        level -= 1
        showIndent(outfile, level)
        outfile.write("],\n")
        showIndent(outfile, level)
        outfile.write("PythonExport=[\n")
        level += 1
        for PythonExport in self.PythonExport:
            showIndent(outfile, level)
            outfile.write("PythonExport(\n")
            PythonExport.exportLiteral(outfile, level)
            showIndent(outfile, level)
            outfile.write("),\n")
        level -= 1
        showIndent(outfile, level)
        outfile.write("],\n")
    def build(self, node_):
        """Populate this object from a DOM element node."""
        attrs = node_.attributes
        self.buildAttributes(attrs)
        for child_ in node_.childNodes:
            nodeName_ = child_.nodeName.split(":")[-1]
            self.buildChildren(child_, nodeName_)
    def buildAttributes(self, attrs):
        # no XML attributes on this element
        pass
    def buildChildren(self, child_, nodeName_):
        # dispatch each DOM child element to its generated class
        if child_.nodeType == Node.ELEMENT_NODE and nodeName_ == "Module":
            obj_ = Module.factory()
            obj_.build(child_)
            self.Module.append(obj_)
        elif child_.nodeType == Node.ELEMENT_NODE and nodeName_ == "PythonExport":
            obj_ = PythonExport.factory()
            obj_.build(child_)
            self.PythonExport.append(obj_)
# end class GenerateModel
class PythonExport:
    """Model element describing one C++ class exported to Python.

    Holds the XML attributes of a ``<PythonExport>`` element (names,
    includes, feature flags) plus its child elements (Documentation,
    Methode list, Attribute list, optional Sequence and three free-text
    declaration blocks).  The get*/set* accessor API is kept verbatim
    for compatibility with the rest of the generated model code.
    """

    subclass = None

    # (attribute name, parsed as boolean?, written only when not None?)
    # in XML serialisation order.  Drives exportAttributes,
    # exportLiteralAttributes and buildAttributes below.
    _ATTR_SPEC = (
        ("Name", False, False),
        ("PythonName", False, True),
        ("Include", False, False),
        ("Father", False, False),
        ("Twin", False, False),
        ("Namespace", False, False),
        ("FatherInclude", False, False),
        ("FatherNamespace", False, False),
        ("Constructor", True, True),
        ("NumberProtocol", True, True),
        ("RichCompare", True, True),
        ("TwinPointer", False, False),
        ("Delete", True, True),
        ("Reference", True, True),
        ("Initialization", True, True),
        ("DisableNotify", True, True),
        ("DescriptorGetter", True, True),
        ("DescriptorSetter", True, True),
    )

    def __init__(
        self,
        Name="",
        PythonName="",
        Include="",
        Father="",
        Twin="",
        Namespace="",
        FatherInclude="",
        FatherNamespace="",
        Constructor=0,
        NumberProtocol=0,
        RichCompare=0,
        TwinPointer="",
        Delete=0,
        Reference=0,
        Initialization=0,
        DisableNotify=0,
        DescriptorGetter=0,
        DescriptorSetter=0,
        Documentation=None,
        Methode=None,
        Attribute=None,
        Sequence=None,
        CustomAttributes="",
        ClassDeclarations="",
        ForwardDeclarations="",
    ):
        self.Name = Name
        self.PythonName = PythonName
        self.Include = Include
        self.Father = Father
        self.Twin = Twin
        self.Namespace = Namespace
        self.FatherInclude = FatherInclude
        self.FatherNamespace = FatherNamespace
        self.Constructor = Constructor
        self.NumberProtocol = NumberProtocol
        self.RichCompare = RichCompare
        self.TwinPointer = TwinPointer
        self.Delete = Delete
        self.Reference = Reference
        self.Initialization = Initialization
        self.DisableNotify = DisableNotify
        self.DescriptorGetter = DescriptorGetter
        self.DescriptorSetter = DescriptorSetter
        self.Documentation = Documentation
        # List children default to fresh lists, never a shared default.
        self.Methode = [] if Methode is None else Methode
        self.Attribute = [] if Attribute is None else Attribute
        self.Sequence = Sequence
        self.CustomAttributes = CustomAttributes
        self.ClassDeclarations = ClassDeclarations
        self.ForwardDeclarations = ForwardDeclarations

    @staticmethod
    def factory(*args_, **kwargs_):
        # Honour the generateDS subclass hook when one is registered.
        cls = PythonExport.subclass or PythonExport
        return cls(*args_, **kwargs_)

    # -- generated-style accessors (public API, kept for compatibility) --

    def getDocumentation(self):
        return self.Documentation

    def setDocumentation(self, Documentation):
        self.Documentation = Documentation

    def getMethode(self):
        return self.Methode

    def setMethode(self, Methode):
        self.Methode = Methode

    def addMethode(self, value):
        self.Methode.append(value)

    def insertMethode(self, index, value):
        self.Methode[index] = value

    def getAttribute(self):
        return self.Attribute

    def setAttribute(self, Attribute):
        self.Attribute = Attribute

    def addAttribute(self, value):
        self.Attribute.append(value)

    def insertAttribute(self, index, value):
        self.Attribute[index] = value

    def getSequence(self):
        return self.Sequence

    def setSequence(self, Sequence):
        self.Sequence = Sequence

    def getCustomattributes(self):
        return self.CustomAttributes

    def setCustomattributes(self, CustomAttributes):
        self.CustomAttributes = CustomAttributes

    def getClassdeclarations(self):
        return self.ClassDeclarations

    def setClassdeclarations(self, ClassDeclarations):
        self.ClassDeclarations = ClassDeclarations

    def getForwarddeclarations(self):
        return self.ForwardDeclarations

    def setForwarddeclarations(self, ForwardDeclarations):
        self.ForwardDeclarations = ForwardDeclarations

    def getName(self):
        return self.Name

    def setName(self, Name):
        self.Name = Name

    def getPythonname(self):
        return self.PythonName

    def setPythonname(self, PythonName):
        self.PythonName = PythonName

    def getInclude(self):
        return self.Include

    def setInclude(self, Include):
        self.Include = Include

    def getFather(self):
        return self.Father

    def setFather(self, Father):
        self.Father = Father

    def getTwin(self):
        return self.Twin

    def setTwin(self, Twin):
        self.Twin = Twin

    def getNamespace(self):
        return self.Namespace

    def setNamespace(self, Namespace):
        self.Namespace = Namespace

    def getFatherinclude(self):
        return self.FatherInclude

    def setFatherinclude(self, FatherInclude):
        self.FatherInclude = FatherInclude

    def getFathernamespace(self):
        return self.FatherNamespace

    def setFathernamespace(self, FatherNamespace):
        self.FatherNamespace = FatherNamespace

    def getConstructor(self):
        return self.Constructor

    def setConstructor(self, Constructor):
        self.Constructor = Constructor

    def getNumberprotocol(self):
        return self.NumberProtocol

    def setNumberprotocol(self, NumberProtocol):
        self.NumberProtocol = NumberProtocol

    def getRichcompare(self):
        return self.RichCompare

    def setRichcompare(self, RichCompare):
        self.RichCompare = RichCompare

    def getTwinpointer(self):
        return self.TwinPointer

    def setTwinpointer(self, TwinPointer):
        self.TwinPointer = TwinPointer

    def getDelete(self):
        return self.Delete

    def setDelete(self, Delete):
        self.Delete = Delete

    def getReference(self):
        return self.Reference

    def setReference(self, Reference):
        self.Reference = Reference

    def getInitialization(self):
        return self.Initialization

    def setInitialization(self, Initialization):
        self.Initialization = Initialization

    def getDisablenotify(self):
        return self.DisableNotify

    def setDisablenotify(self, DisableNotify):
        self.DisableNotify = DisableNotify

    def getDescriptorgetter(self):
        return self.DescriptorGetter

    def setDescriptorgetter(self, DescriptorGetter):
        self.DescriptorGetter = DescriptorGetter

    def getDescriptorsetter(self):
        return self.DescriptorSetter

    def setDescriptorsetter(self, DescriptorSetter):
        self.DescriptorSetter = DescriptorSetter

    # -- XML serialisation --

    def export(self, outfile, level, name_="PythonExport"):
        showIndent(outfile, level)
        outfile.write("<" + name_)
        self.exportAttributes(outfile, level, name_="PythonExport")
        outfile.write(">\n")
        self.exportChildren(outfile, level + 1, name_)
        showIndent(outfile, level)
        outfile.write("</" + name_ + ">\n")

    def exportAttributes(self, outfile, level, name_="PythonExport"):
        # Emit every attribute in spec order; conditional ones are
        # skipped only when explicitly set to None.
        for attr, _is_bool, conditional in self._ATTR_SPEC:
            value = getattr(self, attr)
            if not conditional or value is not None:
                outfile.write(' %s="%s"' % (attr, value))

    def exportChildren(self, outfile, level, name_="PythonExport"):
        if self.Documentation:
            self.Documentation.export(outfile, level)
        for meth in self.Methode:
            meth.export(outfile, level)
        for attr in self.Attribute:
            attr.export(outfile, level)
        if self.Sequence:
            self.Sequence.export(outfile, level)
        # The three free-text blocks are always written, XML-escaped.
        for tag, text in (
            ("CustomAttributes", self.CustomAttributes),
            ("ClassDeclarations", self.ClassDeclarations),
            ("ForwardDeclarations", self.ForwardDeclarations),
        ):
            showIndent(outfile, level)
            outfile.write("<%s>%s</%s>\n" % (tag, quote_xml(text), tag))

    # -- Python-literal serialisation --

    def exportLiteral(self, outfile, level, name_="PythonExport"):
        self.exportLiteralAttributes(outfile, level + 1, name_)
        self.exportLiteralChildren(outfile, level + 1, name_)

    def exportLiteralAttributes(self, outfile, level, name_):
        for attr, _is_bool, _conditional in self._ATTR_SPEC:
            showIndent(outfile, level)
            outfile.write('%s = "%s",\n' % (attr, getattr(self, attr)))

    def exportLiteralChildren(self, outfile, level, name_):
        if self.Documentation:
            showIndent(outfile, level)
            outfile.write("Documentation=Documentation(\n")
            self.Documentation.exportLiteral(outfile, level)
            showIndent(outfile, level)
            outfile.write("),\n")
        for tag, items in (("Methode", self.Methode), ("Attribute", self.Attribute)):
            showIndent(outfile, level)
            outfile.write("%s=[\n" % tag)
            for item in items:
                showIndent(outfile, level + 1)
                outfile.write("%s(\n" % tag)
                item.exportLiteral(outfile, level + 1)
                showIndent(outfile, level + 1)
                outfile.write("),\n")
            showIndent(outfile, level)
            outfile.write("],\n")
        if self.Sequence:
            showIndent(outfile, level)
            outfile.write("Sequence=Sequence(\n")
            self.Sequence.exportLiteral(outfile, level)
            showIndent(outfile, level)
            outfile.write("),\n")
        for tag, text in (
            ("CustomAttributes", self.CustomAttributes),
            ("ClassDeclarations", self.ClassDeclarations),
            ("ForwardDeclarations", self.ForwardDeclarations),
        ):
            showIndent(outfile, level)
            outfile.write("%s=%s,\n" % (tag, quote_python(text)))

    # -- DOM deserialisation --

    def build(self, node_):
        self.buildAttributes(node_.attributes)
        for child_ in node_.childNodes:
            self.buildChildren(child_, child_.nodeName.split(":")[-1])

    def buildAttributes(self, attrs):
        for attr, is_bool, _conditional in self._ATTR_SPEC:
            node = attrs.get(attr)
            if not node:
                continue
            if not is_bool:
                setattr(self, attr, node.value)
            elif node.value in ("true", "1"):
                setattr(self, attr, 1)
            elif node.value in ("false", "0"):
                setattr(self, attr, 0)
            else:
                raise ValueError("Bad boolean attribute (%s)" % attr)

    def buildChildren(self, child_, nodeName_):
        if child_.nodeType != Node.ELEMENT_NODE:
            return
        if nodeName_ == "Documentation":
            obj_ = Documentation.factory()
            obj_.build(child_)
            self.Documentation = obj_
        elif nodeName_ == "Methode":
            obj_ = Methode.factory()
            obj_.build(child_)
            self.Methode.append(obj_)
        elif nodeName_ == "Attribute":
            obj_ = Attribute.factory()
            obj_.build(child_)
            self.Attribute.append(obj_)
        elif nodeName_ == "Sequence":
            obj_ = Sequence.factory()
            obj_.build(child_)
            self.Sequence = obj_
        elif nodeName_ in (
            "CustomAttributes",
            "ClassDeclarations",
            "ForwardDeclarations",
        ):
            # Free-text elements: concatenate all child node text.
            setattr(
                self,
                nodeName_,
                "".join(node.nodeValue for node in child_.childNodes),
            )
# end class PythonExport
class Methode:
    """Model element describing one exported method of a PythonExport class.

    Carries the method name, five boolean qualifier flags, an optional
    Documentation child and a list of Parameter children.
    """

    subclass = None

    # Boolean qualifier attributes, in XML serialisation order.
    _FLAGS = ("Const", "Keyword", "NoArgs", "Class", "Static")

    def __init__(
        self,
        Name="",
        Const=0,
        Keyword=0,
        NoArgs=0,
        Class=0,
        Static=0,
        Documentation=None,
        Parameter=None,
    ):
        self.Name = Name
        self.Const = Const
        self.Keyword = Keyword
        self.NoArgs = NoArgs
        self.Class = Class
        self.Static = Static
        self.Documentation = Documentation
        # Parameter list defaults to a fresh list, never a shared default.
        self.Parameter = [] if Parameter is None else Parameter

    @staticmethod
    def factory(*args_, **kwargs_):
        # Honour the generateDS subclass hook when one is registered.
        cls = Methode.subclass or Methode
        return cls(*args_, **kwargs_)

    # -- generated-style accessors (public API, kept for compatibility) --

    def getDocumentation(self):
        return self.Documentation

    def setDocumentation(self, Documentation):
        self.Documentation = Documentation

    def getParameter(self):
        return self.Parameter

    def setParameter(self, Parameter):
        self.Parameter = Parameter

    def addParameter(self, value):
        self.Parameter.append(value)

    def insertParameter(self, index, value):
        self.Parameter[index] = value

    def getName(self):
        return self.Name

    def setName(self, Name):
        self.Name = Name

    def getConst(self):
        return self.Const

    def setConst(self, Const):
        self.Const = Const

    def getKeyword(self):
        return self.Keyword

    def setKeyword(self, Keyword):
        self.Keyword = Keyword

    def getNoargs(self):
        return self.NoArgs

    def setNoargs(self, NoArgs):
        self.NoArgs = NoArgs

    def getClass(self):
        return self.Class

    def setClass(self, Class):
        self.Class = Class

    def getStatic(self):
        return self.Static

    def setStatic(self, Static):
        self.Static = Static

    # -- XML serialisation --

    def export(self, outfile, level, name_="Methode"):
        showIndent(outfile, level)
        outfile.write("<" + name_)
        self.exportAttributes(outfile, level, name_="Methode")
        outfile.write(">\n")
        self.exportChildren(outfile, level + 1, name_)
        showIndent(outfile, level)
        outfile.write("</" + name_ + ">\n")

    def exportAttributes(self, outfile, level, name_="Methode"):
        outfile.write(' Name="%s"' % (self.Name,))
        # Flags are skipped only when explicitly set to None.
        for flag in self._FLAGS:
            value = getattr(self, flag)
            if value is not None:
                outfile.write(' %s="%s"' % (flag, value))

    def exportChildren(self, outfile, level, name_="Methode"):
        if self.Documentation:
            self.Documentation.export(outfile, level)
        for param in self.Parameter:
            param.export(outfile, level)

    # -- Python-literal serialisation --

    def exportLiteral(self, outfile, level, name_="Methode"):
        self.exportLiteralAttributes(outfile, level + 1, name_)
        self.exportLiteralChildren(outfile, level + 1, name_)

    def exportLiteralAttributes(self, outfile, level, name_):
        showIndent(outfile, level)
        outfile.write('Name = "%s",\n' % (self.Name,))
        for flag in self._FLAGS:
            showIndent(outfile, level)
            outfile.write('%s = "%s",\n' % (flag, getattr(self, flag)))

    def exportLiteralChildren(self, outfile, level, name_):
        if self.Documentation:
            showIndent(outfile, level)
            outfile.write("Documentation=Documentation(\n")
            self.Documentation.exportLiteral(outfile, level)
            showIndent(outfile, level)
            outfile.write("),\n")
        showIndent(outfile, level)
        outfile.write("Parameter=[\n")
        for param in self.Parameter:
            showIndent(outfile, level + 1)
            outfile.write("Parameter(\n")
            param.exportLiteral(outfile, level + 1)
            showIndent(outfile, level + 1)
            outfile.write("),\n")
        showIndent(outfile, level)
        outfile.write("],\n")

    # -- DOM deserialisation --

    def build(self, node_):
        self.buildAttributes(node_.attributes)
        for child_ in node_.childNodes:
            self.buildChildren(child_, child_.nodeName.split(":")[-1])

    def buildAttributes(self, attrs):
        node = attrs.get("Name")
        if node:
            self.Name = node.value
        for flag in self._FLAGS:
            node = attrs.get(flag)
            if not node:
                continue
            if node.value in ("true", "1"):
                setattr(self, flag, 1)
            elif node.value in ("false", "0"):
                setattr(self, flag, 0)
            else:
                raise ValueError("Bad boolean attribute (%s)" % flag)

    def buildChildren(self, child_, nodeName_):
        if child_.nodeType != Node.ELEMENT_NODE:
            return
        if nodeName_ == "Documentation":
            obj_ = Documentation.factory()
            obj_.build(child_)
            self.Documentation = obj_
        elif nodeName_ == "Parameter":
            obj_ = Parameter.factory()
            obj_.build(child_)
            self.Parameter.append(obj_)
# end class Methode
class Attribute:
    """Model element describing one exported attribute (property).

    Carries the attribute name, a ReadOnly flag, an optional
    Documentation child and an optional single Parameter child.
    """

    subclass = None

    def __init__(self, Name="", ReadOnly=0, Documentation=None, Parameter=None):
        self.Name = Name
        self.ReadOnly = ReadOnly
        self.Documentation = Documentation
        self.Parameter = Parameter

    @staticmethod
    def factory(*args_, **kwargs_):
        # Honour the generateDS subclass hook when one is registered.
        cls = Attribute.subclass or Attribute
        return cls(*args_, **kwargs_)

    # -- generated-style accessors (public API, kept for compatibility) --

    def getDocumentation(self):
        return self.Documentation

    def setDocumentation(self, Documentation):
        self.Documentation = Documentation

    def getParameter(self):
        return self.Parameter

    def setParameter(self, Parameter):
        self.Parameter = Parameter

    def getName(self):
        return self.Name

    def setName(self, Name):
        self.Name = Name

    def getReadonly(self):
        return self.ReadOnly

    def setReadonly(self, ReadOnly):
        self.ReadOnly = ReadOnly

    # -- XML serialisation --

    def export(self, outfile, level, name_="Attribute"):
        showIndent(outfile, level)
        outfile.write("<" + name_)
        self.exportAttributes(outfile, level, name_="Attribute")
        outfile.write(">\n")
        self.exportChildren(outfile, level + 1, name_)
        showIndent(outfile, level)
        outfile.write("</" + name_ + ">\n")

    def exportAttributes(self, outfile, level, name_="Attribute"):
        outfile.write(' Name="%s" ReadOnly="%s"' % (self.Name, self.ReadOnly))

    def exportChildren(self, outfile, level, name_="Attribute"):
        if self.Documentation:
            self.Documentation.export(outfile, level)
        if self.Parameter:
            self.Parameter.export(outfile, level)

    # -- Python-literal serialisation --

    def exportLiteral(self, outfile, level, name_="Attribute"):
        self.exportLiteralAttributes(outfile, level + 1, name_)
        self.exportLiteralChildren(outfile, level + 1, name_)

    def exportLiteralAttributes(self, outfile, level, name_):
        showIndent(outfile, level)
        outfile.write('Name = "%s",\n' % (self.Name,))
        showIndent(outfile, level)
        outfile.write('ReadOnly = "%s",\n' % (self.ReadOnly,))

    def exportLiteralChildren(self, outfile, level, name_):
        for tag, child in (
            ("Documentation", self.Documentation),
            ("Parameter", self.Parameter),
        ):
            if child:
                showIndent(outfile, level)
                outfile.write("%s=%s(\n" % (tag, tag))
                child.exportLiteral(outfile, level)
                showIndent(outfile, level)
                outfile.write("),\n")

    # -- DOM deserialisation --

    def build(self, node_):
        self.buildAttributes(node_.attributes)
        for child_ in node_.childNodes:
            self.buildChildren(child_, child_.nodeName.split(":")[-1])

    def buildAttributes(self, attrs):
        node = attrs.get("Name")
        if node:
            self.Name = node.value
        node = attrs.get("ReadOnly")
        if node:
            if node.value in ("true", "1"):
                self.ReadOnly = 1
            elif node.value in ("false", "0"):
                self.ReadOnly = 0
            else:
                raise ValueError("Bad boolean attribute (ReadOnly)")

    def buildChildren(self, child_, nodeName_):
        if child_.nodeType != Node.ELEMENT_NODE:
            return
        if nodeName_ == "Documentation":
            obj_ = Documentation.factory()
            obj_.build(child_)
            self.Documentation = obj_
        elif nodeName_ == "Parameter":
            obj_ = Parameter.factory()
            obj_.build(child_)
            self.Parameter = obj_
# end class Attribute
class Sequence:
    """Model element selecting which sequence/mapping protocol slots to emit.

    Each attribute is a 0/1 flag naming a CPython sq_*/mp_* slot; the
    element's text content is kept in ``valueOf_``.
    """

    subclass = None

    # Slot flags in XML serialisation order; all parsed as booleans.
    _SLOTS = (
        "sq_length",
        "sq_concat",
        "sq_repeat",
        "sq_item",
        "mp_subscript",
        "sq_ass_item",
        "mp_ass_subscript",
        "sq_contains",
        "sq_inplace_concat",
        "sq_inplace_repeat",
    )

    def __init__(
        self,
        sq_length=0,
        sq_concat=0,
        sq_repeat=0,
        sq_item=0,
        mp_subscript=0,
        sq_ass_item=0,
        mp_ass_subscript=0,
        sq_contains=0,
        sq_inplace_concat=0,
        sq_inplace_repeat=0,
        valueOf_="",
    ):
        self.sq_length = sq_length
        self.sq_concat = sq_concat
        self.sq_repeat = sq_repeat
        self.sq_item = sq_item
        self.mp_subscript = mp_subscript
        self.sq_ass_item = sq_ass_item
        self.mp_ass_subscript = mp_ass_subscript
        self.sq_contains = sq_contains
        self.sq_inplace_concat = sq_inplace_concat
        self.sq_inplace_repeat = sq_inplace_repeat
        self.valueOf_ = valueOf_

    @staticmethod
    def factory(*args_, **kwargs_):
        # Honour the generateDS subclass hook when one is registered.
        cls = Sequence.subclass or Sequence
        return cls(*args_, **kwargs_)

    # -- generated-style accessors (public API, kept for compatibility) --

    def getSq_length(self):
        return self.sq_length

    def setSq_length(self, sq_length):
        self.sq_length = sq_length

    def getSq_concat(self):
        return self.sq_concat

    def setSq_concat(self, sq_concat):
        self.sq_concat = sq_concat

    def getSq_repeat(self):
        return self.sq_repeat

    def setSq_repeat(self, sq_repeat):
        self.sq_repeat = sq_repeat

    def getSq_item(self):
        return self.sq_item

    def setSq_item(self, sq_item):
        self.sq_item = sq_item

    def getMp_subscript(self):
        return self.mp_subscript

    def setMp_subscript(self, mp_subscript):
        self.mp_subscript = mp_subscript

    def getSq_ass_item(self):
        return self.sq_ass_item

    def setSq_ass_item(self, sq_ass_item):
        self.sq_ass_item = sq_ass_item

    def getMp_ass_subscript(self):
        return self.mp_ass_subscript

    def setMp_ass_subscript(self, mp_ass_subscript):
        self.mp_ass_subscript = mp_ass_subscript

    def getSq_contains(self):
        return self.sq_contains

    def setSq_contains(self, sq_contains):
        self.sq_contains = sq_contains

    def getSq_inplace_concat(self):
        return self.sq_inplace_concat

    def setSq_inplace_concat(self, sq_inplace_concat):
        self.sq_inplace_concat = sq_inplace_concat

    def getSq_inplace_repeat(self):
        return self.sq_inplace_repeat

    def setSq_inplace_repeat(self, sq_inplace_repeat):
        self.sq_inplace_repeat = sq_inplace_repeat

    def getValueOf_(self):
        return self.valueOf_

    def setValueOf_(self, valueOf_):
        self.valueOf_ = valueOf_

    # -- XML serialisation --

    def export(self, outfile, level, name_="Sequence"):
        showIndent(outfile, level)
        outfile.write("<" + name_)
        self.exportAttributes(outfile, level, name_="Sequence")
        outfile.write(">\n")
        self.exportChildren(outfile, level + 1, name_)
        showIndent(outfile, level)
        outfile.write("</" + name_ + ">\n")

    def exportAttributes(self, outfile, level, name_="Sequence"):
        # All slot flags are written unconditionally, in spec order.
        for slot in self._SLOTS:
            outfile.write(' %s="%s"' % (slot, getattr(self, slot)))

    def exportChildren(self, outfile, level, name_="Sequence"):
        outfile.write(self.valueOf_)

    # -- Python-literal serialisation --

    def exportLiteral(self, outfile, level, name_="Sequence"):
        self.exportLiteralAttributes(outfile, level + 1, name_)
        self.exportLiteralChildren(outfile, level + 1, name_)

    def exportLiteralAttributes(self, outfile, level, name_):
        for slot in self._SLOTS:
            showIndent(outfile, level)
            outfile.write('%s = "%s",\n' % (slot, getattr(self, slot)))

    def exportLiteralChildren(self, outfile, level, name_):
        showIndent(outfile, level)
        outfile.write('valueOf_ = "%s",\n' % (self.valueOf_,))

    # -- DOM deserialisation --

    def build(self, node_):
        self.buildAttributes(node_.attributes)
        for child_ in node_.childNodes:
            self.buildChildren(child_, child_.nodeName.split(":")[-1])

    def buildAttributes(self, attrs):
        for slot in self._SLOTS:
            node = attrs.get(slot)
            if not node:
                continue
            if node.value in ("true", "1"):
                setattr(self, slot, 1)
            elif node.value in ("false", "0"):
                setattr(self, slot, 0)
            else:
                raise ValueError("Bad boolean attribute (%s)" % slot)

    def buildChildren(self, child_, nodeName_):
        # Replace valueOf_ with the concatenated text content.
        self.valueOf_ = "".join(
            child.nodeValue
            for child in child_.childNodes
            if child.nodeType == Node.TEXT_NODE
        )
# end class Sequence
class Module:
    """Model element describing a generated module.

    Carries the module name plus optional Documentation, Dependencies
    and Content children.
    """

    subclass = None

    def __init__(self, Name="", Documentation=None, Dependencies=None, Content=None):
        self.Name = Name
        self.Documentation = Documentation
        self.Dependencies = Dependencies
        self.Content = Content

    @staticmethod
    def factory(*args_, **kwargs_):
        # Honour the generateDS subclass hook when one is registered.
        cls = Module.subclass or Module
        return cls(*args_, **kwargs_)

    # -- generated-style accessors (public API, kept for compatibility) --

    def getDocumentation(self):
        return self.Documentation

    def setDocumentation(self, Documentation):
        self.Documentation = Documentation

    def getDependencies(self):
        return self.Dependencies

    def setDependencies(self, Dependencies):
        self.Dependencies = Dependencies

    def getContent(self):
        return self.Content

    def setContent(self, Content):
        self.Content = Content

    def getName(self):
        return self.Name

    def setName(self, Name):
        self.Name = Name

    # -- XML serialisation --

    def export(self, outfile, level, name_="Module"):
        showIndent(outfile, level)
        outfile.write("<" + name_)
        self.exportAttributes(outfile, level, name_="Module")
        outfile.write(">\n")
        self.exportChildren(outfile, level + 1, name_)
        showIndent(outfile, level)
        outfile.write("</" + name_ + ">\n")

    def exportAttributes(self, outfile, level, name_="Module"):
        outfile.write(' Name="%s"' % (self.Name,))

    def exportChildren(self, outfile, level, name_="Module"):
        for child in (self.Documentation, self.Dependencies, self.Content):
            if child:
                child.export(outfile, level)

    # -- Python-literal serialisation --

    def exportLiteral(self, outfile, level, name_="Module"):
        self.exportLiteralAttributes(outfile, level + 1, name_)
        self.exportLiteralChildren(outfile, level + 1, name_)

    def exportLiteralAttributes(self, outfile, level, name_):
        showIndent(outfile, level)
        outfile.write('Name = "%s",\n' % (self.Name,))

    def exportLiteralChildren(self, outfile, level, name_):
        for tag, child in (
            ("Documentation", self.Documentation),
            ("Dependencies", self.Dependencies),
            ("Content", self.Content),
        ):
            if child:
                showIndent(outfile, level)
                outfile.write("%s=%s(\n" % (tag, tag))
                child.exportLiteral(outfile, level)
                showIndent(outfile, level)
                outfile.write("),\n")

    # -- DOM deserialisation --

    def build(self, node_):
        self.buildAttributes(node_.attributes)
        for child_ in node_.childNodes:
            self.buildChildren(child_, child_.nodeName.split(":")[-1])

    def buildAttributes(self, attrs):
        node = attrs.get("Name")
        if node:
            self.Name = node.value

    def buildChildren(self, child_, nodeName_):
        if child_.nodeType != Node.ELEMENT_NODE:
            return
        if nodeName_ == "Documentation":
            obj_ = Documentation.factory()
            obj_.build(child_)
            self.Documentation = obj_
        elif nodeName_ == "Dependencies":
            obj_ = Dependencies.factory()
            obj_.build(child_)
            self.Dependencies = obj_
        elif nodeName_ == "Content":
            obj_ = Content.factory()
            obj_.build(child_)
            self.Content = obj_
# end class Module
class Dependencies:
    """Model for the <Dependencies> element: an ordered list of Module children."""
    subclass = None
    def __init__(self, Module=None):
        # Default to a fresh list so instances never share mutable state.
        self.Module = [] if Module is None else Module
    def factory(*args_, **kwargs_):
        # Delegate to the registered subclass when one has been installed.
        target = Dependencies.subclass or Dependencies
        return target(*args_, **kwargs_)
    factory = staticmethod(factory)
    def getModule(self):
        return self.Module
    def setModule(self, Module):
        self.Module = Module
    def addModule(self, value):
        self.Module.append(value)
    def insertModule(self, index, value):
        # NOTE: overwrites the slot at *index* (generated-code naming quirk).
        self.Module[index] = value
    def export(self, outfile, level, name_="Dependencies"):
        # Emit <Dependencies> ... </Dependencies> with indented children.
        write = outfile.write
        showIndent(outfile, level)
        write("<%s>\n" % name_)
        self.exportChildren(outfile, level + 1, name_)
        showIndent(outfile, level)
        write("</%s>\n" % name_)
    def exportAttributes(self, outfile, level, name_="Dependencies"):
        # This element carries no XML attributes.
        pass
    def exportLiteral(self, outfile, level, name_="Dependencies"):
        # Serialize as Python-literal constructor text (debug form).
        level += 1
        self.exportLiteralAttributes(outfile, level, name_)
        self.exportLiteralChildren(outfile, level, name_)
    def exportLiteralAttributes(self, outfile, level, name_):
        pass
    def exportChildren(self, outfile, level, name_="Dependencies"):
        for module in self.getModule():
            module.export(outfile, level)
    def exportLiteralChildren(self, outfile, level, name_):
        showIndent(outfile, level)
        outfile.write("Module=[\n")
        inner = level + 1
        for module in self.Module:
            showIndent(outfile, inner)
            outfile.write("Module(\n")
            module.exportLiteral(outfile, inner)
            showIndent(outfile, inner)
            outfile.write("),\n")
        showIndent(outfile, level)
        outfile.write("],\n")
    def build(self, node_):
        # Populate from a DOM node: attributes first, then child elements.
        self.buildAttributes(node_.attributes)
        for child in node_.childNodes:
            self.buildChildren(child, child.nodeName.split(":")[-1])
    def buildAttributes(self, attrs):
        pass
    def buildChildren(self, child_, nodeName_):
        if nodeName_ == "Module" and child_.nodeType == Node.ELEMENT_NODE:
            module = Module.factory()
            module.build(child_)
            self.Module.append(module)
# end class Dependencies
class Content:
    """Model for the <Content> element.

    Holds lists of Property, Feature and DocObject child objects, plus
    GuiCommand and PreferencesPage entries stored as plain strings.
    """
    subclass = None
    def __init__(
        self,
        Property=None,
        Feature=None,
        DocObject=None,
        GuiCommand=None,
        PreferencesPage=None,
    ):
        # Every child collection defaults to a fresh list so instances
        # never share mutable state.
        if Property is None:
            self.Property = []
        else:
            self.Property = Property
        if Feature is None:
            self.Feature = []
        else:
            self.Feature = Feature
        if DocObject is None:
            self.DocObject = []
        else:
            self.DocObject = DocObject
        if GuiCommand is None:
            self.GuiCommand = []
        else:
            self.GuiCommand = GuiCommand
        if PreferencesPage is None:
            self.PreferencesPage = []
        else:
            self.PreferencesPage = PreferencesPage
    def factory(*args_, **kwargs_):
        # Instantiate the registered subclass when one has been installed.
        if Content.subclass:
            return Content.subclass(*args_, **kwargs_)
        else:
            return Content(*args_, **kwargs_)
    factory = staticmethod(factory)
    def getProperty(self):
        return self.Property
    def setProperty(self, Property):
        self.Property = Property
    def addProperty(self, value):
        self.Property.append(value)
    def insertProperty(self, index, value):
        # NOTE: overwrites the slot at *index* (generated-code naming quirk).
        self.Property[index] = value
    def getFeature(self):
        return self.Feature
    def setFeature(self, Feature):
        self.Feature = Feature
    def addFeature(self, value):
        self.Feature.append(value)
    def insertFeature(self, index, value):
        self.Feature[index] = value
    def getDocobject(self):
        return self.DocObject
    def setDocobject(self, DocObject):
        self.DocObject = DocObject
    def addDocobject(self, value):
        self.DocObject.append(value)
    def insertDocobject(self, index, value):
        self.DocObject[index] = value
    def getGuicommand(self):
        return self.GuiCommand
    def setGuicommand(self, GuiCommand):
        self.GuiCommand = GuiCommand
    def addGuicommand(self, value):
        self.GuiCommand.append(value)
    def insertGuicommand(self, index, value):
        self.GuiCommand[index] = value
    def getPreferencespage(self):
        return self.PreferencesPage
    def setPreferencespage(self, PreferencesPage):
        self.PreferencesPage = PreferencesPage
    def addPreferencespage(self, value):
        self.PreferencesPage.append(value)
    def insertPreferencespage(self, index, value):
        self.PreferencesPage[index] = value
    def export(self, outfile, level, name_="Content"):
        # Serialize as an XML element with indented children; Content has
        # no attributes of its own.
        showIndent(outfile, level)
        outfile.write("<%s>\n" % name_)
        self.exportChildren(outfile, level + 1, name_)
        showIndent(outfile, level)
        outfile.write("</%s>\n" % name_)
    def exportAttributes(self, outfile, level, name_="Content"):
        pass
    def exportChildren(self, outfile, level, name_="Content"):
        # Object children delegate to their own export(); plain-string
        # children are wrapped in tags and escaped via quote_xml (defined
        # elsewhere in this file).
        for Property_ in self.getProperty():
            Property_.export(outfile, level)
        for Feature_ in self.getFeature():
            Feature_.export(outfile, level)
        for DocObject_ in self.getDocobject():
            DocObject_.export(outfile, level)
        for GuiCommand_ in self.getGuicommand():
            showIndent(outfile, level)
            outfile.write("<GuiCommand>%s</GuiCommand>\n" % quote_xml(GuiCommand_))
        for PreferencesPage_ in self.getPreferencespage():
            showIndent(outfile, level)
            outfile.write(
                "<PreferencesPage>%s</PreferencesPage>\n" % quote_xml(PreferencesPage_)
            )
    def exportLiteral(self, outfile, level, name_="Content"):
        # Serialize as Python-literal constructor text (debug form).
        level += 1
        self.exportLiteralAttributes(outfile, level, name_)
        self.exportLiteralChildren(outfile, level, name_)
    def exportLiteralAttributes(self, outfile, level, name_):
        pass
    def exportLiteralChildren(self, outfile, level, name_):
        showIndent(outfile, level)
        outfile.write("Property=[\n")
        level += 1
        for Property in self.Property:
            showIndent(outfile, level)
            outfile.write("Property(\n")
            Property.exportLiteral(outfile, level)
            showIndent(outfile, level)
            outfile.write("),\n")
        level -= 1
        showIndent(outfile, level)
        outfile.write("],\n")
        showIndent(outfile, level)
        outfile.write("Feature=[\n")
        level += 1
        for Feature in self.Feature:
            showIndent(outfile, level)
            outfile.write("Feature(\n")
            Feature.exportLiteral(outfile, level)
            showIndent(outfile, level)
            outfile.write("),\n")
        level -= 1
        showIndent(outfile, level)
        outfile.write("],\n")
        showIndent(outfile, level)
        outfile.write("DocObject=[\n")
        level += 1
        for DocObject in self.DocObject:
            showIndent(outfile, level)
            outfile.write("DocObject(\n")
            DocObject.exportLiteral(outfile, level)
            showIndent(outfile, level)
            outfile.write("),\n")
        level -= 1
        showIndent(outfile, level)
        outfile.write("],\n")
        showIndent(outfile, level)
        outfile.write("GuiCommand=[\n")
        level += 1
        for GuiCommand in self.GuiCommand:
            showIndent(outfile, level)
            outfile.write("%s,\n" % quote_python(GuiCommand))
        level -= 1
        showIndent(outfile, level)
        outfile.write("],\n")
        showIndent(outfile, level)
        outfile.write("PreferencesPage=[\n")
        level += 1
        for PreferencesPage in self.PreferencesPage:
            showIndent(outfile, level)
            outfile.write("%s,\n" % quote_python(PreferencesPage))
        level -= 1
        showIndent(outfile, level)
        outfile.write("],\n")
    def build(self, node_):
        # Populate from a DOM node: attributes first, then child elements
        # (namespace prefixes stripped from tag names).
        attrs = node_.attributes
        self.buildAttributes(attrs)
        for child_ in node_.childNodes:
            nodeName_ = child_.nodeName.split(":")[-1]
            self.buildChildren(child_, nodeName_)
    def buildAttributes(self, attrs):
        pass
    def buildChildren(self, child_, nodeName_):
        # Object-valued children are built recursively; GuiCommand and
        # PreferencesPage collect the concatenated text of their child nodes.
        if child_.nodeType == Node.ELEMENT_NODE and nodeName_ == "Property":
            obj_ = Property.factory()
            obj_.build(child_)
            self.Property.append(obj_)
        elif child_.nodeType == Node.ELEMENT_NODE and nodeName_ == "Feature":
            obj_ = Feature.factory()
            obj_.build(child_)
            self.Feature.append(obj_)
        elif child_.nodeType == Node.ELEMENT_NODE and nodeName_ == "DocObject":
            obj_ = DocObject.factory()
            obj_.build(child_)
            self.DocObject.append(obj_)
        elif child_.nodeType == Node.ELEMENT_NODE and nodeName_ == "GuiCommand":
            GuiCommand_ = ""
            for text__content_ in child_.childNodes:
                GuiCommand_ += text__content_.nodeValue
            self.GuiCommand.append(GuiCommand_)
        elif child_.nodeType == Node.ELEMENT_NODE and nodeName_ == "PreferencesPage":
            PreferencesPage_ = ""
            for text__content_ in child_.childNodes:
                PreferencesPage_ += text__content_.nodeValue
            self.PreferencesPage.append(PreferencesPage_)
# end class Content
class Feature:
    """Model for the <Feature> element.

    Carries a Name attribute, an optional Documentation child, a list of
    Property children and an optional ViewProvider child.
    """
    subclass = None
    def __init__(self, Name="", Documentation=None, Property=None, ViewProvider=None):
        self.Name = Name
        self.Documentation = Documentation
        # Default to a fresh list so instances never share mutable state.
        if Property is None:
            self.Property = []
        else:
            self.Property = Property
        self.ViewProvider = ViewProvider
    def factory(*args_, **kwargs_):
        # Instantiate the registered subclass when one has been installed.
        if Feature.subclass:
            return Feature.subclass(*args_, **kwargs_)
        else:
            return Feature(*args_, **kwargs_)
    factory = staticmethod(factory)
    def getDocumentation(self):
        return self.Documentation
    def setDocumentation(self, Documentation):
        self.Documentation = Documentation
    def getProperty(self):
        return self.Property
    def setProperty(self, Property):
        self.Property = Property
    def addProperty(self, value):
        self.Property.append(value)
    def insertProperty(self, index, value):
        # NOTE: overwrites the slot at *index* (generated-code naming quirk).
        self.Property[index] = value
    def getViewprovider(self):
        return self.ViewProvider
    def setViewprovider(self, ViewProvider):
        self.ViewProvider = ViewProvider
    def getName(self):
        return self.Name
    def setName(self, Name):
        self.Name = Name
    def export(self, outfile, level, name_="Feature"):
        # Serialize as an XML element: open tag, attributes, children, close tag.
        showIndent(outfile, level)
        outfile.write("<%s" % (name_,))
        self.exportAttributes(outfile, level, name_="Feature")
        outfile.write(">\n")
        self.exportChildren(outfile, level + 1, name_)
        showIndent(outfile, level)
        outfile.write("</%s>\n" % name_)
    def exportAttributes(self, outfile, level, name_="Feature"):
        # NOTE(review): Name is written without XML escaping.
        outfile.write(' Name="%s"' % (self.getName(),))
    def exportChildren(self, outfile, level, name_="Feature"):
        if self.Documentation:
            self.Documentation.export(outfile, level)
        for Property_ in self.getProperty():
            Property_.export(outfile, level)
        if self.ViewProvider:
            self.ViewProvider.export(outfile, level)
    def exportLiteral(self, outfile, level, name_="Feature"):
        # Serialize as Python-literal constructor text (debug form).
        level += 1
        self.exportLiteralAttributes(outfile, level, name_)
        self.exportLiteralChildren(outfile, level, name_)
    def exportLiteralAttributes(self, outfile, level, name_):
        showIndent(outfile, level)
        outfile.write('Name = "%s",\n' % (self.getName(),))
    def exportLiteralChildren(self, outfile, level, name_):
        if self.Documentation:
            showIndent(outfile, level)
            outfile.write("Documentation=Documentation(\n")
            self.Documentation.exportLiteral(outfile, level)
            showIndent(outfile, level)
            outfile.write("),\n")
        showIndent(outfile, level)
        outfile.write("Property=[\n")
        level += 1
        for Property in self.Property:
            showIndent(outfile, level)
            outfile.write("Property(\n")
            Property.exportLiteral(outfile, level)
            showIndent(outfile, level)
            outfile.write("),\n")
        level -= 1
        showIndent(outfile, level)
        outfile.write("],\n")
        if self.ViewProvider:
            showIndent(outfile, level)
            outfile.write("ViewProvider=ViewProvider(\n")
            self.ViewProvider.exportLiteral(outfile, level)
            showIndent(outfile, level)
            outfile.write("),\n")
    def build(self, node_):
        # Populate from a DOM node: attributes first, then child elements.
        attrs = node_.attributes
        self.buildAttributes(attrs)
        for child_ in node_.childNodes:
            nodeName_ = child_.nodeName.split(":")[-1]
            self.buildChildren(child_, nodeName_)
    def buildAttributes(self, attrs):
        if attrs.get("Name"):
            self.Name = attrs.get("Name").value
    def buildChildren(self, child_, nodeName_):
        # Dispatch on the child element's tag name; unknown tags are ignored.
        if child_.nodeType == Node.ELEMENT_NODE and nodeName_ == "Documentation":
            obj_ = Documentation.factory()
            obj_.build(child_)
            self.setDocumentation(obj_)
        elif child_.nodeType == Node.ELEMENT_NODE and nodeName_ == "Property":
            obj_ = Property.factory()
            obj_.build(child_)
            self.Property.append(obj_)
        elif child_.nodeType == Node.ELEMENT_NODE and nodeName_ == "ViewProvider":
            obj_ = ViewProvider.factory()
            obj_.build(child_)
            self.setViewprovider(obj_)
# end class Feature
class DocObject:
    """Model for the <DocObject> element.

    Carries a Name attribute, an optional Documentation child and a list
    of Property children.
    """
    subclass = None
    def __init__(self, Name="", Documentation=None, Property=None):
        self.Name = Name
        self.Documentation = Documentation
        # Default to a fresh list so instances never share mutable state.
        if Property is None:
            self.Property = []
        else:
            self.Property = Property
    def factory(*args_, **kwargs_):
        # Instantiate the registered subclass when one has been installed.
        if DocObject.subclass:
            return DocObject.subclass(*args_, **kwargs_)
        else:
            return DocObject(*args_, **kwargs_)
    factory = staticmethod(factory)
    def getDocumentation(self):
        return self.Documentation
    def setDocumentation(self, Documentation):
        self.Documentation = Documentation
    def getProperty(self):
        return self.Property
    def setProperty(self, Property):
        self.Property = Property
    def addProperty(self, value):
        self.Property.append(value)
    def insertProperty(self, index, value):
        # NOTE: overwrites the slot at *index* (generated-code naming quirk).
        self.Property[index] = value
    def getName(self):
        return self.Name
    def setName(self, Name):
        self.Name = Name
    def export(self, outfile, level, name_="DocObject"):
        # Serialize as an XML element: open tag, attributes, children, close tag.
        showIndent(outfile, level)
        outfile.write("<%s" % (name_,))
        self.exportAttributes(outfile, level, name_="DocObject")
        outfile.write(">\n")
        self.exportChildren(outfile, level + 1, name_)
        showIndent(outfile, level)
        outfile.write("</%s>\n" % name_)
    def exportAttributes(self, outfile, level, name_="DocObject"):
        # NOTE(review): Name is written without XML escaping.
        outfile.write(' Name="%s"' % (self.getName(),))
    def exportChildren(self, outfile, level, name_="DocObject"):
        if self.Documentation:
            self.Documentation.export(outfile, level)
        for Property_ in self.getProperty():
            Property_.export(outfile, level)
    def exportLiteral(self, outfile, level, name_="DocObject"):
        # Serialize as Python-literal constructor text (debug form).
        level += 1
        self.exportLiteralAttributes(outfile, level, name_)
        self.exportLiteralChildren(outfile, level, name_)
    def exportLiteralAttributes(self, outfile, level, name_):
        showIndent(outfile, level)
        outfile.write('Name = "%s",\n' % (self.getName(),))
    def exportLiteralChildren(self, outfile, level, name_):
        if self.Documentation:
            showIndent(outfile, level)
            outfile.write("Documentation=Documentation(\n")
            self.Documentation.exportLiteral(outfile, level)
            showIndent(outfile, level)
            outfile.write("),\n")
        showIndent(outfile, level)
        outfile.write("Property=[\n")
        level += 1
        for Property in self.Property:
            showIndent(outfile, level)
            outfile.write("Property(\n")
            Property.exportLiteral(outfile, level)
            showIndent(outfile, level)
            outfile.write("),\n")
        level -= 1
        showIndent(outfile, level)
        outfile.write("],\n")
    def build(self, node_):
        # Populate from a DOM node: attributes first, then child elements.
        attrs = node_.attributes
        self.buildAttributes(attrs)
        for child_ in node_.childNodes:
            nodeName_ = child_.nodeName.split(":")[-1]
            self.buildChildren(child_, nodeName_)
    def buildAttributes(self, attrs):
        if attrs.get("Name"):
            self.Name = attrs.get("Name").value
    def buildChildren(self, child_, nodeName_):
        # Dispatch on the child element's tag name; unknown tags are ignored.
        if child_.nodeType == Node.ELEMENT_NODE and nodeName_ == "Documentation":
            obj_ = Documentation.factory()
            obj_.build(child_)
            self.setDocumentation(obj_)
        elif child_.nodeType == Node.ELEMENT_NODE and nodeName_ == "Property":
            obj_ = Property.factory()
            obj_.build(child_)
            self.Property.append(obj_)
# end class DocObject
class Property:
    """Model for the <Property> element.

    Carries Name, Type and optional StartValue attributes plus an optional
    Documentation child.
    """
    subclass = None
    def __init__(self, Name="", Type="", StartValue="", Documentation=None):
        self.Name = Name
        self.Type = Type
        self.StartValue = StartValue
        self.Documentation = Documentation
    def factory(*args_, **kwargs_):
        # Instantiate the registered subclass when one has been installed.
        if Property.subclass:
            return Property.subclass(*args_, **kwargs_)
        else:
            return Property(*args_, **kwargs_)
    factory = staticmethod(factory)
    def getDocumentation(self):
        return self.Documentation
    def setDocumentation(self, Documentation):
        self.Documentation = Documentation
    def getName(self):
        return self.Name
    def setName(self, Name):
        self.Name = Name
    def getType(self):
        return self.Type
    def setType(self, Type):
        self.Type = Type
    def getStartvalue(self):
        return self.StartValue
    def setStartvalue(self, StartValue):
        self.StartValue = StartValue
    def export(self, outfile, level, name_="Property"):
        # Serialize as an XML element: open tag, attributes, children, close tag.
        showIndent(outfile, level)
        outfile.write("<%s" % (name_,))
        self.exportAttributes(outfile, level, name_="Property")
        outfile.write(">\n")
        self.exportChildren(outfile, level + 1, name_)
        showIndent(outfile, level)
        outfile.write("</%s>\n" % name_)
    def exportAttributes(self, outfile, level, name_="Property"):
        # NOTE(review): attribute values are written without XML escaping.
        outfile.write(' Name="%s"' % (self.getName(),))
        outfile.write(' Type="%s"' % (self.getType(),))
        # StartValue defaults to "" (truthy check is on None only), so this
        # attribute is normally always emitted.
        if self.getStartvalue() is not None:
            outfile.write(' StartValue="%s"' % (self.getStartvalue(),))
    def exportChildren(self, outfile, level, name_="Property"):
        if self.Documentation:
            self.Documentation.export(outfile, level)
    def exportLiteral(self, outfile, level, name_="Property"):
        # Serialize as Python-literal constructor text (debug form).
        level += 1
        self.exportLiteralAttributes(outfile, level, name_)
        self.exportLiteralChildren(outfile, level, name_)
    def exportLiteralAttributes(self, outfile, level, name_):
        showIndent(outfile, level)
        outfile.write('Name = "%s",\n' % (self.getName(),))
        showIndent(outfile, level)
        outfile.write('Type = "%s",\n' % (self.getType(),))
        showIndent(outfile, level)
        outfile.write('StartValue = "%s",\n' % (self.getStartvalue(),))
    def exportLiteralChildren(self, outfile, level, name_):
        if self.Documentation:
            showIndent(outfile, level)
            outfile.write("Documentation=Documentation(\n")
            self.Documentation.exportLiteral(outfile, level)
            showIndent(outfile, level)
            outfile.write("),\n")
    def build(self, node_):
        # Populate from a DOM node: attributes first, then child elements.
        attrs = node_.attributes
        self.buildAttributes(attrs)
        for child_ in node_.childNodes:
            nodeName_ = child_.nodeName.split(":")[-1]
            self.buildChildren(child_, nodeName_)
    def buildAttributes(self, attrs):
        if attrs.get("Name"):
            self.Name = attrs.get("Name").value
        if attrs.get("Type"):
            self.Type = attrs.get("Type").value
        if attrs.get("StartValue"):
            self.StartValue = attrs.get("StartValue").value
    def buildChildren(self, child_, nodeName_):
        if child_.nodeType == Node.ELEMENT_NODE and nodeName_ == "Documentation":
            obj_ = Documentation.factory()
            obj_.build(child_)
            self.setDocumentation(obj_)
# end class Property
class Documentation:
    """Model for the <Documentation> element.

    Holds an optional Author child plus DeveloperDocu and UserDocu text.
    """
    subclass = None
    def __init__(self, Author=None, DeveloperDocu="", UserDocu=""):
        self.Author = Author
        self.DeveloperDocu = DeveloperDocu
        self.UserDocu = UserDocu
    def factory(*args_, **kwargs_):
        # Instantiate the registered subclass when one has been installed.
        if Documentation.subclass:
            return Documentation.subclass(*args_, **kwargs_)
        else:
            return Documentation(*args_, **kwargs_)
    factory = staticmethod(factory)
    def getAuthor(self):
        return self.Author
    def setAuthor(self, Author):
        self.Author = Author
    def getDeveloperdocu(self):
        return self.DeveloperDocu
    def setDeveloperdocu(self, DeveloperDocu):
        self.DeveloperDocu = DeveloperDocu
    def getUserdocu(self):
        return self.UserDocu
    def setUserdocu(self, UserDocu):
        self.UserDocu = UserDocu
    def export(self, outfile, level, name_="Documentation"):
        # Serialize as an XML element; Documentation has no attributes.
        showIndent(outfile, level)
        outfile.write("<%s>\n" % name_)
        self.exportChildren(outfile, level + 1, name_)
        showIndent(outfile, level)
        outfile.write("</%s>\n" % name_)
    def exportAttributes(self, outfile, level, name_="Documentation"):
        pass
    def exportChildren(self, outfile, level, name_="Documentation"):
        # Text children are escaped via quote_xml (defined elsewhere in
        # this file) and always emitted, even when empty.
        if self.Author:
            self.Author.export(outfile, level)
        showIndent(outfile, level)
        outfile.write(
            "<DeveloperDocu>%s</DeveloperDocu>\n" % quote_xml(self.getDeveloperdocu())
        )
        showIndent(outfile, level)
        outfile.write("<UserDocu>%s</UserDocu>\n" % quote_xml(self.getUserdocu()))
    def exportLiteral(self, outfile, level, name_="Documentation"):
        # Serialize as Python-literal constructor text (debug form).
        level += 1
        self.exportLiteralAttributes(outfile, level, name_)
        self.exportLiteralChildren(outfile, level, name_)
    def exportLiteralAttributes(self, outfile, level, name_):
        pass
    def exportLiteralChildren(self, outfile, level, name_):
        if self.Author:
            showIndent(outfile, level)
            outfile.write("Author=Author(\n")
            self.Author.exportLiteral(outfile, level)
            showIndent(outfile, level)
            outfile.write("),\n")
        showIndent(outfile, level)
        outfile.write("DeveloperDocu=%s,\n" % quote_python(self.getDeveloperdocu()))
        showIndent(outfile, level)
        outfile.write("UserDocu=%s,\n" % quote_python(self.getUserdocu()))
    def build(self, node_):
        # Populate from a DOM node: attributes first, then child elements.
        attrs = node_.attributes
        self.buildAttributes(attrs)
        for child_ in node_.childNodes:
            nodeName_ = child_.nodeName.split(":")[-1]
            self.buildChildren(child_, nodeName_)
    def buildAttributes(self, attrs):
        pass
    def buildChildren(self, child_, nodeName_):
        # DeveloperDocu / UserDocu collect the concatenated text of their
        # child nodes; Author is built recursively.
        if child_.nodeType == Node.ELEMENT_NODE and nodeName_ == "Author":
            obj_ = Author.factory()
            obj_.build(child_)
            self.setAuthor(obj_)
        elif child_.nodeType == Node.ELEMENT_NODE and nodeName_ == "DeveloperDocu":
            DeveloperDocu_ = ""
            for text__content_ in child_.childNodes:
                DeveloperDocu_ += text__content_.nodeValue
            self.DeveloperDocu = DeveloperDocu_
        elif child_.nodeType == Node.ELEMENT_NODE and nodeName_ == "UserDocu":
            UserDocu_ = ""
            for text__content_ in child_.childNodes:
                UserDocu_ += text__content_.nodeValue
            self.UserDocu = UserDocu_
# end class Documentation
class Author:
    """Model for the <Author> element: Name/EMail/Licence attributes plus
    the element's raw text content in ``valueOf_``."""
    subclass = None
    def __init__(self, Name="", EMail="", Licence="", valueOf_=""):
        self.valueOf_ = valueOf_
        self.Name = Name
        self.EMail = EMail
        self.Licence = Licence
    def factory(*args_, **kwargs_):
        # Delegate to the registered subclass when one has been installed.
        target = Author.subclass or Author
        return target(*args_, **kwargs_)
    factory = staticmethod(factory)
    def getName(self):
        return self.Name
    def setName(self, Name):
        self.Name = Name
    def getEmail(self):
        return self.EMail
    def setEmail(self, EMail):
        self.EMail = EMail
    def getLicence(self):
        return self.Licence
    def setLicence(self, Licence):
        self.Licence = Licence
    def getValueOf_(self):
        return self.valueOf_
    def setValueOf_(self, valueOf_):
        self.valueOf_ = valueOf_
    def export(self, outfile, level, name_="Author"):
        # Serialize as an XML element: open tag, attributes, text, close tag.
        write = outfile.write
        showIndent(outfile, level)
        write("<%s" % (name_,))
        self.exportAttributes(outfile, level, name_="Author")
        write(">\n")
        self.exportChildren(outfile, level + 1, name_)
        showIndent(outfile, level)
        write("</%s>\n" % name_)
    def exportAttributes(self, outfile, level, name_="Author"):
        outfile.write(' Name="%s"' % (self.getName(),))
        outfile.write(' EMail="%s"' % (self.getEmail(),))
        licence = self.getLicence()
        if licence is not None:
            outfile.write(' Licence="%s"' % (licence,))
    def exportChildren(self, outfile, level, name_="Author"):
        # The element body is the raw character data gathered at parse time.
        outfile.write(self.valueOf_)
    def exportLiteral(self, outfile, level, name_="Author"):
        # Serialize as Python-literal constructor text (debug form).
        level += 1
        self.exportLiteralAttributes(outfile, level, name_)
        self.exportLiteralChildren(outfile, level, name_)
    def exportLiteralAttributes(self, outfile, level, name_):
        for label, value in (
            ("Name", self.getName()),
            ("EMail", self.getEmail()),
            ("Licence", self.getLicence()),
        ):
            showIndent(outfile, level)
            outfile.write('%s = "%s",\n' % (label, value))
    def exportLiteralChildren(self, outfile, level, name_):
        showIndent(outfile, level)
        outfile.write('valueOf_ = "%s",\n' % (self.valueOf_,))
    def build(self, node_):
        # Populate from a DOM node: attributes first, then child nodes.
        self.buildAttributes(node_.attributes)
        for child in node_.childNodes:
            self.buildChildren(child, child.nodeName.split(":")[-1])
    def buildAttributes(self, attrs):
        for key in ("Name", "EMail", "Licence"):
            node = attrs.get(key)
            if node:
                setattr(self, key, node.value)
    def buildChildren(self, child_, nodeName_):
        # Collect the concatenated text content of the element.
        pieces = []
        for sub in child_.childNodes:
            if sub.nodeType == Node.TEXT_NODE:
                pieces.append(sub.nodeValue)
        self.valueOf_ = "".join(pieces)
# end class Author
class ViewProvider:
    """Model for the <ViewProvider> element: an ordered list of Property children."""
    subclass = None
    def __init__(self, Property=None):
        # Default to a fresh list so instances never share mutable state.
        self.Property = [] if Property is None else Property
    def factory(*args_, **kwargs_):
        # Delegate to the registered subclass when one has been installed.
        target = ViewProvider.subclass or ViewProvider
        return target(*args_, **kwargs_)
    factory = staticmethod(factory)
    def getProperty(self):
        return self.Property
    def setProperty(self, Property):
        self.Property = Property
    def addProperty(self, value):
        self.Property.append(value)
    def insertProperty(self, index, value):
        # NOTE: overwrites the slot at *index* (generated-code naming quirk).
        self.Property[index] = value
    def export(self, outfile, level, name_="ViewProvider"):
        # Emit <ViewProvider> ... </ViewProvider> with indented children.
        write = outfile.write
        showIndent(outfile, level)
        write("<%s>\n" % name_)
        self.exportChildren(outfile, level + 1, name_)
        showIndent(outfile, level)
        write("</%s>\n" % name_)
    def exportAttributes(self, outfile, level, name_="ViewProvider"):
        # This element carries no XML attributes.
        pass
    def exportChildren(self, outfile, level, name_="ViewProvider"):
        for prop in self.getProperty():
            prop.export(outfile, level)
    def exportLiteral(self, outfile, level, name_="ViewProvider"):
        # Serialize as Python-literal constructor text (debug form).
        level += 1
        self.exportLiteralAttributes(outfile, level, name_)
        self.exportLiteralChildren(outfile, level, name_)
    def exportLiteralAttributes(self, outfile, level, name_):
        pass
    def exportLiteralChildren(self, outfile, level, name_):
        showIndent(outfile, level)
        outfile.write("Property=[\n")
        inner = level + 1
        for prop in self.Property:
            showIndent(outfile, inner)
            outfile.write("Property(\n")
            prop.exportLiteral(outfile, inner)
            showIndent(outfile, inner)
            outfile.write("),\n")
        showIndent(outfile, level)
        outfile.write("],\n")
    def build(self, node_):
        # Populate from a DOM node: attributes first, then child elements.
        self.buildAttributes(node_.attributes)
        for child in node_.childNodes:
            self.buildChildren(child, child.nodeName.split(":")[-1])
    def buildAttributes(self, attrs):
        pass
    def buildChildren(self, child_, nodeName_):
        if nodeName_ == "Property" and child_.nodeType == Node.ELEMENT_NODE:
            prop = Property.factory()
            prop.build(child_)
            self.Property.append(prop)
# end class ViewProvider
class Parameter:
    """Model for a <Parameter> element: Name/Type attributes plus the
    element's raw text content in ``valueOf_``."""
    subclass = None
    def __init__(self, Name="", Type="", valueOf_=""):
        self.valueOf_ = valueOf_
        self.Name = Name
        self.Type = Type
    def factory(*args_, **kwargs_):
        # Delegate to the registered subclass when one has been installed.
        target = Parameter.subclass or Parameter
        return target(*args_, **kwargs_)
    factory = staticmethod(factory)
    def getName(self):
        return self.Name
    def setName(self, Name):
        self.Name = Name
    def getType(self):
        return self.Type
    def setType(self, Type):
        self.Type = Type
    def getValueOf_(self):
        return self.valueOf_
    def setValueOf_(self, valueOf_):
        self.valueOf_ = valueOf_
    def export(self, outfile, level, name_="Parameter"):
        # Serialize as an XML element: open tag, attributes, text, close tag.
        write = outfile.write
        showIndent(outfile, level)
        write("<%s" % (name_,))
        self.exportAttributes(outfile, level, name_="Parameter")
        write(">\n")
        self.exportChildren(outfile, level + 1, name_)
        showIndent(outfile, level)
        write("</%s>\n" % name_)
    def exportAttributes(self, outfile, level, name_="Parameter"):
        outfile.write(' Name="%s"' % (self.getName(),))
        outfile.write(' Type="%s"' % (self.getType(),))
    def exportChildren(self, outfile, level, name_="Parameter"):
        # The element body is the raw character data gathered at parse time.
        outfile.write(self.valueOf_)
    def exportLiteral(self, outfile, level, name_="Parameter"):
        # Serialize as Python-literal constructor text (debug form).
        level += 1
        self.exportLiteralAttributes(outfile, level, name_)
        self.exportLiteralChildren(outfile, level, name_)
    def exportLiteralAttributes(self, outfile, level, name_):
        for label, value in (("Name", self.getName()), ("Type", self.getType())):
            showIndent(outfile, level)
            outfile.write('%s = "%s",\n' % (label, value))
    def exportLiteralChildren(self, outfile, level, name_):
        showIndent(outfile, level)
        outfile.write('valueOf_ = "%s",\n' % (self.valueOf_,))
    def build(self, node_):
        # Populate from a DOM node: attributes first, then child nodes.
        self.buildAttributes(node_.attributes)
        for child in node_.childNodes:
            self.buildChildren(child, child.nodeName.split(":")[-1])
    def buildAttributes(self, attrs):
        for key in ("Name", "Type"):
            node = attrs.get(key)
            if node:
                setattr(self, key, node.value)
    def buildChildren(self, child_, nodeName_):
        # Collect the concatenated text content of the element.
        pieces = []
        for sub in child_.childNodes:
            if sub.nodeType == Node.TEXT_NODE:
                pieces.append(sub.nodeValue)
        self.valueOf_ = "".join(pieces)
# end class Parameter
from xml.sax import handler, make_parser
class SaxStackElement:
    """One frame of the SAX parse stack: the element's tag name, the bound
    generated object, and the character data accumulated so far."""
    def __init__(self, name="", obj=None):
        self.content = ""  # character data gathered between start/end events
        self.obj = obj     # the generated model object for this element
        self.name = name   # the element's tag name
#
# SAX handler
#
class SaxGeneratemodelHandler(handler.ContentHandler):
def __init__(self):
self.stack = []
self.root = None
def getRoot(self):
return self.root
def setDocumentLocator(self, locator):
self.locator = locator
def showError(self, msg):
print("*** (showError):", msg)
sys.exit(-1)
def startElement(self, name, attrs):
done = 0
if name == "GenerateModel":
obj = GenerateModel.factory()
stackObj = SaxStackElement("GenerateModel", obj)
self.stack.append(stackObj)
done = 1
elif name == "Module":
obj = Module.factory()
stackObj = SaxStackElement("Module", obj)
self.stack.append(stackObj)
done = 1
elif name == "PythonExport":
obj = PythonExport.factory()
val = attrs.get("Name", None)
if val is not None:
obj.setName(val)
val = attrs.get("PythonName", None)
if val is not None:
obj.setPythonname(val)
val = attrs.get("Include", None)
if val is not None:
obj.setInclude(val)
val = attrs.get("Father", None)
if val is not None:
obj.setFather(val)
val = attrs.get("Twin", None)
if val is not None:
obj.setTwin(val)
val = attrs.get("Namespace", None)
if val is not None:
obj.setNamespace(val)
val = attrs.get("FatherInclude", None)
if val is not None:
obj.setFatherinclude(val)
val = attrs.get("FatherNamespace", None)
if val is not None:
obj.setFathernamespace(val)
val = attrs.get("Constructor", None)
if val is not None:
if val in ("true", "1"):
obj.setConstructor(1)
elif val in ("false", "0"):
obj.setConstructor(0)
else:
self.reportError(
'"Constructor" attribute must be boolean ("true", "1", "false", "0")'
)
val = attrs.get("NumberProtocol", None)
if val is not None:
if val in ("true", "1"):
obj.setNumberprotocol(1)
elif val in ("false", "0"):
obj.setNumberprotocol(0)
else:
self.reportError(
'"NumberProtocol" attribute must be boolean ("true", "1", "false", "0")'
)
val = attrs.get("RichCompare", None)
if val is not None:
if val in ("true", "1"):
obj.setRichcompare(1)
elif val in ("false", "0"):
obj.setRichcompare(0)
else:
self.reportError(
'"RichCompare" attribute must be boolean ("true", "1", "false", "0")'
)
val = attrs.get("TwinPointer", None)
if val is not None:
obj.setTwinpointer(val)
val = attrs.get("Delete", None)
if val is not None:
if val in ("true", "1"):
obj.setDelete(1)
elif val in ("false", "0"):
obj.setDelete(0)
else:
self.reportError(
'"Delete" attribute must be boolean ("true", "1", "false", "0")'
)
val = attrs.get("Reference", None)
if val is not None:
if val in ("true", "1"):
obj.setReference(1)
elif val in ("false", "0"):
obj.setReference(0)
else:
self.reportError(
'"Reference" attribute must be boolean ("true", "1", "false", "0")'
)
val = attrs.get("Initialization", None)
if val is not None:
if val in ("true", "1"):
obj.setInitialization(1)
elif val in ("false", "0"):
obj.setInitialization(0)
else:
self.reportError(
'"Initialization" attribute must be boolean ("true", "1", "false", "0")'
)
val = attrs.get("DisableNotify", None)
if val is not None:
if val in ("true", "1"):
obj.setDisablenotify(1)
elif val in ("false", "0"):
obj.setDisablenotify(0)
else:
self.reportError(
'"DisableNotify" attribute must be boolean ("true", "1", "false", "0")'
)
val = attrs.get("DescriptorGetter", None)
if val is not None:
if val in ("true", "1"):
obj.setDescriptorgetter(1)
elif val in ("false", "0"):
obj.setDescriptorgetter(0)
else:
self.reportError(
'"DescriptorGetter" attribute must be boolean ("true", "1", "false", "0")'
)
val = attrs.get("DescriptorSetter", None)
if val is not None:
if val in ("true", "1"):
obj.setDescriptorsetter(1)
elif val in ("false", "0"):
obj.setDescriptorsetter(0)
else:
self.reportError(
'"DescriptorSetter" attribute must be boolean ("true", "1", "false", "0")'
)
stackObj = SaxStackElement("PythonExport", obj)
self.stack.append(stackObj)
done = 1
elif name == "Documentation":
obj = Documentation.factory()
stackObj = SaxStackElement("Documentation", obj)
self.stack.append(stackObj)
done = 1
elif name == "Methode":
obj = Methode.factory()
val = attrs.get("Name", None)
if val is not None:
obj.setName(val)
val = attrs.get("Const", None)
if val is not None:
if val in ("true", "1"):
obj.setConst(1)
elif val in ("false", "0"):
obj.setConst(0)
else:
self.reportError(
'"Const" attribute must be boolean ("true", "1", "false", "0")'
)
val = attrs.get("Keyword", None)
if val is not None:
if val in ("true", "1"):
obj.setKeyword(1)
elif val in ("false", "0"):
obj.setKeyword(0)
else:
self.reportError(
'"Keyword" attribute must be boolean ("true", "1", "false", "0")'
)
val = attrs.get("NoArgs", None)
if val is not None:
if val in ("true", "1"):
obj.setNoargs(1)
elif val in ("false", "0"):
obj.setNoargs(0)
else:
self.reportError(
'"NoArgs" attribute must be boolean ("true", "1", "false", "0")'
)
val = attrs.get("Class", None)
if val is not None:
if val in ("true", "1"):
obj.setClass(1)
elif val in ("false", "0"):
obj.setClass(0)
else:
self.reportError(
'"Class" attribute must be boolean ("true", "1", "false", "0")'
)
val = attrs.get("Static", None)
if val is not None:
if val in ("true", "1"):
obj.setStatic(1)
elif val in ("false", "0"):
obj.setStatic(0)
else:
self.reportError(
'"Static" attribute must be boolean ("true", "1", "false", "0")'
)
stackObj = SaxStackElement("Methode", obj)
self.stack.append(stackObj)
done = 1
elif name == "Parameter":
obj = Parameter.factory()
val = attrs.get("Name", None)
if val is not None:
obj.setName(val)
val = attrs.get("Type", None)
if val is not None:
obj.setType(val)
stackObj = SaxStackElement("Parameter", obj)
self.stack.append(stackObj)
done = 1
elif name == "Attribute":
obj = Attribute.factory()
val = attrs.get("Name", None)
if val is not None:
obj.setName(val)
val = attrs.get("ReadOnly", None)
if val is not None:
if val in ("true", "1"):
obj.setReadonly(1)
elif val in ("false", "0"):
obj.setReadonly(0)
else:
self.reportError(
'"ReadOnly" attribute must be boolean ("true", "1", "false", "0")'
)
stackObj = SaxStackElement("Attribute", obj)
self.stack.append(stackObj)
done = 1
elif name == "Sequence":
obj = Sequence.factory()
val = attrs.get("sq_length", None)
if val is not None:
if val in ("true", "1"):
obj.setSq_length(1)
elif val in ("false", "0"):
obj.setSq_length(0)
else:
self.reportError(
'"sq_length" attribute must be boolean ("true", "1", "false", "0")'
)
val = attrs.get("sq_concat", None)
if val is not None:
if val in ("true", "1"):
obj.setSq_concat(1)
elif val in ("false", "0"):
obj.setSq_concat(0)
else:
self.reportError(
'"sq_concat" attribute must be boolean ("true", "1", "false", "0")'
)
val = attrs.get("sq_repeat", None)
if val is not None:
if val in ("true", "1"):
obj.setSq_repeat(1)
elif val in ("false", "0"):
obj.setSq_repeat(0)
else:
self.reportError(
'"sq_repeat" attribute must be boolean ("true", "1", "false", "0")'
)
val = attrs.get("sq_item", None)
if val is not None:
if val in ("true", "1"):
obj.setSq_item(1)
elif val in ("false", "0"):
obj.setSq_item(0)
else:
self.reportError(
'"sq_item" attribute must be boolean ("true", "1", "false", "0")'
)
val = attrs.get("mp_subscript", None)
if val is not None:
if val in ("true", "1"):
obj.setMp_subscript(1)
elif val in ("false", "0"):
obj.setMp_subscript(0)
else:
self.reportError(
'"mp_subscript" attribute must be boolean ("true", "1", "false", "0")'
)
val = attrs.get("sq_ass_item", None)
if val is not None:
if val in ("true", "1"):
obj.setSq_ass_item(1)
elif val in ("false", "0"):
obj.setSq_ass_item(0)
else:
self.reportError(
'"sq_ass_item" attribute must be boolean ("true", "1", "false", "0")'
)
val = attrs.get("mp_ass_subscript", None)
if val is not None:
if val in ("true", "1"):
obj.setMp_ass_subscript(1)
elif val in ("false", "0"):
obj.setMp_ass_subscript(0)
else:
self.reportError(
'"mp_ass_subscript" attribute must be boolean ("true", "1", "false", "0")'
)
val = attrs.get("sq_contains", None)
if val is not None:
if val in ("true", "1"):
obj.setSq_contains(1)
elif val in ("false", "0"):
obj.setSq_contains(0)
else:
self.reportError(
'"sq_contains" attribute must be boolean ("true", "1", "false", "0")'
)
val = attrs.get("sq_inplace_concat", None)
if val is not None:
if val in ("true", "1"):
obj.setSq_inplace_concat(1)
elif val in ("false", "0"):
obj.setSq_inplace_concat(0)
else:
self.reportError(
'"sq_inplace_concat" attribute must be boolean ("true", "1", "false", "0")'
)
val = attrs.get("sq_inplace_repeat", None)
if val is not None:
if val in ("true", "1"):
obj.setSq_inplace_repeat(1)
elif val in ("false", "0"):
obj.setSq_inplace_repeat(0)
else:
self.reportError(
'"sq_inplace_repeat" attribute must be boolean ("true", "1", "false", "0")'
)
stackObj = SaxStackElement("Sequence", obj)
self.stack.append(stackObj)
done = 1
elif name == "CustomAttributes":
stackObj = SaxStackElement("CustomAttributes", None)
self.stack.append(stackObj)
done = 1
elif name == "ClassDeclarations":
stackObj = SaxStackElement("ClassDeclarations", None)
self.stack.append(stackObj)
done = 1
elif name == "ForwardDeclarations":
stackObj = SaxStackElement("ForwardDeclarations", None)
self.stack.append(stackObj)
done = 1
elif name == "Dependencies":
obj = Dependencies.factory()
stackObj = SaxStackElement("Dependencies", obj)
self.stack.append(stackObj)
done = 1
elif name == "Content":
obj = Content.factory()
stackObj = SaxStackElement("Content", obj)
self.stack.append(stackObj)
done = 1
elif name == "Property":
obj = Property.factory()
stackObj = SaxStackElement("Property", obj)
self.stack.append(stackObj)
done = 1
elif name == "Feature":
obj = Feature.factory()
val = attrs.get("Name", None)
if val is not None:
obj.setName(val)
stackObj = SaxStackElement("Feature", obj)
self.stack.append(stackObj)
done = 1
elif name == "ViewProvider":
obj = ViewProvider.factory()
stackObj = SaxStackElement("ViewProvider", obj)
self.stack.append(stackObj)
done = 1
elif name == "DocObject":
obj = DocObject.factory()
val = attrs.get("Name", None)
if val is not None:
obj.setName(val)
stackObj = SaxStackElement("DocObject", obj)
self.stack.append(stackObj)
done = 1
elif name == "GuiCommand":
stackObj = SaxStackElement("GuiCommand", None)
self.stack.append(stackObj)
done = 1
elif name == "PreferencesPage":
stackObj = SaxStackElement("PreferencesPage", None)
self.stack.append(stackObj)
done = 1
elif name == "Author":
obj = Author.factory()
val = attrs.get("Name", None)
if val is not None:
obj.setName(val)
val = attrs.get("EMail", None)
if val is not None:
obj.setEmail(val)
val = attrs.get("Licence", None)
if val is not None:
obj.setLicence(val)
stackObj = SaxStackElement("Author", obj)
self.stack.append(stackObj)
done = 1
elif name == "DeveloperDocu":
stackObj = SaxStackElement("DeveloperDocu", None)
self.stack.append(stackObj)
done = 1
elif name == "UserDocu":
stackObj = SaxStackElement("UserDocu", None)
self.stack.append(stackObj)
done = 1
if not done:
self.reportError('"%s" element not allowed here.' % name)
def endElement(self, name):
done = 0
if name == "GenerateModel":
if len(self.stack) == 1:
self.root = self.stack[-1].obj
self.stack.pop()
done = 1
elif name == "Module":
if len(self.stack) >= 2:
self.stack[-2].obj.addModule(self.stack[-1].obj)
self.stack.pop()
done = 1
elif name == "PythonExport":
if len(self.stack) >= 2:
self.stack[-2].obj.addPythonexport(self.stack[-1].obj)
self.stack.pop()
done = 1
elif name == "Documentation":
if len(self.stack) >= 2:
self.stack[-2].obj.setDocumentation(self.stack[-1].obj)
self.stack.pop()
done = 1
elif name == "Methode":
if len(self.stack) >= 2:
self.stack[-2].obj.addMethode(self.stack[-1].obj)
self.stack.pop()
done = 1
elif name == "Parameter":
if len(self.stack) >= 2:
self.stack[-2].obj.addParameter(self.stack[-1].obj)
self.stack.pop()
done = 1
elif name == "Attribute":
if len(self.stack) >= 2:
self.stack[-2].obj.addAttribute(self.stack[-1].obj)
self.stack.pop()
done = 1
elif name == "Sequence":
if len(self.stack) >= 2:
self.stack[-2].obj.setSequence(self.stack[-1].obj)
self.stack.pop()
done = 1
elif name == "CustomAttributes":
if len(self.stack) >= 2:
content = self.stack[-1].content
self.stack[-2].obj.setCustomattributes(content)
self.stack.pop()
done = 1
elif name == "ClassDeclarations":
if len(self.stack) >= 2:
content = self.stack[-1].content
self.stack[-2].obj.setClassdeclarations(content)
self.stack.pop()
done = 1
elif name == "ForwardDeclarations":
if len(self.stack) >= 2:
content = self.stack[-1].content
self.stack[-2].obj.setForwarddeclarations(content)
self.stack.pop()
done = 1
elif name == "Dependencies":
if len(self.stack) >= 2:
self.stack[-2].obj.setDependencies(self.stack[-1].obj)
self.stack.pop()
done = 1
elif name == "Content":
if len(self.stack) >= 2:
self.stack[-2].obj.setContent(self.stack[-1].obj)
self.stack.pop()
done = 1
elif name == "Property":
if len(self.stack) >= 2:
self.stack[-2].obj.addProperty(self.stack[-1].obj)
self.stack.pop()
done = 1
elif name == "Feature":
if len(self.stack) >= 2:
self.stack[-2].obj.addFeature(self.stack[-1].obj)
self.stack.pop()
done = 1
elif name == "ViewProvider":
if len(self.stack) >= 2:
self.stack[-2].obj.setViewprovider(self.stack[-1].obj)
self.stack.pop()
done = 1
elif name == "DocObject":
if len(self.stack) >= 2:
self.stack[-2].obj.addDocobject(self.stack[-1].obj)
self.stack.pop()
done = 1
elif name == "GuiCommand":
if len(self.stack) >= 2:
content = self.stack[-1].content
self.stack[-2].obj.addGuicommand(content)
self.stack.pop()
done = 1
elif name == "PreferencesPage":
if len(self.stack) >= 2:
content = self.stack[-1].content
self.stack[-2].obj.addPreferencespage(content)
self.stack.pop()
done = 1
elif name == "Author":
if len(self.stack) >= 2:
self.stack[-2].obj.setAuthor(self.stack[-1].obj)
self.stack.pop()
done = 1
elif name == "DeveloperDocu":
if len(self.stack) >= 2:
content = self.stack[-1].content
self.stack[-2].obj.setDeveloperdocu(content)
self.stack.pop()
done = 1
elif name == "UserDocu":
if len(self.stack) >= 2:
content = self.stack[-1].content
self.stack[-2].obj.setUserdocu(content)
self.stack.pop()
done = 1
if not done:
self.reportError('"%s" element not allowed here.' % name)
def characters(self, chrs, start, end):
if len(self.stack) > 0:
self.stack[-1].content += chrs[start:end]
def reportError(self, mesg):
locator = self.locator
sys.stderr.write(
"Doc: %s Line: %d Column: %d\n"
% (
locator.getSystemId(),
locator.getLineNumber(),
locator.getColumnNumber() + 1,
)
)
sys.stderr.write(mesg)
sys.stderr.write("\n")
sys.exit(-1)
# raise RuntimeError
# Help text printed by usage() before exiting with an error status.
USAGE_TEXT = """
Usage: python <Parser>.py [ -s ] <in_xml_file>
Options:
    -s        Use the SAX parser, not the minidom parser.
"""
def usage():
    """Print command-line usage and exit with a failure status."""
    print(USAGE_TEXT)
    sys.exit(-1)
#
# SAX handler used to determine the top level element.
#
class SaxSelectorHandler(handler.ContentHandler):
    """Record the name of the document's first element, then stop parsing."""

    def __init__(self):
        self.topElementName = None

    def getTopElementName(self):
        """Return the recorded top-level element name (None before parsing)."""
        return self.topElementName

    def startElement(self, name, attrs):
        # Remember the first element seen and abort the parse immediately;
        # the caller is expected to catch the StopIteration.
        self.topElementName = name
        raise StopIteration
def parseSelect(inFileName):
    """Parse *inFileName* after sniffing its top-level element.

    A first SAX pass reads only the document's top-level element name,
    which is mapped (with '-'/':' replaced by '_') to a generated class in
    this module; the file is then re-parsed with minidom and built into an
    instance of that class, which is echoed to stdout as XML and returned.

    Raises RuntimeError when the document has no top-level element or no
    generated class matches it.
    """
    # Build the parser before opening the file, and use 'with' so the file
    # is always closed: the original opened the file first and only entered
    # its try/finally afterwards, leaking the handle if parser setup raised.
    parser = make_parser()
    documentHandler = SaxSelectorHandler()
    parser.setContentHandler(documentHandler)
    topElementName = None
    with open(inFileName, "r") as infile:
        try:
            parser.parse(infile)
        except StopIteration:
            # SaxSelectorHandler deliberately raises StopIteration after
            # seeing the first element.
            topElementName = documentHandler.getTopElementName()
        if topElementName is None:
            raise RuntimeError("no top level element")
        topElementName = topElementName.replace("-", "_").replace(":", "_")
        if topElementName not in globals():
            raise RuntimeError("no class for top element: %s" % topElementName)
        topElement = globals()[topElementName]
        # Rewind and parse the whole document with minidom this time.
        infile.seek(0)
        doc = minidom.parse(infile)
    rootNode = doc.childNodes[0]
    rootObj = topElement.factory()
    rootObj.build(rootNode)
    # Drop the DOM reference so Python can reclaim its memory.
    doc = None
    sys.stdout.write('<?xml version="1.0" ?>\n')
    rootObj.export(sys.stdout, 0)
    return rootObj
def saxParse(inFileName):
    """Parse the file via SAX, echo the resulting model as XML and return it."""
    reader = make_parser()
    sax_handler = SaxGeneratemodelHandler()
    # NOTE(review): setDocumentHandler is the SAX1-era API — confirm the
    # parser returned by make_parser() still supports it.
    reader.setDocumentHandler(sax_handler)
    reader.parse("file:%s" % inFileName)
    model = sax_handler.getRoot()
    sys.stdout.write('<?xml version="1.0" ?>\n')
    model.export(sys.stdout, 0)
    return model
def saxParseString(inString):
    """Feed *inString* through the SAX handler and return the model root."""
    reader = make_parser()
    sax_handler = SaxGeneratemodelHandler()
    # NOTE(review): setDocumentHandler/feed/close is the SAX1-era incremental
    # API — confirm the parser returned by make_parser() supports it.
    reader.setDocumentHandler(sax_handler)
    reader.feed(inString)
    reader.close()
    return sax_handler.getRoot()
def parse(inFileName):
    """Parse *inFileName* with minidom, echo the model as XML and return it."""
    dom = minidom.parse(inFileName)
    model = GenerateModel.factory()
    model.build(dom.documentElement)
    dom = None  # release the DOM so Python can reclaim its memory
    sys.stdout.write('<?xml version="1.0" ?>\n')
    model.export(sys.stdout, 0, name_="GenerateModel")
    return model
def parseString(inString):
    """Parse XML from the string *inString*, echo the model and return it."""
    dom = minidom.parseString(inString)
    model = GenerateModel.factory()
    model.build(dom.documentElement)
    dom = None  # release the DOM so Python can reclaim its memory
    sys.stdout.write('<?xml version="1.0" ?>\n')
    model.export(sys.stdout, 0, name_="GenerateModel")
    return model
def parseLiteral(inFileName):
    """Parse *inFileName* and emit the model as a Python literal on stdout."""
    dom = minidom.parse(inFileName)
    model = GenerateModel.factory()
    model.build(dom.documentElement)
    dom = None  # release the DOM so Python can reclaim its memory
    sys.stdout.write("from generateModel_Module import *\n\n")
    sys.stdout.write("rootObj = GenerateModel(\n")
    model.exportLiteral(sys.stdout, 0, name_="GenerateModel")
    sys.stdout.write(")\n")
    return model
def main():
    """Command-line driver: `-s FILE` uses the SAX parser, `FILE` alone minidom."""
    argv = sys.argv[1:]
    if len(argv) == 2 and argv[0] == "-s":
        saxParse(argv[1])
    elif len(argv) == 1:
        parse(argv[0])
    else:
        usage()
# Allow running this generated parser module as a command-line script.
if __name__ == "__main__":
    main()
# Debugging alternative:
# import pdb
# pdb.run('main()')
|
sslcrypto | _aes | # pylint: disable=import-outside-toplevel
class AES:
    """Thin AES facade delegating to a backend, with an optional fallback.

    `backend` must provide is_algo_supported/random/encrypt/decrypt/get_backend;
    `fallback` (if given) handles algorithms the primary backend rejects.
    """

    def __init__(self, backend, fallback=None):
        self._backend = backend
        self._fallback = fallback

    def get_algo_key_length(self, algo):
        """Return the key length in bytes for a name like 'aes-256-cbc'."""
        if algo.count("-") != 2:
            raise ValueError("Invalid algorithm name")
        try:
            # Middle component is the key size in bits.
            return int(algo.split("-")[1]) // 8
        except ValueError:
            raise ValueError("Invalid algorithm name") from None

    def _check_key_length(self, key, algo):
        # Shared validation: encrypt and decrypt must both reject keys of the
        # wrong size for the chosen algorithm.
        key_length = self.get_algo_key_length(algo)
        if len(key) != key_length:
            raise ValueError(
                "Expected key to be {} bytes, got {} bytes".format(key_length, len(key))
            )

    def new_key(self, algo="aes-256-cbc"):
        """Generate a random key of the right size for *algo*."""
        if not self._backend.is_algo_supported(algo):
            if self._fallback is None:
                raise ValueError("This algorithm is not supported")
            return self._fallback.new_key(algo)
        return self._backend.random(self.get_algo_key_length(algo))

    def encrypt(self, data, key, algo="aes-256-cbc"):
        """Encrypt *data* with *key*; raises ValueError on a wrong-sized key."""
        if not self._backend.is_algo_supported(algo):
            if self._fallback is None:
                raise ValueError("This algorithm is not supported")
            return self._fallback.encrypt(data, key, algo)
        self._check_key_length(key, algo)
        return self._backend.encrypt(data, key, algo)

    def decrypt(self, ciphertext, iv, key, algo="aes-256-cbc"):
        """Decrypt *ciphertext* (with *iv*); raises ValueError on a wrong-sized key."""
        if not self._backend.is_algo_supported(algo):
            if self._fallback is None:
                raise ValueError("This algorithm is not supported")
            return self._fallback.decrypt(ciphertext, iv, key, algo)
        self._check_key_length(key, algo)
        return self._backend.decrypt(ciphertext, iv, key, algo)

    def get_backend(self):
        """Return the name of the underlying backend."""
        return self._backend.get_backend()
|
Draft | Draft | # -*- coding: utf-8 -*-
# ***************************************************************************
# * Copyright (c) 2009, 2010 Yorik van Havre <yorik@uncreated.net> *
# * Copyright (c) 2009, 2010 Ken Cline <cline@frii.com> *
# * Copyright (c) 2020 FreeCAD Developers *
# * *
# * This program is free software; you can redistribute it and/or modify *
# * it under the terms of the GNU Lesser General Public License (LGPL) *
# * as published by the Free Software Foundation; either version 2 of *
# * the License, or (at your option) any later version. *
# * for detail see the LICENCE text file. *
# * *
# * This program is distributed in the hope that it will be useful, *
# * but WITHOUT ANY WARRANTY; without even the implied warranty of *
# * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *
# * GNU Library General Public License for more details. *
# * *
# * You should have received a copy of the GNU Library General Public *
# * License along with this program; if not, write to the Free Software *
# * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 *
# * USA *
# * *
# ***************************************************************************
"""Provide the Draft Workbench public programming interface.
The Draft module offers tools to create and manipulate 2D objects.
The functions in this file must be usable without requiring the
graphical user interface.
These functions can be used as the backend for the graphical commands
defined in `DraftTools.py`.
"""
## \addtogroup DRAFT
# @{
import FreeCAD as App
if App.GuiUp:
import Draft_rc
gui = True
# To prevent complaints from code checkers (flake8)
True if Draft_rc.__name__ else False
else:
gui = False
__title__ = "FreeCAD Draft Workbench"
__author__ = (
"Yorik van Havre, Werner Mayer, Martin Burbaum, Ken Cline, "
"Dmitry Chigrin, Daniel Falck"
)
__url__ = "https://www.freecad.org"
# ---------------------------------------------------------------------------
# Draft functions
# ---------------------------------------------------------------------------
from draftfunctions.array import array
from draftfunctions.cut import cut
from draftfunctions.downgrade import downgrade
from draftfunctions.draftify import draftify
from draftfunctions.dxf import get_dxf, getDXF
from draftfunctions.extrude import extrude
from draftfunctions.fuse import fuse
from draftfunctions.heal import heal
from draftfunctions.join import join_two_wires, join_wires, joinTwoWires, joinWires
from draftfunctions.mirror import mirror
from draftfunctions.move import (
copy_moved_edges,
copyMovedEdges,
move,
move_edge,
move_vertex,
moveEdge,
moveVertex,
)
from draftfunctions.offset import offset
from draftfunctions.rotate import (
copy_rotated_edges,
copyRotatedEdges,
rotate,
rotate_edge,
rotate_vertex,
rotateEdge,
rotateVertex,
)
from draftfunctions.scale import (
copy_scaled_edges,
copyScaledEdges,
scale,
scale_edge,
scale_vertex,
scaleEdge,
scaleVertex,
)
from draftfunctions.split import split
from draftfunctions.svg import get_svg, getSVG
from draftfunctions.upgrade import upgrade
# arcs
from draftmake.make_arc_3points import make_arc_3points
from draftmake.make_circle import make_circle, makeCircle
from draftmake.make_ellipse import make_ellipse, makeEllipse
from draftmake.make_rectangle import make_rectangle, makeRectangle
# base object
from draftobjects.base import DraftObject, _DraftObject
# circle
from draftobjects.circle import Circle, _Circle
# App::Link support, used by the arrays
from draftobjects.draftlink import DraftLink, _DraftLink
# ellipse
from draftobjects.ellipse import Ellipse, _Ellipse
# rectangle
from draftobjects.rectangle import Rectangle, _Rectangle
from draftutils.groups import (
get_group_contents,
get_group_names,
get_movable_children,
get_windows,
getGroupContents,
getGroupNames,
getMovableChildren,
is_group,
ungroup,
)
from draftutils.gui_utils import (
autogroup,
dim_dash,
dim_symbol,
dimDash,
dimSymbol,
format_object,
formatObject,
get3DView,
get_3d_view,
get_bbox,
get_diffuse_color,
get_selection,
get_selection_ex,
getSelection,
getSelectionEx,
load_texture,
loadTexture,
remove_hidden,
removeHidden,
select,
)
# ---------------------------------------------------------------------------
# Utility functions
# ---------------------------------------------------------------------------
from draftutils.utils import ARROW_TYPES as arrowtypes
from draftutils.utils import (
argb_to_rgba,
compare_objects,
compareObjects,
epsilon,
filter_objects_for_modifiers,
filterObjectsForModifiers,
get_clone_base,
get_objects_of_type,
get_param,
get_param_type,
get_real_name,
get_rgb,
get_type,
getCloneBase,
getObjectsOfType,
getParam,
getParamType,
getRealName,
getrgb,
getType,
is_clone,
is_closed_edge,
isClone,
isClosedEdge,
load_svg_patterns,
loadSvgPatterns,
precision,
print_shape,
printShape,
rgba_to_argb,
set_param,
setParam,
shapify,
string_encode_coin,
stringencodecoin,
svg_patterns,
svgpatterns,
tolerance,
type_check,
typecheck,
)
# base viewprovider
from draftviewproviders.view_base import (
ViewProviderDraft,
ViewProviderDraftAlt,
ViewProviderDraftPart,
_ViewProviderDraft,
_ViewProviderDraftAlt,
_ViewProviderDraftPart,
)
from draftviewproviders.view_draftlink import (
ViewProviderDraftLink,
_ViewProviderDraftLink,
)
# ---------------------------------------------------------------------------
# Draft objects
# ---------------------------------------------------------------------------
if App.GuiUp:
from draftviewproviders.view_rectangle import (
ViewProviderRectangle,
_ViewProviderRectangle,
)
# polygon
from draftmake.make_line import make_line, makeLine
from draftmake.make_polygon import make_polygon, makePolygon
from draftmake.make_wire import make_wire, makeWire
from draftobjects.polygon import Polygon, _Polygon
# wire and line
from draftobjects.wire import Wire, _Wire
if App.GuiUp:
from draftviewproviders.view_wire import ViewProviderWire, _ViewProviderWire
# bspline
from draftmake.make_bspline import make_bspline, makeBSpline
from draftobjects.bspline import BSpline, _BSpline
if App.GuiUp:
from draftviewproviders.view_bspline import (
ViewProviderBSpline,
_ViewProviderBSpline,
)
# bezcurve
from draftmake.make_bezcurve import make_bezcurve, makeBezCurve
from draftobjects.bezcurve import BezCurve, _BezCurve
if App.GuiUp:
from draftviewproviders.view_bezcurve import (
ViewProviderBezCurve,
_ViewProviderBezCurve,
)
# copy
from draftmake.make_clone import clone, make_clone
from draftmake.make_copy import make_copy
from draftmake.make_copy import make_copy as makeCopy
# clone
from draftobjects.clone import Clone, _Clone
if App.GuiUp:
from draftviewproviders.view_clone import ViewProviderClone, _ViewProviderClone
# point
from draftmake.make_point import make_point, makePoint
from draftobjects.point import Point, _Point
if App.GuiUp:
from draftviewproviders.view_point import ViewProviderPoint, _ViewProviderPoint
# arrays
from draftmake.make_array import make_array, makeArray
from draftmake.make_circulararray import make_circular_array
from draftmake.make_orthoarray import (
make_ortho_array,
make_ortho_array2d,
make_rect_array,
make_rect_array2d,
)
from draftmake.make_patharray import (
make_path_array,
make_path_twisted_array,
makePathArray,
)
from draftmake.make_pointarray import make_point_array, makePointArray
from draftmake.make_polararray import make_polar_array
from draftobjects.array import Array, _Array
from draftobjects.patharray import PathArray, _PathArray
from draftobjects.pointarray import PointArray, _PointArray
if App.GuiUp:
from draftviewproviders.view_array import (
ViewProviderDraftArray,
_ViewProviderDraftArray,
)
# facebinder
from draftmake.make_facebinder import make_facebinder, makeFacebinder
from draftobjects.facebinder import Facebinder, _Facebinder
if App.GuiUp:
from draftviewproviders.view_facebinder import (
ViewProviderFacebinder,
_ViewProviderFacebinder,
)
# shapestring
from draftmake.make_block import make_block, makeBlock
from draftmake.make_shapestring import make_shapestring, makeShapeString
from draftobjects.block import Block, _Block
# shapestring
from draftobjects.shapestring import ShapeString, _ShapeString
if App.GuiUp:
from draftviewproviders.view_shapestring import ViewProviderShapeString
# shape 2d view
from draftmake.make_shape2dview import make_shape2dview, makeShape2DView
# sketch
from draftmake.make_sketch import make_sketch, makeSketch
from draftmake.make_wpproxy import make_workingplaneproxy, makeWorkingPlaneProxy
from draftobjects.shape2dview import Shape2DView, _Shape2DView
# working plane proxy
from draftobjects.wpproxy import WorkingPlaneProxy
if App.GuiUp:
from draftviewproviders.view_wpproxy import ViewProviderWorkingPlaneProxy
from draftmake.make_fillet import make_fillet
from draftobjects.fillet import Fillet
if App.GuiUp:
from draftviewproviders.view_fillet import ViewProviderFillet
from draftmake.make_layer import make_layer, makeLayer
from draftobjects.layer import Layer, _VisGroup
if App.GuiUp:
from draftviewproviders.view_layer import ViewProviderLayer, _ViewProviderVisGroup
# Annotation objects
from draftmake.make_dimension import (
make_angular_dimension,
make_dimension,
make_linear_dimension,
make_linear_dimension_obj,
make_radial_dimension_obj,
makeAngularDimension,
makeDimension,
)
from draftobjects.dimension import (
AngularDimension,
LinearDimension,
_AngularDimension,
_Dimension,
)
if App.GuiUp:
from draftviewproviders.view_dimension import (
ViewProviderLinearDimension,
_ViewProviderDimension,
ViewProviderAngularDimension,
_ViewProviderAngularDimension,
)
from draftmake.make_label import make_label, makeLabel
from draftobjects.label import DraftLabel, Label
if App.GuiUp:
from draftviewproviders.view_label import ViewProviderLabel, ViewProviderDraftLabel
from draftmake.make_text import (
convert_draft_texts,
convertDraftTexts,
make_text,
makeText,
)
from draftobjects.text import DraftText, Text
if App.GuiUp:
from draftviewproviders.view_text import ViewProviderText, ViewProviderDraftText
from draftmake.make_hatch import make_hatch
from draftobjects.hatch import Hatch
if App.GuiUp:
from draftviewproviders.view_hatch import ViewProviderDraftHatch
## @}
|
snippet | completer | # This file is part of the Frescobaldi project, http://www.frescobaldi.org/
#
# Copyright (c) 2012 by Wilbert Berendsen
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
# See http://www.gnu.org/licenses/ for more information.
"""
The completer for the snippet editing dialog.
"""
import keyword
import re
import app
import listmodel
import textformats
import widgets.completer
from . import snippets
class Completer(widgets.completer.Completer):
    """Completer for the snippet editing dialog.

    Completes on words found in the snippet text itself; when the snippet
    declares itself as Python (via a '-*- ... python' variable line) it also
    offers Python keywords and the snippet-API names cursor/state/text.
    """

    def __init__(self, textedit):
        super().__init__()
        self.setWidget(textedit)
        self.setParent(textedit)  # otherwise PyQt5 loses us
        # Re-apply editor font/colors whenever the user changes settings.
        app.settingsChanged.connect(self.readSettings)
        self.readSettings()

    def readSettings(self):
        # Use the same font and palette as the main editor view.
        self.popup().setFont(textformats.formatData("editor").font)
        self.popup().setPalette(textformats.formatData("editor").palette())

    def completionCursor(self):
        """Return a text cursor spanning the word to complete.

        Rebuilds the completion model from the document's words unless the
        popup is already visible (then only the selection is extended).
        """
        cursor = self.textCursor()
        if self.popup().isVisible() and self._pos < cursor.position():
            # Popup already showing: just extend the selection from the
            # stored start position and keep the existing model.
            cursor.setPosition(self._pos, cursor.KeepAnchor)
            return cursor
        # alter the model
        pos = cursor.position()
        text = cursor.document().toPlainText()
        # skip '-*- ' lines declaring variables, and check if it is python
        python = False
        block = cursor.document().firstBlock()
        start = 0  # document offset where completion candidates start
        while block.text().startswith("-*- "):
            if not python:
                python = any(
                    True
                    for m in snippets._variables_re.finditer(block.text())
                    if m.group(1) == "python"
                )
            block = block.next()
            if not block.isValid():
                break
            start = block.position()
        # determine the word set to complete on
        if python:
            pattern = r"\w+"
        else:
            # presumably also matches backslash commands and hyphenated
            # words used by snippet text — TODO confirm
            pattern = r"\\?[\w-]+"
        rx = re.compile(pattern)
        # candidate words: longer than 4 chars and not the word being typed
        words = {
            m.group()
            for m in rx.finditer(text, start)
            if len(m.group()) > 4 and m.end() != pos
        }
        if python:
            words.update(keyword.kwlist)
            words.update(("cursor", "state", "text"))
        if words:
            self.setModel(listmodel.ListModel(sorted(words)))
        cursor.movePosition(cursor.StartOfWord, cursor.KeepAnchor)
        self._pos = cursor.position()
        return cursor
|
Newsfeed | NewsfeedPlugin | import re
import time
from Db.DbQuery import DbQuery
from Debug import Debug
from Plugin import PluginManager
from util import helper
from util.Flag import flag
@PluginManager.registerTo("UiWebsocket")
class UiWebsocketPlugin(object):
    """Websocket actions for following, querying and searching site feeds."""

    def formatSiteInfo(self, site, create_user=True):
        """Extend the base site info with the number of followed feeds."""
        site_info = super(UiWebsocketPlugin, self).formatSiteInfo(
            site, create_user=create_user
        )
        feed_following = self.user.sites.get(site.address, {}).get("follow", None)
        if feed_following is None:  # `is None`: identity, not equality comparison
            site_info["feed_follow_num"] = None
        else:
            site_info["feed_follow_num"] = len(feed_following)
        return site_info

    def actionFeedFollow(self, to, feeds):
        """Save the set of feed queries the user follows for this site."""
        self.user.setFeedFollow(self.site.address, feeds)
        self.user.save()
        self.response(to, "ok")

    def actionFeedListFollow(self, to):
        """Reply with the feed queries the user follows for this site."""
        feeds = self.user.sites.get(self.site.address, {}).get("follow", {})
        self.response(to, feeds)

    @flag.admin
    def actionFeedQuery(self, to, limit=10, day_limit=3):
        """Query all followed feeds and reply with their most recent rows.

        limit: max rows per feed query.
        day_limit: only include items newer than this many days (0 disables).
        """
        from Site import SiteManager

        rows = []
        stats = []
        total_s = time.time()
        num_sites = 0

        for address, site_data in list(self.user.sites.items()):
            feeds = site_data.get("follow")
            if not feeds:
                continue
            if not isinstance(feeds, dict):
                self.log.debug("Invalid feed for site %s" % address)
                continue
            num_sites += 1
            for name, query_set in feeds.items():
                site = SiteManager.site_manager.get(address)
                if not site or not site.storage.has_db:
                    continue

                s = time.time()
                try:
                    query_raw, params = query_set
                    # Apply the day limit to each sub-query of a UNION separately.
                    query_parts = re.split(r"UNION(?:\s+ALL|)", query_raw)
                    for i, query_part in enumerate(query_parts):
                        db_query = DbQuery(query_part)
                        if day_limit:
                            where = " WHERE %s > strftime('%%s', 'now', '-%s day')" % (
                                db_query.fields.get("date_added", "date_added"),
                                day_limit,
                            )
                            if "WHERE" in query_part:
                                query_part = re.sub(
                                    "WHERE (.*?)(?=$| GROUP BY)",
                                    where + " AND (\\1)",
                                    query_part,
                                )
                            else:
                                query_part += where
                        query_parts[i] = query_part
                    query = " UNION ".join(query_parts)
                    if ":params" in query:
                        query_params = map(helper.sqlquote, params)
                        query = query.replace(":params", ",".join(query_params))
                    res = site.storage.query(
                        query + " ORDER BY date_added DESC LIMIT %s" % limit
                    )
                except Exception as err:  # Log error
                    self.log.error(
                        "%s feed query %s error: %s"
                        % (address, name, Debug.formatException(err))
                    )
                    stats.append(
                        {"site": site.address, "feed_name": name, "error": str(err)}
                    )
                    continue

                for row in res:
                    row = dict(row)
                    # Validate date_added before indexing it: the original
                    # checked key presence only AFTER it had already been
                    # accessed, so a missing key raised KeyError instead of
                    # being skipped.
                    if "date_added" not in row or not isinstance(
                        row["date_added"], (int, float, complex)
                    ):
                        self.log.debug(
                            "Invalid date_added from site %s: %r"
                            % (address, row.get("date_added"))
                        )
                        continue
                    if row["date_added"] > 1000000000000:  # Formatted as milliseconds
                        row["date_added"] = row["date_added"] / 1000
                    if row["date_added"] > time.time() + 120:
                        self.log.debug(
                            "Newsfeed item from the future from site %s" % address
                        )
                        continue  # Feed item is in the future, skip it
                    row["site"] = address
                    row["feed_name"] = name
                    rows.append(row)
                stats.append(
                    {
                        "site": site.address,
                        "feed_name": name,
                        "taken": round(time.time() - s, 3),
                    }
                )
                # Yield briefly so one large query run does not starve others.
                time.sleep(0.001)
        return self.response(
            to,
            {
                "rows": rows,
                "stats": stats,
                "num": len(rows),
                "sites": num_sites,
                "taken": round(time.time() - total_s, 3),
            },
        )

    def parseSearch(self, search):
        """Split a search string into free text and `site:`/`type:` filters."""
        parts = re.split("(site|type):", search)
        if len(parts) > 1:  # Found filter
            search_text = parts[0]
            parts = [part.strip() for part in parts]
            filters = dict(zip(parts[1::2], parts[2::2]))
        else:
            search_text = search
            filters = {}
        return [search_text, filters]

    def actionFeedSearch(self, to, search, limit=30, day_limit=30):
        """Search the feeds of all known sites; requires ADMIN permission."""
        if "ADMIN" not in self.site.settings["permissions"]:
            return self.response(to, "FeedSearch not allowed")
        from Site import SiteManager

        rows = []
        stats = []
        num_sites = 0
        total_s = time.time()
        search_text, filters = self.parseSearch(search)

        for address, site in SiteManager.site_manager.list().items():
            if not site.storage.has_db:
                continue

            if "site" in filters:
                # Match by address or by lower-cased site title. Guard the
                # title lookup: a content.json without "title" previously
                # crashed on None.lower().
                title = site.content_manager.contents["content.json"].get("title")
                candidates = [site.address]
                if title is not None:
                    candidates.append(title.lower())
                if filters["site"].lower() not in candidates:
                    continue

            if site.storage.db:  # Database loaded
                feeds = site.storage.db.schema.get("feeds")
            else:
                try:
                    feeds = site.storage.loadJson("dbschema.json").get("feeds")
                except Exception:  # dbschema.json missing or unreadable: skip site
                    continue

            if not feeds:
                continue

            num_sites += 1
            for name, query in feeds.items():
                s = time.time()
                try:
                    db_query = DbQuery(query)
                    params = []
                    # Filters
                    if search_text:
                        db_query.wheres.append(
                            "(%s LIKE ? OR %s LIKE ?)"
                            % (db_query.fields["body"], db_query.fields["title"])
                        )
                        search_like = "%" + search_text.replace(" ", "%") + "%"
                        params.append(search_like)
                        params.append(search_like)
                    if filters.get("type") and filters["type"] not in query:
                        continue
                    if day_limit:
                        db_query.wheres.append(
                            "%s > strftime('%%s', 'now', '-%s day')"
                            % (
                                db_query.fields.get("date_added", "date_added"),
                                day_limit,
                            )
                        )
                    # Order
                    db_query.parts["ORDER BY"] = "date_added DESC"
                    db_query.parts["LIMIT"] = str(limit)
                    res = site.storage.query(str(db_query), params)
                except Exception as err:
                    self.log.error(
                        "%s feed query %s error: %s"
                        % (address, name, Debug.formatException(err))
                    )
                    stats.append(
                        {
                            "site": site.address,
                            "feed_name": name,
                            "error": str(err),
                            "query": query,
                        }
                    )
                    continue
                for row in res:
                    row = dict(row)
                    if not row["date_added"] or row["date_added"] > time.time() + 120:
                        continue  # Feed item is in the future, skip it
                    row["site"] = address
                    row["feed_name"] = name
                    rows.append(row)
                stats.append(
                    {
                        "site": site.address,
                        "feed_name": name,
                        "taken": round(time.time() - s, 3),
                    }
                )
        return self.response(
            to,
            {
                "rows": rows,
                "num": len(rows),
                "sites": num_sites,
                "taken": round(time.time() - total_s, 3),
                "stats": stats,
            },
        )
@PluginManager.registerTo("User")
class UserPlugin(object):
    """Mixin registered onto User: persists which site feeds the user follows."""

    def setFeedFollow(self, address, feeds):
        """Store the feed queries followed for the site at `address`, persist, and return the site data."""
        data = self.getSiteData(address)
        data["follow"] = feeds
        self.save()
        return data
|
cd | linux_cd_parser | """
This module is a low-level reader and parser for audio CDs.
It heavily relies on ioctls to the linux kernel.
Original source for the code:
http://www.carey.geek.nz/code/python-cdrom/cdtoc.py
Source for all the magical constants and more infos on the ioctls:
linux/include/uapi/linux/cdrom.h
https://git.kernel.org/pub/scm/linux/kernel/git/torvalds/linux.git/tree/include/uapi/linux/cdrom.h
"""
from __future__ import division
import fcntl
import logging
import os
import struct
from xl.trax import Track
logger = logging.getLogger(__name__)
def read_cd_index(device):
    """
    Reads a CD's index (table of contents, TOC).
    This must happen async because the I/O operations may take some time.
    @param device: a path to a CD device
    @return: Tuple (toc_entries, mcn). toc_entries is the list of TOC entries
        (the last one is a dummy leadout entry) to be read by parse_tracks(),
        or None if nothing could be read; mcn is the disc's media catalog
        number, or None.
    """
    mcn = None
    toc_entries = []
    fd = os.open(device, os.O_RDONLY)
    try:
        (start, end) = __read_toc_header(fd)
        mcn = __read_disc_mcn(fd)
        # index of the end, i.e. the last toc entry which is an empty dummy
        CDROM_LEADOUT = 0xAA
        for toc_entry_index in list(range(start, end + 1)) + [CDROM_LEADOUT]:
            toc_entry = __read_toc_entry(fd, toc_entry_index)
            # XXX one could also read+compute the `isrc` track id, see libdiscid
            toc_entries.append(toc_entry)
        logger.debug("Successfully read TOC of CD with MCN %s : %s", mcn, toc_entries)
    except Exception:
        # FIX: Logger.warn is deprecated (and removed in Python 3.13);
        # use Logger.warning instead.
        logger.warning("Failed to read CD TOC", exc_info=True)
    finally:
        os.close(fd)
    # clear output for convenience: an empty read is reported as None
    if not toc_entries:
        toc_entries = None
    return toc_entries, mcn
def __read_toc_header(fd):
    """Wrapper for the `CDROMREADTOCHDR` ioctl; returns (start, end) track indexes."""
    # struct cdrom_tochdr: two u8 fields
    #   u8 start: lowest track index (index of first track), probably always 1
    #   u8 end:   highest track index (index of last track), = number of tracks
    layout = "BB"
    CDROMREADTOCHDR = 0x5305
    buf = struct.pack(layout, 0, 0)
    buf = fcntl.ioctl(fd, CDROMREADTOCHDR, buf)
    return struct.unpack(layout, buf)
def __read_disc_mcn(fd):
    """Wrapper for the `CDROM_GET_MCN` ioctl; returns the media catalog number bytes or None."""
    # struct cdrom_mcn: 14 bytes, null-terminated
    layout = "14s"
    CDROM_GET_MCN = 0x5311
    buf = fcntl.ioctl(fd, CDROM_GET_MCN, struct.pack(layout, b"\0"))
    mcn = struct.unpack(layout, buf)[0][0:13]
    # An all-zero catalog number means the disc does not carry one
    if b"0000000000000" in mcn:
        return None
    return mcn
def __read_toc_entry(fd, toc_entry_num):
    """
    A wrapper for the `CDROMREADTOCENTRY` ioctl.
    @param fd: open file descriptor of the CD device
    @param toc_entry_num: number of the TOC entry to read
    @return: tuple (cdte_track, is_data_track, minute, second, frame)
    @raise OSError: if the kernel answers in an unexpected address format
    """
    # value constant: Minute, Second, Frame: binary (not bcd here)
    CDROM_MSF = 0x02
    # struct cdrom_tocentry of 3 times u8 followed by an int and another u8
    FORMAT_cdrom_tocentry = "BBBiB"
    # u8 cdte_track: Track number. Starts with 1, which is used for the TOC and contains data.
    # u8 cdte_adr_ctrl: 4 high bits -> cdte_ctrl, 4 low bits -> cdte_adr
    # u8 cdte_format: should be CDROM_MSF=0x02 as requested before
    # int cdte_addr: see below
    # u8 cdte_datamode: ??? (ignored)
    cdrom_tocentry = struct.pack(
        FORMAT_cdrom_tocentry, toc_entry_num, 0, CDROM_MSF, 0, 0
    )
    CDROMREADTOCENTRY = 0x5306
    cdrom_tocentry = fcntl.ioctl(fd, CDROMREADTOCENTRY, cdrom_tocentry)
    cdte_track, cdte_adr_ctrl, cdte_format, cdte_addr, _cdte_datamode = struct.unpack(
        FORMAT_cdrom_tocentry, cdrom_tocentry
    )
    # FIX: compare integers by value, not identity. `is not` only happened to
    # work because CPython caches small ints; it is not a correctness guarantee.
    if cdte_format != CDROM_MSF:
        raise OSError("Invalid syscall answer")
    # unused:
    # cdte_adr = cdte_adr_ctrl & 0x0f  # lower nibble
    cdte_ctrl = (cdte_adr_ctrl & 0xF0) >> 4  # higher nibble
    CDROM_DATA_TRACK = 0x04
    # data: `True` if this "track" contains data, `False` if it is audio
    is_data_track = bool(cdte_ctrl & CDROM_DATA_TRACK)
    # union cdrom_addr of struct cdrom_msf0 and int
    # struct cdrom_msf0 of 3 times u8 plus padding to match size of int
    FORMAT_cdrom_addr = "BBB" + "x" * (struct.calcsize("i") - 3)
    # u8 minute: Minutes from beginning of CD
    # u8 second: Seconds after `minute`
    # u8 frame: Frames after `second`
    minute, second, frame = struct.unpack(
        FORMAT_cdrom_addr, struct.pack("i", cdte_addr)
    )
    return (cdte_track, is_data_track, minute, second, frame)
def parse_tracks(toc_entries, mcn, device):
    """
    Parses the given toc entries and mcn into tracks.
    As a result, the data will only contain track numbers and lengths but
    no sophisticated metadata.
    @param toc_entries: from read_cd_index()
    @param mcn: from read_cd_index()
    @param device: Name of the CD device
    @return: An array of xl.trax.Track with minimal information
    """
    real_track_count = len(toc_entries) - 1  # ignore the empty dummy track at the end
    tracks = []
    for toc_entry_index in range(0, real_track_count):
        (track_index, is_data_track, _, _, _) = toc_entries[toc_entry_index]
        if is_data_track:
            continue
        # FIX: compare ints by value; `is not` relied on CPython small-int
        # caching and is wrong as an equality test.
        if track_index != toc_entry_index + 1:
            # FIX: Logger.warn is deprecated (removed in Python 3.13).
            logger.warning(
                "Unexpected index found. %ith toc entry claims to be track number %i",
                toc_entry_index,
                track_index,
            )
        # Track length is derived from the gap to the next TOC entry
        # (the leadout dummy closes the last track).
        length = __calculate_track_length(
            toc_entries[toc_entry_index], toc_entries[toc_entry_index + 1]
        )
        track_uri = "cdda://%d/#%s" % (track_index, device)
        track = Track(uri=track_uri, scan=False)
        track_number = "{0}/{1}".format(track_index, real_track_count)
        track.set_tags(
            title="Track %d" % track_index, tracknumber=track_number, __length=length
        )
        if mcn:
            track.set_tags(mcn=mcn)
        tracks.append(track)
    return tracks
def __calculate_track_length(current_track, next_track):
    """Length (seconds, float) of a track from its TOC entry and the following one."""
    (_, _, begin_m, begin_s, begin_f) = current_track
    (_, _, end_m, end_s, end_f) = next_track
    # 75 frames per second, see CD_FRAMES in cdrom.h file
    return (end_m - begin_m) * 60 + (end_s - begin_s) + (end_f - begin_f) / 75
|
fec | bitflip | #!/usr/bin/env python
#
# Copyright 2014 Free Software Foundation, Inc.
#
# This file is part of GNU Radio
#
# SPDX-License-Identifier: GPL-3.0-or-later
#
#
def bitreverse(mint):
    """Return the integer whose binary digits are those of `mint` reversed."""
    out = 0
    while mint:
        out = (out << 1) | (mint & 1)
        mint >>= 1
    return out
# Lookup tables indexed by constellation: const_lut[i] is the number of bits
# per symbol used by bitflip(); specinvert_lut[i] remaps each symbol value.
const_lut = [2]
specinvert_lut = [[0, 2, 1, 3]]


def bitflip(mint, bitflip_lut, index, csize):
    """Remap every const_lut[index]-bit symbol of `mint` through `bitflip_lut` (csize bits total)."""
    symbol_bits = const_lut[index]
    mask = (1 << symbol_bits) - 1
    out = 0
    pos = 0
    while pos < csize:
        out += bitflip_lut[(mint >> pos) & mask] << pos
        pos += symbol_bits
    return out
def read_bitlist(bitlist):
    """Interpret `bitlist` (a sequence of 0/1 digits, MSB first) as an integer."""
    value = 0
    size = len(bitlist)
    for pos, bit in enumerate(bitlist):
        if int(bit) == 1:
            value += 1 << (size - pos - 1)
    return value
def read_big_bitlist(bitlist):
    """
    Pack a 0/1 bit string (MSB first) into a list of 64-bit integer words.

    The final word holds the remaining `len(bitlist) % 64` bits left-aligned
    in the high bits. NOTE: it is appended even when there is no remainder
    (yielding a trailing 0 word) — historical behavior, preserved here.
    """
    ret = []
    # FIX: use floor division. Under Python 3, `len(bitlist) / 64` is a float
    # and range() raises TypeError (this is Python-2-era code).
    for j in range(0, len(bitlist) // 64):
        res = 0
        for i in range(0, 64):
            if int(bitlist[j * 64 + i]) == 1:
                res += 1 << (64 - i - 1)
        ret.append(res)
    res = 0
    j = 0
    for i in range(len(bitlist) % 64):
        if int(bitlist[len(ret) * 64 + i]) == 1:
            res += 1 << (64 - j - 1)
        j += 1
    ret.append(res)
    return ret
def generate_symmetries(symlist):
    """
    Return all cyclic rotations of symlist[0], then all rotations again after
    a half-swap pass over it. Returns [] unless symlist has exactly one entry.

    NOTE(review): `invlist` aliases symlist[0], so the swap pass mutates the
    caller's list in place, and the second half of each swap reads an
    already-overwritten value. That looks unintended, but the behavior is
    preserved here — confirm the intended symmetry before changing it.
    """
    retlist = []
    if len(symlist) == 1:
        size = len(symlist[0])
        for i in range(size):
            retlist.append(symlist[0][i:] + symlist[0][0:i])
        invlist = symlist[0]
        # FIX: use floor division. Under Python 3, `size / 2` is a float and
        # breaks both range() and list indexing with TypeError.
        half = size // 2
        for i in range(1, half):
            invlist[i] = symlist[0][i + half]
            invlist[i + half] = symlist[0][i]
        for i in range(size):
            retlist.append(symlist[0][i:] + symlist[0][0:i])
    return retlist
|
lector | settingsdialog | # This file is a part of Lector, a Qt based ebook reader
# Copyright (C) 2017-2019 BasioMeusPuga
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# TODO
# Get Cancel working with the file system model
import copy
import logging
import os
import pathlib
from lector import database
from lector.annotations import AnnotationsUI
from lector.logger import VERSION, logger_filename
from lector.models import MostExcellentFileSystemModel
from lector.resources import settingswindow
from lector.settings import Settings
from lector.threaded import BackGroundBookAddition, BackGroundBookSearch
from PyQt5 import QtCore, QtGui, QtWidgets
logger = logging.getLogger(__name__)
class SettingsUI(QtWidgets.QDialog, settingswindow.Ui_Dialog):
    """Settings dialog: library paths, feature switches, annotations, and the about/log pages."""

    def __init__(self, parent=None):
        """Build the dialog. `parent` is the main window whose settings dict is edited in place."""
        super(SettingsUI, self).__init__()
        self.setupUi(self)
        self.verticalLayout_4.setContentsMargins(0, 0, 0, 0)
        self._translate = QtCore.QCoreApplication.translate
        self.main_window = parent
        self.database_path = self.main_window.database_path
        self.image_factory = self.main_window.QImageFactory
        # The annotation dialog will use the settings dialog as its parent
        self.annotationsDialog = AnnotationsUI(self)
        # Restore the geometry saved by resizeEvent()
        self.resize(self.main_window.settings["settings_dialog_size"])
        self.move(self.main_window.settings["settings_dialog_position"])
        # Locate about.html relative to the installed package and inject the
        # running version into it for the About page
        install_dir = os.path.realpath(__file__)
        install_dir = pathlib.Path(install_dir).parents[1]
        aboutfile_path = os.path.join(install_dir, "lector", "resources", "about.html")
        with open(aboutfile_path) as about_html:
            html = about_html.readlines()
            html.insert(8, f'<h3 style="text-align: center;">v{VERSION}</h3>\n')
            self.aboutBox.setHtml("".join(html))
        # Filled in later by generate_tree() / start_library_scan() / showEvent()
        self.paths = None
        self.thread = None
        self.filesystemModel = None
        self.tag_data_copy = None
        # Dictionary language selector; index order must match
        # change_dictionary_language()'s mapping
        english_string = self._translate("SettingsUI", "English")
        spanish_string = self._translate("SettingsUI", "Spanish")
        hindi_string = self._translate("SettingsUI", "Hindi")
        languages = [english_string, spanish_string, hindi_string]
        self.languageBox.addItems(languages)
        current_language = self.main_window.settings["dictionary_language"]
        if current_language == "en":
            self.languageBox.setCurrentIndex(0)
        elif current_language == "es":
            self.languageBox.setCurrentIndex(1)
        else:
            self.languageBox.setCurrentIndex(2)
        self.languageBox.activated.connect(self.change_dictionary_language)
        self.okButton.setToolTip(
            self._translate("SettingsUI", "Save changes and start library scan")
        )
        self.okButton.clicked.connect(self.start_library_scan)
        self.cancelButton.clicked.connect(self.cancel_pressed)
        # Radio buttons
        if self.main_window.settings["icon_theme"] == "DarkIcons":
            self.darkIconsRadio.setChecked(True)
        else:
            self.lightIconsRadio.setChecked(True)
        self.darkIconsRadio.clicked.connect(self.change_icon_theme)
        self.lightIconsRadio.clicked.connect(self.change_icon_theme)
        # Check boxes — initial state mirrors the settings dict
        self.autoTags.setChecked(self.main_window.settings["auto_tags"])
        self.coverShadows.setChecked(self.main_window.settings["cover_shadows"])
        self.refreshLibrary.setChecked(self.main_window.settings["scan_library"])
        self.fileRemember.setChecked(self.main_window.settings["remember_files"])
        self.performCulling.setChecked(self.main_window.settings["perform_culling"])
        self.cachingEnabled.setChecked(self.main_window.settings["caching_enabled"])
        self.hideScrollBars.setChecked(self.main_window.settings["hide_scrollbars"])
        self.attenuateTitles.setChecked(self.main_window.settings["attenuate_titles"])
        self.navBarVisible.setChecked(self.main_window.settings["nav_bar"])
        self.autoCover.setChecked(self.main_window.settings["auto_cover"])
        self.scrollSpeedSlider.setValue(self.main_window.settings["scroll_speed"])
        self.readAtPercent.setValue(self.main_window.settings["consider_read_at"])
        self.smallIncrementBox.setValue(self.main_window.settings["small_increment"])
        self.largeIncrementBox.setValue(self.main_window.settings["large_increment"])
        # All checkboxes share a single slot; it dispatches on objectName()
        self.autoTags.clicked.connect(self.manage_checkboxes)
        self.coverShadows.clicked.connect(self.manage_checkboxes)
        self.refreshLibrary.clicked.connect(self.manage_checkboxes)
        self.fileRemember.clicked.connect(self.manage_checkboxes)
        self.performCulling.clicked.connect(self.manage_checkboxes)
        self.cachingEnabled.clicked.connect(self.manage_checkboxes)
        self.hideScrollBars.clicked.connect(self.manage_checkboxes)
        self.attenuateTitles.clicked.connect(self.manage_checkboxes)
        self.navBarVisible.clicked.connect(self.manage_checkboxes)
        self.autoCover.clicked.connect(self.manage_checkboxes)
        self.scrollSpeedSlider.valueChanged.connect(self.change_scroll_speed)
        self.readAtPercent.valueChanged.connect(self.change_read_at)
        self.smallIncrementBox.valueChanged.connect(self.change_increment)
        self.largeIncrementBox.valueChanged.connect(self.change_increment)
        # Generate the QStandardItemModel for the listView
        self.listModel = QtGui.QStandardItemModel(self.listView)
        library_string = self._translate("SettingsUI", "Library")
        switches_string = self._translate("SettingsUI", "Switches")
        annotations_string = self._translate("SettingsUI", "Annotations")
        about_string = self._translate("SettingsUI", "About")
        list_options = [
            library_string,
            switches_string,
            annotations_string,
            about_string,
        ]
        # Sidebar icon per page index
        icon_dict = {0: "view-readermode", 1: "switches", 2: "annotate", 3: "about"}
        for count, i in enumerate(list_options):
            item = QtGui.QStandardItem()
            item.setText(i)
            this_icon = icon_dict[count]
            item.setIcon(self.main_window.QImageFactory.get_image(this_icon))
            self.listModel.appendRow(item)
        self.listView.setModel(self.listModel)
        # Custom signal to account for page changes
        self.listView.newIndexSignal.connect(self.list_index_changed)
        # Annotation related buttons
        # Icon names
        self.newAnnotation.setIcon(self.image_factory.get_image("add"))
        self.deleteAnnotation.setIcon(self.image_factory.get_image("remove"))
        self.editAnnotation.setIcon(self.image_factory.get_image("edit-rename"))
        self.moveUp.setIcon(self.image_factory.get_image("arrow-up"))
        self.moveDown.setIcon(self.image_factory.get_image("arrow-down"))
        # Icon sizes
        self.newAnnotation.setIconSize(QtCore.QSize(24, 24))
        self.deleteAnnotation.setIconSize(QtCore.QSize(24, 24))
        self.editAnnotation.setIconSize(QtCore.QSize(24, 24))
        self.moveUp.setIconSize(QtCore.QSize(24, 24))
        self.moveDown.setIconSize(QtCore.QSize(24, 24))
        self.annotationsList.clicked.connect(self.load_annotation)
        self.annotationsList.doubleClicked.connect(self.editAnnotation.click)
        self.newAnnotation.clicked.connect(self.add_annotation)
        self.deleteAnnotation.clicked.connect(self.delete_annotation)
        self.editAnnotation.clicked.connect(self.load_annotation)
        self.moveUp.clicked.connect(self.move_annotation)
        self.moveDown.clicked.connect(self.move_annotation)
        # Generate annotation settings
        self.annotationModel = QtGui.QStandardItemModel()
        self.generate_annotations()
        # Generate the filesystem treeView
        self.generate_tree()
        # About... About
        self.aboutTabWidget.setDocumentMode(True)
        self.aboutTabWidget.setContentsMargins(0, 0, 0, 0)
        self.logBox.setReadOnly(True)
        # About buttons
        self.resetButton.clicked.connect(self.delete_database)
        self.clearLogButton.clicked.connect(self.clear_log)
        # Hide the image annotation tab
        # TODO
        # Maybe get off your lazy ass and write something for this
        self.tabWidget.setContentsMargins(0, 0, 0, 0)
        self.tabWidget.tabBar().setVisible(False)
def list_index_changed(self, index):
switch_to = index.row()
self.stackedWidget.setCurrentIndex(switch_to)
valid_buttons = {
0: (self.okButton,),
3: (self.resetButton, self.clearLogButton),
}
for i in valid_buttons:
if i == switch_to:
for j in valid_buttons[i]:
j.setVisible(True)
else:
for j in valid_buttons[i]:
j.setVisible(False)
    def generate_tree(self):
        """Build the directory treeView from library paths saved in the database."""
        # Fetch all directories in the database
        paths = database.DatabaseFunctions(self.database_path).fetch_data(
            ("Path", "Name", "Tags", "CheckState"), "directories", {"Path": ""}, "LIKE"
        )
        self.main_window.generate_library_filter_menu(paths)
        directory_data = {}
        if not paths:
            logger.warning("No book paths saved")
        else:
            # Convert to the dictionary format that is
            # to be fed into the QFileSystemModel
            for i in paths:
                directory_data[i[0]] = {"name": i[1], "tags": i[2], "check_state": i[3]}
        self.filesystemModel = MostExcellentFileSystemModel(directory_data)
        self.filesystemModel.setFilter(QtCore.QDir.NoDotAndDotDot | QtCore.QDir.Dirs)
        self.treeView.setModel(self.filesystemModel)
        # TODO
        # This here might break on them pestilent non unixy OSes
        # Check and see
        root_directory = QtCore.QDir().rootPath()
        self.treeView.setRootIndex(self.filesystemModel.setRootPath(root_directory))
        # Set the treeView and QFileSystemModel to its desired state
        selected_paths = [
            i
            for i in directory_data
            if directory_data[i]["check_state"] == QtCore.Qt.Checked
        ]
        expand_paths = set()
        for i in selected_paths:
            # Recursively grind down parent paths for expansion
            this_path = i
            while True:
                parent_path = os.path.dirname(this_path)
                # dirname() reaches a fixed point at the filesystem root
                if parent_path == this_path:
                    break
                expand_paths.add(parent_path)
                this_path = parent_path
        # Expand all the parent paths derived from the selected path
        if root_directory in expand_paths:
            expand_paths.remove(root_directory)
        for i in expand_paths:
            this_index = self.filesystemModel.index(i)
            self.treeView.expand(this_index)
        # Restore column widths for columns 0 and 4 saved by resizeEvent()
        header_sizes = self.main_window.settings["settings_dialog_headers"]
        if header_sizes:
            for count, i in enumerate((0, 4)):
                self.treeView.setColumnWidth(i, int(header_sizes[count]))
        # TODO
        # Set a QSortFilterProxy model on top of the existing QFileSystem model
        # self.filesystem_proxy_model = FileSystemProxyModel()
        # self.filesystem_proxy_model.setSourceModel(self.filesystem_model)
        # self.treeView.setModel(self.filesystem_proxy_model)
        for i in range(1, 4):
            self.treeView.hideColumn(i)
    def start_library_scan(self):
        """Persist the chosen library paths and start the background book search."""
        self.hide()
        data_pairs = []
        # Flatten the model's tag data into [path, name, tags, check_state] rows
        for i in self.filesystemModel.tag_data.items():
            data_pairs.append([i[0], i[1]["name"], i[1]["tags"], i[1]["check_state"]])
        database.DatabaseFunctions(self.database_path).set_library_paths(data_pairs)
        if not data_pairs:
            logger.error("Can't scan - No book paths saved")
            # sender() raises AttributeError when this wasn't signal-triggered
            try:
                if self.sender().objectName() == "reloadLibrary":
                    # Reload request with no paths: bounce back to the Library page
                    self.show()
                    treeViewIndex = self.listModel.index(0, 0)
                    self.listView.setCurrentIndex(treeViewIndex)
                    return
            except AttributeError:
                pass
            # No paths at all: wipe the library and rebuild empty models
            database.DatabaseFunctions(self.database_path).delete_from_database(
                "*", "*"
            )
            self.main_window.lib_ref.generate_model("build")
            self.main_window.lib_ref.generate_proxymodels()
            self.main_window.generate_library_filter_menu()
            return
        # Update the main window library filter menu
        self.main_window.generate_library_filter_menu(data_pairs)
        self.main_window.set_library_filter()
        # Disallow rechecking until the first check completes
        self.okButton.setEnabled(False)
        self.main_window.libraryToolBar.reloadLibraryButton.setEnabled(False)
        self.okButton.setToolTip(
            self._translate("SettingsUI", "Library scan in progress...")
        )
        # Traverse directories looking for files
        self.main_window.statusMessage.setText(
            self._translate("SettingsUI", "Checking library folders")
        )
        self.thread = BackGroundBookSearch(data_pairs)
        self.thread.finished.connect(self.finished_iterating)
        self.thread.start()
    def finished_iterating(self):
        """Hand the files found by the search thread over to the book-addition thread."""
        # The books the search thread has found
        # are now in self.thread.valid_files
        if not self.thread.valid_files:
            self.main_window.move_on()
            return
        # Hey, messaging is important, okay?
        self.main_window.statusBar.setVisible(True)
        self.main_window.sorterProgress.setVisible(True)
        self.main_window.statusMessage.setText(
            self._translate("SettingsUI", "Parsing files")
        )
        # We now create a new thread to put those files into the database
        self.thread = BackGroundBookAddition(
            self.thread.valid_files, self.database_path, "automatic", self.main_window
        )
        self.thread.finished.connect(
            lambda: self.main_window.move_on(self.thread.errors)
        )
        self.thread.start()
    def cancel_pressed(self):
        """Discard tag edits by restoring the snapshot taken in showEvent(), then close."""
        self.filesystemModel.tag_data = copy.deepcopy(self.tag_data_copy)
        self.hide()
    def hideEvent(self, event):
        """Qt override: persist settings whenever the dialog is hidden."""
        self.no_more_settings()
        event.accept()
    def showEvent(self, event):
        """Qt override: refresh log text, annotation preview, and the tag snapshot."""
        # Load log into the plainTextEdit
        with open(logger_filename) as infile:
            log_text = infile.read()
            self.logBox.setPlainText(log_text)
        # Annotation preview
        self.format_preview()
        # Make copy of tags in case of a nope.jpg
        self.tag_data_copy = copy.deepcopy(self.filesystemModel.tag_data)
        event.accept()
    def no_more_settings(self):
        """Save settings when the dialog closes and release the toolbar toggle."""
        self.main_window.libraryToolBar.settingsButton.setChecked(False)
        self.gather_annotations()
        Settings(self.main_window).save_settings()
        Settings(self.main_window).read_settings()
        self.main_window.settings[
            "last_open_tab"
        ] = None  # Needed to allow focus change
        # to newly opened book
        # resizeEvent doubles as the geometry snapshotter (event defaults to None)
        self.resizeEvent()
    def resizeEvent(self, event=None):
        """Qt override (also called directly with no event): snapshot dialog geometry into settings."""
        self.main_window.settings["settings_dialog_size"] = self.size()
        self.main_window.settings["settings_dialog_position"] = self.pos()
        # Only the widths of treeView columns 0 and 4 are persisted;
        # generate_tree() restores them in the same order
        table_headers = []
        for i in [0, 4]:
            table_headers.append(self.treeView.columnWidth(i))
        self.main_window.settings["settings_dialog_headers"] = table_headers
def change_icon_theme(self):
if self.sender() == self.darkIconsRadio:
self.main_window.settings["icon_theme"] = "DarkIcons"
else:
self.main_window.settings["icon_theme"] = "LightIcons"
def change_dictionary_language(self, event):
language_dict = {0: "en", 1: "es", 2: "hi"}
self.main_window.settings["dictionary_language"] = language_dict[
self.languageBox.currentIndex()
]
    def change_scroll_speed(self, event=None):
        """Store the reader scroll speed from the slider."""
        self.main_window.settings["scroll_speed"] = self.scrollSpeedSlider.value()

    def change_read_at(self, event=None):
        """Store the percentage at which a book is considered read."""
        self.main_window.settings["consider_read_at"] = self.readAtPercent.value()

    def change_increment(self, event=None):
        """Store both page-turn increment sizes (a single slot serves both spin boxes)."""
        self.main_window.settings["small_increment"] = self.smallIncrementBox.value()
        self.main_window.settings["large_increment"] = self.largeIncrementBox.value()
def manage_checkboxes(self, event=None):
sender = self.sender().objectName()
sender_dict = {
"coverShadows": "cover_shadows",
"autoTags": "auto_tags",
"refreshLibrary": "scan_library",
"fileRemember": "remember_files",
"performCulling": "perform_culling",
"cachingEnabled": "caching_enabled",
"hideScrollBars": "hide_scrollbars",
"attenuateTitles": "attenuate_titles",
"navBarVisible": "nav_bar",
"autoCover": "auto_cover",
}
self.main_window.settings[sender_dict[sender]] = not self.main_window.settings[
sender_dict[sender]
]
if not self.performCulling.isChecked():
self.main_window.cover_functions.load_all_covers()
def generate_annotations(self):
saved_annotations = self.main_window.settings["annotations"]
for i in saved_annotations:
item = QtGui.QStandardItem()
item.setText(i["name"])
item.setData(i, QtCore.Qt.UserRole)
self.annotationModel.appendRow(item)
self.annotationsList.setModel(self.annotationModel)
    def format_preview(self):
        """Render the annotation preview text using the currently selected reading profile."""
        # Needed to clear the preview of annotation ickiness
        cursor = QtGui.QTextCursor()
        self.previewView.setTextCursor(cursor)
        self.previewView.setText("Vidistine nuper imagines moventes bonas?")
        profile_index = self.main_window.bookToolBar.profileBox.currentIndex()
        current_profile = self.main_window.bookToolBar.profileBox.itemData(
            profile_index, QtCore.Qt.UserRole
        )
        if not current_profile:
            return
        font = current_profile["font"]
        self.foreground = current_profile["foreground"]
        background = current_profile["background"]
        font_size = current_profile["font_size"]
        self.previewView.setStyleSheet(
            "QTextEdit {{font-family: {0}; font-size: {1}px; color: {2}; background-color: {3}}}".format(
                font, font_size, self.foreground.name(), background.name()
            )
        )
        block_format = QtGui.QTextBlockFormat()
        block_format.setAlignment(QtCore.Qt.AlignVCenter | QtCore.Qt.AlignHCenter)
        cursor = self.previewView.textCursor()
        # Center every text block; stop once the cursor no longer advances
        while True:
            old_position = cursor.position()
            cursor.mergeBlockFormat(block_format)
            cursor.movePosition(QtGui.QTextCursor.NextBlock, 0, 1)
            new_position = cursor.position()
            if old_position == new_position:
                break
def add_annotation(self):
self.annotationsDialog.show_dialog("add")
def delete_annotation(self):
selected_index = self.annotationsList.currentIndex()
if not selected_index.isValid():
return
self.annotationModel.removeRow(self.annotationsList.currentIndex().row())
self.format_preview()
self.annotationsList.clearSelection()
def load_annotation(self):
selected_index = self.annotationsList.currentIndex()
if not selected_index.isValid():
return
if self.sender() == self.annotationsList:
self.annotationsDialog.show_dialog("preview", selected_index)
elif self.sender() == self.editAnnotation:
self.annotationsDialog.show_dialog("edit", selected_index)
def move_annotation(self):
current_row = self.annotationsList.currentIndex().row()
if self.sender() == self.moveUp:
new_row = current_row - 1
if new_row < 0:
return
elif self.sender() == self.moveDown:
new_row = current_row + 1
if new_row == self.annotationModel.rowCount():
return
row_out = self.annotationModel.takeRow(current_row)
self.annotationModel.insertRow(new_row, row_out)
new_index = self.annotationModel.index(new_row, 0)
self.annotationsList.setCurrentIndex(new_index)
def gather_annotations(self):
annotations_out = []
for i in range(self.annotationModel.rowCount()):
annotation_item = self.annotationModel.item(i, 0)
annotation_data = annotation_item.data(QtCore.Qt.UserRole)
annotations_out.append(annotation_data)
self.main_window.settings["annotations"] = annotations_out
def delete_database(self):
def ifcontinue(box_button):
if box_button.text() != "&Yes":
return
database_filename = os.path.join(
self.main_window.database_path, "Lector.db"
)
os.remove(database_filename)
QtWidgets.qApp.exit()
# Generate a message box to confirm deletion
confirm_deletion = QtWidgets.QMessageBox()
deletion_prompt = self._translate("SettingsUI", f"Delete database and exit?")
confirm_deletion.setText(deletion_prompt)
confirm_deletion.setIcon(QtWidgets.QMessageBox.Critical)
confirm_deletion.setWindowTitle(self._translate("SettingsUI", "Confirm"))
confirm_deletion.setStandardButtons(
QtWidgets.QMessageBox.Yes | QtWidgets.QMessageBox.No
)
confirm_deletion.buttonClicked.connect(ifcontinue)
confirm_deletion.show()
confirm_deletion.exec_()
def clear_log(self):
self.logBox.clear()
open(logger_filename, "w").close()
|
telegram | alert_group_representative | import logging
from apps.alerts.models import AlertGroup
from apps.alerts.representative import AlertGroupAbstractRepresentative
from apps.telegram.models import TelegramMessage
from apps.telegram.tasks import (
edit_message,
on_create_alert_telegram_representative_async,
)
logger = logging.getLogger(__name__)
logger.setLevel(logging.DEBUG)
class AlertGroupTelegramRepresentative(AlertGroupAbstractRepresentative):
    """Translates alert-group log records into Telegram message edits."""

    def __init__(self, log_record):
        # The AlertGroupLogRecord driving this representative
        self.log_record = log_record

    def is_applicable(self):
        """True if this record type is handled and the org has any Telegram wiring."""
        from apps.telegram.models import (
            TelegramToOrganizationConnector,
            TelegramToUserConnector,
        )

        organization = self.log_record.alert_group.channel.organization
        handler_exists = self.log_record.type in self.get_handlers_map().keys()
        telegram_org_connector = TelegramToOrganizationConnector.objects.filter(
            organization=organization
        )
        telegram_channel_configured = (
            telegram_org_connector.exists() and telegram_org_connector[0].is_configured
        )
        is_user_in_org_using_telegram = TelegramToUserConnector.objects.filter(
            user__organization=organization
        ).exists()
        # Either a configured org-level Telegram channel or at least one
        # user-level connector makes Telegram rendering worthwhile
        return handler_exists and (
            telegram_channel_configured or is_user_in_org_using_telegram
        )

    @staticmethod
    def get_handlers_map():
        """Map log record types to handler-name suffixes (see get_handler_name)."""
        from apps.alerts.models import AlertGroupLogRecord

        return {
            AlertGroupLogRecord.TYPE_ACK: "alert_group_action",
            AlertGroupLogRecord.TYPE_UN_ACK: "alert_group_action",
            AlertGroupLogRecord.TYPE_AUTO_UN_ACK: "alert_group_action",
            AlertGroupLogRecord.TYPE_RESOLVED: "alert_group_action",
            AlertGroupLogRecord.TYPE_UN_RESOLVED: "alert_group_action",
            AlertGroupLogRecord.TYPE_ACK_REMINDER_TRIGGERED: "alert_group_action",
            AlertGroupLogRecord.TYPE_SILENCE: "alert_group_action",
            AlertGroupLogRecord.TYPE_UN_SILENCE: "alert_group_action",
            AlertGroupLogRecord.TYPE_ATTACHED: "alert_group_action",
            AlertGroupLogRecord.TYPE_UNATTACHED: "alert_group_action",
        }

    # Process all alert group actions (ack, resolve, etc.)
    def on_alert_group_action(self):
        """Queue edits for every Telegram message attached to the acted-on alert group."""
        messages_to_edit = self.log_record.alert_group.telegram_messages.filter(
            message_type__in=(
                TelegramMessage.ALERT_GROUP_MESSAGE,
                TelegramMessage.ACTIONS_MESSAGE,
                TelegramMessage.PERSONAL_MESSAGE,
            )
        )
        for message in messages_to_edit:
            edit_message.delay(message_pk=message.pk)

    @classmethod
    def on_alert_group_update_log_report(cls, **kwargs):
        """Signal handler: refresh log-bearing Telegram messages for an alert group."""
        logger.info("AlertGroupTelegramRepresentative UPDATE LOG REPORT SIGNAL")
        alert_group = kwargs["alert_group"]
        # The signal may deliver either a model instance or a bare pk
        if not isinstance(alert_group, AlertGroup):
            alert_group = AlertGroup.objects.get(pk=alert_group)
        messages_to_edit = alert_group.telegram_messages.filter(
            message_type__in=(
                TelegramMessage.LOG_MESSAGE,
                TelegramMessage.PERSONAL_MESSAGE,
            )
        )
        for message in messages_to_edit:
            edit_message.delay(message_pk=message.pk)

    @classmethod
    def on_alert_group_action_triggered(cls, **kwargs):
        """Signal handler: dispatch a log record to its matching handler, if applicable."""
        from apps.alerts.models import AlertGroupLogRecord

        log_record = kwargs["log_record"]
        logger.info(
            f"AlertGroupTelegramRepresentative ACTION SIGNAL, log record {log_record}"
        )
        # The signal may deliver either a model instance or a bare pk
        if not isinstance(log_record, AlertGroupLogRecord):
            log_record = AlertGroupLogRecord.objects.get(pk=log_record)
        instance = cls(log_record)
        if instance.is_applicable():
            handler = instance.get_handler()
            handler()

    @staticmethod
    def on_create_alert(**kwargs):
        """Signal handler: render a newly created alert asynchronously."""
        alert_pk = kwargs["alert"]
        on_create_alert_telegram_representative_async.apply_async((alert_pk,))

    def get_handler(self):
        """Resolve the bound handler method for this log record, or a no-op fallback."""
        handler_name = self.get_handler_name()
        logger.info(f"Using '{handler_name}' handler to process action signal")
        if hasattr(self, handler_name):
            handler = getattr(self, handler_name)
        else:
            handler = self.on_handler_not_found
        return handler

    def get_handler_name(self):
        # HANDLER_PREFIX presumably comes from AlertGroupAbstractRepresentative
        # (not visible here) — TODO confirm
        return self.HANDLER_PREFIX + self.get_handlers_map()[self.log_record.type]

    @classmethod
    def on_handler_not_found(cls):
        """Fallback for record types with no matching handler method: do nothing."""
        pass
|
src | gui | # MusicPlayer, https://github.com/albertz/music-player
# Copyright (c) 2012, Albert Zeyer, www.az2000.de
# All rights reserved.
# This code is under the 2-clause BSD license, see License.txt in the root directory of this project.
from __future__ import print_function
import sys
import appinfo
from TaskSystem import do_in_mainthread
from utils import safe_property
# define fallback
def main():
    """Fallback GUI entry point; a real implementation is bound below once a backend loads."""
    raise NotImplementedError
def guiMain():
    """Fallback GUI main loop: intentionally does nothing."""
    return None
def locateFile(filename):
    """Fallback: print the filename instead of revealing it in a GUI file manager."""
    # FIX: this module only does `from utils import safe_property`, so the
    # bare name `utils` was unbound here and calling this fallback raised
    # NameError. Import the module locally.
    import utils

    print("locateFile", utils.convertToUnicode(filename).encode("utf-8"))
def about():
    """Open the MusicPlayer project page in the default web browser."""
    import webbrowser

    webbrowser.open("http://albertz.github.io/music-player/")
# Pick the default GUI backend per platform: Cocoa on macOS, Qt elsewhere.
if sys.platform == "darwin":
    defaultGui = "cocoa"
else:
    defaultGui = "qt"

# Command-line override; appinfo.args.gui appears to be a list when given
# (first element is used) — TODO confirm against the argument parser.
selectedGui = appinfo.args.gui
if not selectedGui:
    selectedGui = defaultGui
else:
    selectedGui = selectedGui[0]

if selectedGui == "none":
    # Headless mode: replace the fallback main() defined above.
    def main():
        print("No GUI.")

else:
    # Import the chosen backend; star-imports rebind main()/guiMain()/etc.
    # with the backend's implementations.
    try:
        if selectedGui == "cocoa":
            from _gui import *
            from guiCocoa import *
        elif selectedGui == "qt":
            from _gui import *
            from guiQt import *
        elif selectedGui == "html":
            from guiHtml import *
        else:
            print("Error: Unknown GUI: %r" % selectedGui)
    except Exception:
        # Keep running with the fallbacks; report the import failure.
        print("error in loading GUI implementation")
        sys.excepthook(*sys.exc_info())
class _GuiObject:
    """Base mixin for backend GUI control objects.

    A gui object wraps a ``subjectObject`` (the user object being displayed)
    together with the `UserAttrib` descriptor (``attr``) it was created for.
    Gui objects form a tree via ``parent``/``childs`` and a grid via the
    left/right/top/bottom neighbor links which `layout()` / `layoutLine()`
    use for positioning.
    """

    def __repr__(self):
        return "<%s %r %r>" % (self.__class__.__name__, self.subjectObject, self.attr)

    @safe_property
    @property
    def name(self):
        # Dotted path from the root subject object down to this control,
        # e.g. "MainWindow.someList.someButton".
        name = ""
        obj = self
        while True:
            if obj.parent:
                name = "." + obj.attr.name + name
                obj = obj.parent
            else:
                name = obj.subjectObject.__class__.__name__ + name
                break
        return name

    def allParents(self):
        """Yield self and then every parent up to the root."""
        obj = self
        while obj:
            yield obj
            obj = obj.parent

    def childIter(self):
        """Iterate over all direct child gui objects."""
        # Fix: dict.itervalues() is Python 2 only. The rest of this module
        # already uses Python 3 idioms (print() calls, dict.values()).
        return iter(self.childs.values())

    def updateSubjectObject(self):
        """Re-read our subject object from the parent and hook its update event."""
        if self.parent:
            self.subjectObject = self.attr.__get__(self.parent.subjectObject)
        if getattr(self.subjectObject, "_updateEvent", None):
            # Always refresh on the main thread; GUI toolkits are not
            # thread-safe.
            self._updateHandler = lambda: do_in_mainthread(
                self.updateContent, wait=False
            )
            getattr(self.subjectObject, "_updateEvent").register(self._updateHandler)

    def updateChild(self, child):
        """Run the child's attr update handler (if any), then refresh the child."""
        if child.attr and child.attr.updateHandler:
            try:
                child.attr.updateHandler(self.subjectObject, child.attr)
            except Exception:
                # A broken handler must not take down the whole GUI refresh.
                sys.excepthook(*sys.exc_info())
        child.updateContent()

    def updateContent(self):
        """Refresh our subject object and recursively update all children."""
        self.updateSubjectObject()
        for control in self.childIter():
            self.updateChild(control)

    def guiObjectsInLine(self):
        """Yield all gui objects in our horizontal line, left to right."""
        # First rewind to the leftmost object of the line...
        obj = self
        while True:
            if not getattr(obj, "leftGuiObject", None):
                break
            obj = obj.leftGuiObject
        # ...then walk right until the line ends.
        while obj:
            yield obj
            obj = getattr(obj, "rightGuiObject", None)

    def layoutLine(self):
        """
        In the parent, it searches for all objects which are in (horizontal) line
        with us (via `guiObjectsInLine`). It then layouts their x-pos and sets
        the autoresize mask on those controls.
        """
        line = list(self.guiObjectsInLine())
        minY = min([control.pos[1] for control in line])
        maxH = max([control.size[1] for control in line])
        # Set x-pos from left to right.
        # XXX: Haven't we done this already in setupChilds()?
        x = self.parent.OuterSpace[0]
        for control in line:
            spaceX = self.parent.DefaultSpace[0]
            if control.attr.spaceX is not None:
                spaceX = control.attr.spaceX
            w, h = control.size
            # Center each control vertically within the line.
            y = minY + (maxH - h) / 2.0
            control.pos = (x, y)
            x += w + spaceX
        # Search the variable-width-control.
        varWidthControl = None
        for control in line:
            if control.attr.variableWidth:
                varWidthControl = control
                break
        if not varWidthControl:
            varWidthControl = line[-1]
            if varWidthControl.attr.variableWidth is False:
                # It explicitly doesn't want to be of variable size.
                # We can return because there is nothing to do anymore.
                return
        # Set x-pos from right to left until we reach the variable-width
        # control, which then absorbs all remaining space.
        x = self.parent.innerSize[0] - self.parent.OuterSpace[0]
        for control in reversed(line):
            w, h = control.size
            y = control.pos[1]
            if control is varWidthControl:
                w = x - control.pos[0]
                x = control.pos[0]
                control.pos = (x, y)
                control.size = (w, h)
                # Width (index 2) follows the parent's resizing.
                control.autoresize = (
                    control.autoresize[:2] + (True,) + control.autoresize[3:]
                )
                control.layout()
                break
            else:
                x -= w
                control.pos = (x, y)
                control.size = (w, h)
                # X-pos (index 0) follows the parent's resizing.
                control.autoresize = (True,) + control.autoresize[1:]
                spaceX = self.parent.DefaultSpace[0]
                if control.attr.spaceX is not None:
                    spaceX = control.attr.spaceX
                x -= spaceX

    def childGuiObjectsInColumn(self):
        """Yield the leading control of every line, top to bottom."""
        obj = self.firstChildGuiObject
        while obj:
            yield obj
            # Skip to the end of the current line...
            while getattr(obj, "rightGuiObject", None):
                obj = obj.rightGuiObject
            # ...and continue with the control below it.
            obj = getattr(obj, "bottomGuiObject", None)

    def layout(self):
        """
        This layouts all the child controls according to our size,
        and sets its autoresize mask.
        In this function itself, we handle the variable-height-control,
        and we call `layoutLine()` to handle the variable-width-controls.
        """
        lastVertControls = list(self.childGuiObjectsInColumn())
        if not lastVertControls:
            return
        # Search variable-height-control.
        varHeightControl = None
        for control in lastVertControls:
            if control.attr.variableHeight:
                varHeightControl = control
                break
        if not varHeightControl:
            varHeightControl = lastVertControls[-1]
            if varHeightControl.attr.variableHeight is False:
                # It explicitly doesn't want to be of variable size.
                varHeightControl = None
        # Set y-pos from top to bottom, until we get to the varHeightControl.
        # XXX: Exactly this is already done in setupChilds, isn't it?
        # (Intentionally disabled; kept as reference.)
        if False:
            y = self.OuterSpace[1]
            for control in lastVertControls:
                if control is varHeightControl:
                    break
                x = control.pos[0]
                control.pos = (x, y)
                if control.attr.spaceY is not None:
                    y += control.attr.spaceY
                else:
                    y += self.DefaultSpace[1]
                y += control.size[1]
        if varHeightControl:
            # Set y-pos from bottom to top, until we get to the varHeightControl,
            # which then absorbs all remaining vertical space.
            y = self.innerSize[1] - self.OuterSpace[1]
            for control in reversed(lastVertControls):
                w, h = control.size
                x = control.pos[0]
                if control is varHeightControl:
                    h = y - control.pos[1]
                    y = control.pos[1]
                    control.pos = (x, y)
                    control.size = (w, h)
                    # Height (index 3) follows the parent's resizing.
                    control.autoresize = control.autoresize[0:3] + (True,)
                    # The size has changed, thus update its layout.
                    control.layout()
                    break
                else:
                    y -= h
                    for lineControl in control.guiObjectsInLine():
                        lineControl.pos = (lineControl.pos[0], y)
                        # Y-pos (index 1) follows the parent's resizing.
                        lineControl.autoresize = (
                            lineControl.autoresize[0:1]
                            + (True,)
                            + lineControl.autoresize[2:4]
                        )
                    y -= self.DefaultSpace[1]
        for control in lastVertControls:
            control.layoutLine()
        # If we are not auto-resizable in height,
        # set our own height according to the last control.
        if not self.autoresize[3]:
            w, h = self.size
            lastCtr = lastVertControls[-1]
            h = lastCtr.pos[1] + lastCtr.size[1]
            self.size = (w, h)

    # First control in the first line; entry point for the neighbor-link grid.
    firstChildGuiObject = None
    childs = {}  # (attrName -> guiObject) map. this might change...

    def setupChilds(self):
        """
        If this is a container (a generic object), this creates + setups the child controls.
        It does some initial layouting, also to calculate a size-indication, which is then returned.
        However, you can set another size after it and you are supposed to call `layout()`
        in the end.
        """
        # self.updateSubjectObject()  # XXX: make it explicit? break simple list interface
        self.firstChildGuiObject = None
        self.childs = {}
        x, y = self.OuterSpace
        maxX, maxY = 0, 0
        lastControl = None
        from UserAttrib import iterUserAttribs
        for attr in iterUserAttribs(self.subjectObject):
            try:
                control = buildControl(attr, self)
            except NotImplementedError as e:
                print(e)
                # Skip this control and continue. The rest of the GUI might still be usable.
                continue
            if not self.firstChildGuiObject:
                self.firstChildGuiObject = control
            if attr.hasUpdateEvent():
                # Default-arg binding captures the current `control`; a plain
                # closure would late-bind to the final loop value.
                def controlUpdateHandler(control=control):
                    do_in_mainthread(lambda: self.updateChild(control), wait=False)
                control._updateHandler = controlUpdateHandler
                attr.updateEvent(self.subjectObject).register(control._updateHandler)
            self.addChild(control)
            self.childs[attr.name] = control
            spaceX, spaceY = self.DefaultSpace
            if attr.spaceX is not None:
                spaceX = attr.spaceX
            if attr.spaceY is not None:
                spaceY = attr.spaceY
            if attr.alignRight and lastControl:  # align next right
                x = lastControl.pos[0] + lastControl.size[0] + spaceX
                # y from before
                control.leftGuiObject = lastControl
                if lastControl:
                    lastControl.rightGuiObject = control
            elif lastControl:  # align next below
                x = self.OuterSpace[0]
                y = maxY + spaceY
                control.topGuiObject = lastControl
                if lastControl:
                    lastControl.bottomGuiObject = control
            else:  # very first
                pass
            control.pos = (x, y)
            control.autoresize = (
                False,
                False,
                False,
                False,
            )  # initial, might get changed in `layout()`
            lastControl = control
            maxX = max(maxX, control.pos[0] + control.size[0])
            maxY = max(maxY, control.pos[1] + control.size[1])
            control.updateContent()
        # Recalculate layout based on current size and variable width/height controls.
        # Note that there are some cases where this recalculation is not needed,
        # but its much easier to just call it always now.
        self.layout()
        # Handy for now. This return might change.
        return (maxX + self.OuterSpace[0], maxY + self.OuterSpace[1])
def handleApplicationQuit():
    """
    Depending on the environment, this might be called multiple times.
    It should do some cleanup and save the DBs and such.
    Once this gets called, the app is not expected to be in a
    functional state anymore.
    This is normally registered via `atexit.register()` in `main()`.
    """
    # Call sys.exitfunc() manually here now to ensure that we
    # handled all that.
    # NOTE(review): sys.exitfunc only exists on Python 2; on Python 3 this
    # hasattr() check is always False — confirm whether it is still needed.
    if hasattr(sys, "exitfunc"):
        sysExitFunc, sys.exitfunc = sys.exitfunc, None
        sysExitFunc()
    import utils
    # utils.quit: 0 = running, 1 = quit in progress, 2 = quit finished.
    if utils.quit > 1:
        return  # Already called before.
    utils.quit = 1
    # first set/send signals to all modules
    from ModuleSystem import modules
    for m in modules:
        m.stop(join=False)
    try:
        # in case there are any subprocesses, interrupt them
        # maybe some modules are hanging and waiting for such
        import signal
        os.kill(0, signal.SIGINT)
    except KeyboardInterrupt:
        pass  # well, we expect that...
    except Exception:
        pass
    # now join all
    for m in modules:
        m.stop()
    # Do some cleanup before we let Python do the final cleanup.
    # E.g., it might happen that Python will not GC the player instance
    # soon enough in its `Py_Finalize()`. In that situation, bad things
    # will happen, because most probably, the player instances worker
    # thread is still running in the background. This most probably
    # leads to a crash.
    RootObjs.clear()
    try:
        ctx().rootObjs.clear()
    except Exception:
        pass  # might already be out of scope
    import State
    State.state = None
    import gc
    # Several passes so that reference cycles are fully collected.
    for _ in range(3):
        gc.collect()
    utils.quit = 2
    print("Bye!")
# On Mac/Win/Linux, these are the windows.
# Maps name -> RootObj/CtxRootObj descriptor; filled via registerRootObj()
# and registerCtxRootObj().
RootObjs = {}
class RootObj(object):
    """Descriptor for one top-level GUI object (i.e. one window).

    Keyword arguments passed to the constructor override the class-level
    defaults; unknown or underscore-prefixed keys are rejected.
    """

    obj = None          # the wrapped user object shown in the window
    guiObj = None       # the backend gui object, set once created
    name = "Object"     # unique registry key
    title = None        # window title; defaults to `name`
    priority = -10      # ordering key, higher shows first
    keyShortcut = None  # optional keyboard shortcut

    def __init__(self, **kwargs):
        for attrName, attrValue in kwargs.items():
            # Only attributes declared on the class may be set.
            if not hasattr(self, attrName):
                raise AttributeError("%s invalid" % attrName)
            # Private attributes must not be overridden from outside.
            if attrName.startswith("_"):
                raise AttributeError("%s is read-only" % attrName)
            setattr(self, attrName, attrValue)
        if self.title is None:
            self.title = self.name
        # Subclasses (e.g. CtxRootObj) may resolve `obj` lazily instead.
        if self.__class__ is RootObj and self.obj is None:
            raise AttributeError("obj must be set")
def registerRootObj(**kwargs):
    """Create a RootObj from *kwargs* and register it globally by name."""
    rootObj = RootObj(**kwargs)
    RootObjs[rootObj.name] = rootObj
class CtxRootObj(RootObj):
    """Context-bound RootObj: the actual object is created lazily per ctx()."""
    clazz = None  # class instantiated (with ctx=...) on first access
    @property
    def _rootObj(self):
        # Return, and create on demand, the per-context RootObj for us.
        c = ctx()
        if self.name in c.rootObjs:
            return c.rootObjs[self.name]
        obj = self.clazz(ctx=c)
        # NOTE(review): this getattr() evaluates the `obj`/`guiObj`
        # properties defined below while collecting attribs, which calls
        # back into _rootObj before c.rootObjs[self.name] is assigned —
        # this looks like it could recurse without bound; confirm against
        # actual usage.
        attribs = dict(
            [(key, getattr(self, key)) for key in dir(self) if not key.startswith("_")]
        )
        del attribs["obj"]
        # NOTE(review): `clazz` remains in attribs, but plain RootObj has no
        # such attribute, so RootObj(**attribs) would raise "clazz invalid"
        # in __init__ — verify this path is actually exercised.
        rootObj = RootObj(obj=obj, **attribs)
        c.rootObjs[self.name] = rootObj
        return rootObj
    @property
    def obj(self):
        # The lazily created per-context object.
        return self._rootObj.obj
    @property
    def guiObj(self):
        # The per-context gui object (stored on the per-context RootObj).
        return self._rootObj.guiObj
    @guiObj.setter
    def guiObj(self, value):
        self._rootObj.guiObj = value
def registerCtxRootObj(**kwargs):
    """Create a context-bound CtxRootObj from *kwargs* and register it by name."""
    ctxRootObj = CtxRootObj(**kwargs)
    RootObjs[ctxRootObj.name] = ctxRootObj
def iterRootObjs():
    """Return all registered root objects as a list, highest priority first."""
    return sorted(RootObjs.values(), key=lambda rootObj: rootObj.priority, reverse=True)
# This function is later supposed to give the right gui context
# depending where we call it from. This can maybe be managed/set via
# contextlib or so.
# The context itself is supposed to provide objects like window list,
# current selected song (needed for SongEdit), etc.
# Right now, we just support a single context.
def ctx():
    """Return the global GUI context (lazily created singleton)."""
    global _ctx
    if _ctx:
        return _ctx
    from Events import Event
    from utils import initBy
    class Ctx(object):
        # context-based root objects. via registerCtxRootObj()
        @initBy
        def rootObjs(self):
            return {}
        @property
        def curSelectedSong(self):
            song = getattr(self, "_curSelectedSong", None)
            if song:
                return song
            # otherwise fall back to current song.
            # it's better to have always one so if the window is created,
            # the layout is correct.
            # this is actually a hack: better would be if the re-layouting
            # would work correctly in that case. we really should work
            # out some generic nice and clean update-handling...
            import State
            return State.state.curSong
        @curSelectedSong.setter
        def curSelectedSong(self, obj):
            self._curSelectedSong = obj
            # Notify listeners (e.g. the SongEdit window) about the change.
            self.curSelectedSong_updateEvent.push()
        @initBy
        def curSelectedSong_updateEvent(self):
            return Event()
    _ctx = Ctx()
    return _ctx
# Singleton instance; created on first ctx() call.
_ctx = None
|
webkit | webkittab | # SPDX-FileCopyrightText: Florian Bruhin (The Compiler) <mail@qutebrowser.org>
#
# SPDX-License-Identifier: GPL-3.0-or-later
"""Wrapper over our (QtWebKit) WebView."""
import functools
import re
import xml.etree.ElementTree
from typing import Iterable, Optional, cast
from qutebrowser.browser import browsertab, shared
from qutebrowser.browser.webkit import (
tabhistory,
webkitelem,
webkitinspector,
webkitsettings,
webpage,
webview,
)
from qutebrowser.browser.webkit.network import networkmanager
from qutebrowser.keyinput import modeman
from qutebrowser.qt import sip
from qutebrowser.qt.core import QPoint, QSize, QSizeF, Qt, QTimer, QUrl, pyqtSlot
from qutebrowser.qt.gui import QIcon
# pylint: enable=no-name-in-module
from qutebrowser.qt.printsupport import QPrinter
from qutebrowser.qt.webkit import QWebElement, QWebHistory, QWebSettings
# pylint: disable=no-name-in-module
from qutebrowser.qt.webkitwidgets import QWebFrame, QWebPage
from qutebrowser.qt.widgets import QWidget
from qutebrowser.utils import debug, log, qtutils, resources, usertypes, utils
class WebKitAction(browsertab.AbstractAction):

    """QtWebKit implementations related to web actions."""

    action_base = QWebPage.WebAction

    _widget: webview.WebView

    def exit_fullscreen(self):
        raise browsertab.UnsupportedOperationError

    def save_page(self):
        """Save the current page."""
        raise browsertab.UnsupportedOperationError

    def show_source(self, pygments=False):
        self._show_source_pygments()

    def run_string(self, name: str) -> None:
        """Add special cases for new API.

        Those were added to QtWebKit 5.212 (which we enforce), but we don't get
        the new API from PyQt. Thus, we'll need to use the raw numbers.
        """
        special_actions = {
            # https://github.com/qtwebkit/qtwebkit/commit/a96d9ef5d24b02d996ad14ff050d0e485c9ddc97
            "RequestClose": QWebPage.WebAction.ToggleVideoFullscreen + 1,
            # https://github.com/qtwebkit/qtwebkit/commit/96b9ba6269a5be44343635a7aaca4a153ea0366b
            "Unselect": QWebPage.WebAction.ToggleVideoFullscreen + 2,
        }
        action = special_actions.get(name)
        if action is None:
            super().run_string(name)
            return
        self._widget.triggerPageAction(action)  # type: ignore[arg-type]
class WebKitPrinting(browsertab.AbstractPrinting):

    """QtWebKit implementations related to printing."""

    _widget: webview.WebView

    def check_pdf_support(self):
        pass

    def check_preview_support(self):
        pass

    def to_pdf(self, path):
        pdf_printer = QPrinter()
        pdf_printer.setOutputFileName(str(path))
        self._widget.print(pdf_printer)
        # QtWebKit gives no error feedback here, so report success.
        self.pdf_printing_finished.emit(str(path), True)

    def to_printer(self, printer):
        self._widget.print(printer)
        # QtWebKit gives no error feedback here, so report success.
        self.printing_finished.emit(True)
class WebKitSearch(browsertab.AbstractSearch):

    """QtWebKit implementations related to searching on the page."""

    _widget: webview.WebView

    def __init__(self, tab, parent=None):
        super().__init__(tab, parent)
        self._flags = self._empty_flags()

    def _empty_flags(self):
        """Return a zeroed FindFlags value (no search options set)."""
        return QWebPage.FindFlags(0)  # type: ignore[call-overload]

    def _args_to_flags(self, reverse, ignore_case):
        """Convert search options into QWebPage find flags."""
        flags = self._empty_flags()
        if self._is_case_sensitive(ignore_case):
            flags |= QWebPage.FindFlag.FindCaseSensitively
        if reverse:
            flags |= QWebPage.FindFlag.FindBackward
        return flags

    def _call_cb(self, callback, found, text, flags, caller):
        """Call the given callback if it's non-None.

        Delays the call via a QTimer so the website is re-rendered in between.

        Args:
            callback: What to call
            found: If the text was found
            text: The text searched for
            flags: The flags searched with
            caller: Name of the caller.
        """
        found_text = "found" if found else "didn't find"
        # Removing FindWrapsAroundDocument to get the same logging as with
        # QtWebEngine
        debug_flags = debug.qflags_key(
            QWebPage,
            flags & ~QWebPage.FindFlag.FindWrapsAroundDocument,
            klass=QWebPage.FindFlag,
        )
        if debug_flags != "0x0000":
            flag_text = "with flags {}".format(debug_flags)
        else:
            flag_text = ""
        log.webview.debug(" ".join([caller, found_text, text, flag_text]).strip())
        if callback is not None:
            if caller in ["prev_result", "next_result"]:
                if found:
                    # no wrapping detection
                    cb_value = browsertab.SearchNavigationResult.found
                # Fix: use the scoped FindFlag enum like everywhere else in
                # this class; the unscoped QWebPage.FindBackward spelling is
                # not available with Qt 6 style scoped enums.
                elif flags & QWebPage.FindFlag.FindBackward:
                    cb_value = browsertab.SearchNavigationResult.wrap_prevented_top
                else:
                    cb_value = browsertab.SearchNavigationResult.wrap_prevented_bottom
            elif caller == "search":
                cb_value = found
            else:
                raise utils.Unreachable(caller)
            # Defer so the page is re-rendered before the callback runs.
            QTimer.singleShot(0, functools.partial(callback, cb_value))
        self.finished.emit(found)

    def clear(self):
        """Clear the current search and its highlights."""
        if self.search_displayed:
            self.cleared.emit()
        self.search_displayed = False
        # We first clear the marked text, then the highlights
        self._widget.findText("")
        self._widget.findText("", QWebPage.FindFlag.HighlightAllOccurrences)  # type: ignore[arg-type]

    def search(
        self,
        text,
        *,
        ignore_case=usertypes.IgnoreCase.never,
        reverse=False,
        result_cb=None,
    ):
        """Search for *text* on the page and highlight all occurrences."""
        # Don't go to next entry on duplicate search
        if self.text == text and self.search_displayed:
            log.webview.debug(
                "Ignoring duplicate search request"
                " for {}, but resetting flags".format(text)
            )
            self._flags = self._args_to_flags(reverse, ignore_case)
            return
        # Clear old search results, this is done automatically on QtWebEngine.
        self.clear()
        self.text = text
        self.search_displayed = True
        self._flags = self._args_to_flags(reverse, ignore_case)
        # We actually search *twice* - once to highlight everything, then again
        # to get a mark so we can navigate.
        found = self._widget.findText(text, self._flags)
        self._widget.findText(
            text, self._flags | QWebPage.FindFlag.HighlightAllOccurrences
        )
        self._call_cb(result_cb, found, text, self._flags, "search")

    def next_result(self, *, wrap=False, callback=None):
        """Go to the next search result."""
        self.search_displayed = True
        # The int() here makes sure we get a copy of the flags.
        flags = QWebPage.FindFlags(int(self._flags))  # type: ignore[call-overload]
        if wrap:
            flags |= QWebPage.FindFlag.FindWrapsAroundDocument
        found = self._widget.findText(self.text, flags)  # type: ignore[arg-type]
        self._call_cb(callback, found, self.text, flags, "next_result")

    def prev_result(self, *, wrap=False, callback=None):
        """Go to the previous search result (inverts the search direction)."""
        self.search_displayed = True
        # The int() here makes sure we get a copy of the flags.
        flags = QWebPage.FindFlags(int(self._flags))  # type: ignore[call-overload]
        if flags & QWebPage.FindFlag.FindBackward:
            flags &= ~QWebPage.FindFlag.FindBackward
        else:
            flags |= QWebPage.FindFlag.FindBackward
        if wrap:
            flags |= QWebPage.FindFlag.FindWrapsAroundDocument
        found = self._widget.findText(self.text, flags)  # type: ignore[arg-type]
        self._call_cb(callback, found, self.text, flags, "prev_result")
class WebKitCaret(browsertab.AbstractCaret):

    """QtWebKit implementations related to moving the cursor/selection."""

    _widget: webview.WebView

    def __init__(
        self,
        tab: "WebKitTab",
        mode_manager: modeman.ModeManager,
        parent: Optional[QWidget] = None,
    ) -> None:
        super().__init__(tab, mode_manager, parent)
        # Tracks whether we currently select nothing / normally / line-wise.
        self._selection_state = browsertab.SelectionState.none

    @pyqtSlot(usertypes.KeyMode)
    def _on_mode_entered(self, mode):
        """Enable caret browsing when caret mode is entered."""
        if mode != usertypes.KeyMode.caret:
            return
        if self._widget.hasSelection():
            self._selection_state = browsertab.SelectionState.normal
        else:
            self._selection_state = browsertab.SelectionState.none
        self.selection_toggled.emit(self._selection_state)
        settings = self._widget.settings()
        settings.setAttribute(QWebSettings.WebAttribute.CaretBrowsingEnabled, True)
        if self._widget.isVisible():
            # Sometimes the caret isn't immediately visible, but unfocusing
            # and refocusing it fixes that.
            self._widget.clearFocus()
            self._widget.setFocus(Qt.FocusReason.OtherFocusReason)
            # Move the caret to the first element in the viewport if there
            # isn't any text which is already selected.
            #
            # Note: We can't use hasSelection() here, as that's always
            # true in caret mode.
            if self._selection_state is browsertab.SelectionState.none:
                self._widget.page().currentFrame().evaluateJavaScript(
                    resources.read_file("javascript/position_caret.js")
                )

    @pyqtSlot(usertypes.KeyMode)
    def _on_mode_left(self, _mode):
        """Disable caret browsing and drop any selection on mode exit."""
        settings = self._widget.settings()
        if settings.testAttribute(QWebSettings.WebAttribute.CaretBrowsingEnabled):
            if (
                self._selection_state is not browsertab.SelectionState.none
                and self._widget.hasSelection()
            ):
                # Remove selection if it exists
                self._widget.triggerPageAction(QWebPage.WebAction.MoveToNextChar)
            settings.setAttribute(QWebSettings.WebAttribute.CaretBrowsingEnabled, False)
            self._selection_state = browsertab.SelectionState.none

    def move_to_next_line(self, count=1):
        """Move the caret (or extend the selection) one line down, count times."""
        if self._selection_state is not browsertab.SelectionState.none:
            act = QWebPage.WebAction.SelectNextLine
        else:
            act = QWebPage.WebAction.MoveToNextLine
        for _ in range(count):
            self._widget.triggerPageAction(act)
        if self._selection_state is browsertab.SelectionState.line:
            self._select_line_to_end()

    def move_to_prev_line(self, count=1):
        """Move the caret (or extend the selection) one line up, count times."""
        if self._selection_state is not browsertab.SelectionState.none:
            act = QWebPage.WebAction.SelectPreviousLine
        else:
            act = QWebPage.WebAction.MoveToPreviousLine
        for _ in range(count):
            self._widget.triggerPageAction(act)
        if self._selection_state is browsertab.SelectionState.line:
            self._select_line_to_start()

    def move_to_next_char(self, count=1):
        """Move/select one character to the right, count times (no-op in line mode)."""
        if self._selection_state is browsertab.SelectionState.normal:
            act = QWebPage.WebAction.SelectNextChar
        elif self._selection_state is browsertab.SelectionState.line:
            return
        else:
            act = QWebPage.WebAction.MoveToNextChar
        for _ in range(count):
            self._widget.triggerPageAction(act)

    def move_to_prev_char(self, count=1):
        """Move/select one character to the left, count times (no-op in line mode)."""
        if self._selection_state is browsertab.SelectionState.normal:
            act = QWebPage.WebAction.SelectPreviousChar
        elif self._selection_state is browsertab.SelectionState.line:
            return
        else:
            act = QWebPage.WebAction.MoveToPreviousChar
        for _ in range(count):
            self._widget.triggerPageAction(act)

    def move_to_end_of_word(self, count=1):
        """Move/select to the end of the current word, count times."""
        if self._selection_state is browsertab.SelectionState.normal:
            act = [QWebPage.WebAction.SelectNextWord]
            if utils.is_windows:  # pragma: no cover
                # Windows word-motion overshoots by one char; compensate.
                act.append(QWebPage.WebAction.SelectPreviousChar)
        elif self._selection_state is browsertab.SelectionState.line:
            return
        else:
            act = [QWebPage.WebAction.MoveToNextWord]
            if utils.is_windows:  # pragma: no cover
                act.append(QWebPage.WebAction.MoveToPreviousChar)
        for _ in range(count):
            for a in act:
                self._widget.triggerPageAction(a)

    def move_to_next_word(self, count=1):
        """Move/select to the start of the next word, count times."""
        if self._selection_state is browsertab.SelectionState.normal:
            act = [QWebPage.WebAction.SelectNextWord]
            if not utils.is_windows:  # pragma: no branch
                act.append(QWebPage.WebAction.SelectNextChar)
        elif self._selection_state is browsertab.SelectionState.line:
            return
        else:
            act = [QWebPage.WebAction.MoveToNextWord]
            if not utils.is_windows:  # pragma: no branch
                act.append(QWebPage.WebAction.MoveToNextChar)
        for _ in range(count):
            for a in act:
                self._widget.triggerPageAction(a)

    def move_to_prev_word(self, count=1):
        """Move/select to the start of the previous word, count times."""
        if self._selection_state is browsertab.SelectionState.normal:
            act = QWebPage.WebAction.SelectPreviousWord
        elif self._selection_state is browsertab.SelectionState.line:
            return
        else:
            act = QWebPage.WebAction.MoveToPreviousWord
        for _ in range(count):
            self._widget.triggerPageAction(act)

    def move_to_start_of_line(self):
        """Move/select to the start of the current line."""
        if self._selection_state is browsertab.SelectionState.normal:
            act = QWebPage.WebAction.SelectStartOfLine
        elif self._selection_state is browsertab.SelectionState.line:
            return
        else:
            act = QWebPage.WebAction.MoveToStartOfLine
        self._widget.triggerPageAction(act)

    def move_to_end_of_line(self):
        """Move/select to the end of the current line."""
        if self._selection_state is browsertab.SelectionState.normal:
            act = QWebPage.WebAction.SelectEndOfLine
        elif self._selection_state is browsertab.SelectionState.line:
            return
        else:
            act = QWebPage.WebAction.MoveToEndOfLine
        self._widget.triggerPageAction(act)

    def move_to_start_of_next_block(self, count=1):
        """Move/select to the start of the next block, count times."""
        if self._selection_state is not browsertab.SelectionState.none:
            act = [
                QWebPage.WebAction.SelectNextLine,
                QWebPage.WebAction.SelectStartOfBlock,
            ]
        else:
            act = [
                QWebPage.WebAction.MoveToNextLine,
                QWebPage.WebAction.MoveToStartOfBlock,
            ]
        for _ in range(count):
            for a in act:
                self._widget.triggerPageAction(a)
        if self._selection_state is browsertab.SelectionState.line:
            self._select_line_to_end()

    def move_to_start_of_prev_block(self, count=1):
        """Move/select to the start of the previous block, count times."""
        if self._selection_state is not browsertab.SelectionState.none:
            act = [
                QWebPage.WebAction.SelectPreviousLine,
                QWebPage.WebAction.SelectStartOfBlock,
            ]
        else:
            act = [
                QWebPage.WebAction.MoveToPreviousLine,
                QWebPage.WebAction.MoveToStartOfBlock,
            ]
        for _ in range(count):
            for a in act:
                self._widget.triggerPageAction(a)
        if self._selection_state is browsertab.SelectionState.line:
            self._select_line_to_start()

    def move_to_end_of_next_block(self, count=1):
        """Move/select to the end of the next block, count times."""
        if self._selection_state is not browsertab.SelectionState.none:
            act = [
                QWebPage.WebAction.SelectNextLine,
                QWebPage.WebAction.SelectEndOfBlock,
            ]
        else:
            act = [
                QWebPage.WebAction.MoveToNextLine,
                QWebPage.WebAction.MoveToEndOfBlock,
            ]
        for _ in range(count):
            for a in act:
                self._widget.triggerPageAction(a)
        if self._selection_state is browsertab.SelectionState.line:
            self._select_line_to_end()

    def move_to_end_of_prev_block(self, count=1):
        """Move/select to the end of the previous block, count times."""
        if self._selection_state is not browsertab.SelectionState.none:
            act = [
                QWebPage.WebAction.SelectPreviousLine,
                QWebPage.WebAction.SelectEndOfBlock,
            ]
        else:
            act = [
                QWebPage.WebAction.MoveToPreviousLine,
                QWebPage.WebAction.MoveToEndOfBlock,
            ]
        for _ in range(count):
            for a in act:
                self._widget.triggerPageAction(a)
        if self._selection_state is browsertab.SelectionState.line:
            self._select_line_to_start()

    def move_to_start_of_document(self):
        """Move/select to the very start of the document."""
        if self._selection_state is not browsertab.SelectionState.none:
            act = QWebPage.WebAction.SelectStartOfDocument
        else:
            act = QWebPage.WebAction.MoveToStartOfDocument
        self._widget.triggerPageAction(act)
        if self._selection_state is browsertab.SelectionState.line:
            self._select_line()

    def move_to_end_of_document(self):
        """Move/select to the very end of the document."""
        if self._selection_state is not browsertab.SelectionState.none:
            act = QWebPage.WebAction.SelectEndOfDocument
        else:
            act = QWebPage.WebAction.MoveToEndOfDocument
        self._widget.triggerPageAction(act)

    def toggle_selection(self, line=False):
        """Toggle between no / normal / line-wise selection state."""
        if line:
            self._selection_state = browsertab.SelectionState.line
            # Extend the selection to full lines on both ends: select to one
            # line boundary, flip anchor/focus, select to the other boundary,
            # then flip back to restore the original direction.
            self._select_line()
            self.reverse_selection()
            self._select_line()
            self.reverse_selection()
        elif self._selection_state is not browsertab.SelectionState.normal:
            self._selection_state = browsertab.SelectionState.normal
        else:
            self._selection_state = browsertab.SelectionState.none
        self.selection_toggled.emit(self._selection_state)

    def drop_selection(self):
        """Collapse the selection (moving one char collapses it in caret mode)."""
        self._widget.triggerPageAction(QWebPage.WebAction.MoveToNextChar)

    def selection(self, callback):
        """Pass the currently selected text to *callback*."""
        callback(self._widget.selectedText())

    def reverse_selection(self):
        """Swap anchor and focus of the current JS selection."""
        self._tab.run_js_async(
            """{
                const sel = window.getSelection();
                sel.setBaseAndExtent(
                    sel.extentNode, sel.extentOffset, sel.baseNode,
                    sel.baseOffset
                );
            }"""
        )

    def _select_line(self):
        """Expand the selection to cover the whole current line."""
        self._widget.triggerPageAction(QWebPage.WebAction.SelectStartOfLine)
        self.reverse_selection()
        self._widget.triggerPageAction(QWebPage.WebAction.SelectEndOfLine)
        self.reverse_selection()

    def _select_line_to_end(self):
        # direction of selection (if anchor is to the left or right
        # of focus) has to be checked before moving selection
        # to the end of line
        if self._js_selection_left_to_right():
            self._widget.triggerPageAction(QWebPage.WebAction.SelectEndOfLine)

    def _select_line_to_start(self):
        # Mirror of _select_line_to_end for the start of the line.
        if not self._js_selection_left_to_right():
            self._widget.triggerPageAction(QWebPage.WebAction.SelectStartOfLine)

    def _js_selection_left_to_right(self):
        """Return True iff the selection's direction is left to right."""
        return self._tab.private_api.run_js_sync(
            """
            var sel = window.getSelection();
            var position = sel.anchorNode.compareDocumentPosition(sel.focusNode);
            (!position && sel.anchorOffset < sel.focusOffset ||
                position === Node.DOCUMENT_POSITION_FOLLOWING);
            """
        )

    def _follow_selected(self, *, tab=False):
        """Open the link in the current selection (helper for follow_selected)."""
        if QWebSettings.globalSettings().testAttribute(
            QWebSettings.WebAttribute.JavascriptEnabled
        ):
            # With JS available, simply click the selection's parent node
            # (or the active element as a fallback).
            if tab:
                self._tab.data.override_target = usertypes.ClickTarget.tab
            self._tab.run_js_async(
                """
                const aElm = document.activeElement;
                if (window.getSelection().anchorNode) {
                    window.getSelection().anchorNode.parentNode.click();
                } else if (aElm && aElm !== document.body) {
                    aElm.click();
                }
                """
            )
        else:
            # Without JS, parse the selected HTML and navigate manually.
            selection = self._widget.selectedHtml()
            if not selection:
                # Getting here may mean we crashed, but we can't do anything
                # about that until this commit is released:
                # https://github.com/annulen/webkit/commit/0e75f3272d149bc64899c161f150eb341a2417af
                # TODO find a way to check if something is focused
                self._follow_enter(tab)
                return
            try:
                selected_element = xml.etree.ElementTree.fromstring(
                    "<html>{}</html>".format(selection)
                ).find("a")
            except xml.etree.ElementTree.ParseError:
                raise browsertab.WebTabError("Could not parse selected " "element!")
            if selected_element is not None:
                try:
                    href = selected_element.attrib["href"]
                except KeyError:
                    raise browsertab.WebTabError("Anchor element without " "href!")
                url = self._tab.url().resolved(QUrl(href))
                if tab:
                    self._tab.new_tab_requested.emit(url)
                else:
                    self._tab.load_url(url)

    def follow_selected(self, *, tab=False):
        """Follow the currently selected link, optionally in a new tab."""
        try:
            self._follow_selected(tab=tab)
        finally:
            self.follow_selected_done.emit()
class WebKitZoom(browsertab.AbstractZoom):

    """QtWebKit implementations related to zooming."""

    _widget: webview.WebView

    def _set_factor_internal(self, factor):
        # Apply the zoom factor directly to the webview widget.
        self._widget.setZoomFactor(factor)
class WebKitScroller(browsertab.AbstractScroller):

    """QtWebKit implementations related to scrolling."""

    # FIXME:qtwebengine When to use the main frame, when the current one?

    _widget: webview.WebView

    def pos_px(self):
        """Return the scroll position of the main frame in pixels (QPoint)."""
        return self._widget.page().mainFrame().scrollPosition()

    def pos_perc(self):
        """Return the scroll position as percentages (tracked by the widget)."""
        return self._widget.scroll_pos

    def to_point(self, point):
        """Scroll the main frame to the given pixel position."""
        self._widget.page().mainFrame().setScrollPosition(point)

    def to_anchor(self, name):
        """Scroll the main frame to the named anchor."""
        self._widget.page().mainFrame().scrollToAnchor(name)

    def delta(self, x: int = 0, y: int = 0) -> None:
        """Scroll by the given pixel offsets (overflow-checked)."""
        qtutils.check_overflow(x, "int")
        qtutils.check_overflow(y, "int")
        self._widget.page().mainFrame().scroll(x, y)

    def delta_page(self, x: float = 0.0, y: float = 0.0) -> None:
        """Scroll by the given page fractions.

        Whole-page vertical scrolls are routed through page_up/page_down
        (key presses); any remainder is scrolled in pixels.
        """
        if y.is_integer():
            y = int(y)
            if y == 0:
                pass
            elif y < 0:
                self.page_up(count=-y)
            elif y > 0:
                self.page_down(count=y)
            # Vertical part fully handled via key presses.
            y = 0
        if x == 0 and y == 0:
            # Nothing left to do.
            return
        size = self._widget.page().mainFrame().geometry()
        self.delta(int(x * size.width()), int(y * size.height()))

    def to_perc(self, x=None, y=None):
        """Scroll to the given percentages (None leaves that axis alone)."""
        if x is None and y == 0:
            self.top()
        elif x is None and y == 100:
            self.bottom()
        else:
            for val, orientation in [
                (x, Qt.Orientation.Horizontal),
                (y, Qt.Orientation.Vertical),
            ]:
                if val is not None:
                    frame = self._widget.page().mainFrame()
                    maximum = frame.scrollBarMaximum(orientation)
                    if maximum == 0:
                        continue
                    pos = int(maximum * val / 100)
                    # Clamp instead of raising on overflow.
                    pos = qtutils.check_overflow(pos, "int", fatal=False)
                    frame.setScrollBarValue(orientation, pos)

    def _key_press(self, key, count=1, getter_name=None, direction=None):
        """Scroll by faking key presses, stopping at the scrollbar limit."""
        frame = self._widget.page().mainFrame()
        getter = None if getter_name is None else getattr(frame, getter_name)
        # FIXME:qtwebengine needed?
        # self._widget.setFocus()
        # Cap the iteration count to avoid huge busy loops on absurd counts.
        for _ in range(min(count, 5000)):
            # Abort scrolling if the minimum/maximum was reached.
            if getter is not None and frame.scrollBarValue(direction) == getter(
                direction
            ):
                return
            self._tab.fake_key_press(key)

    def up(self, count=1):
        self._key_press(
            Qt.Key.Key_Up, count, "scrollBarMinimum", Qt.Orientation.Vertical
        )

    def down(self, count=1):
        self._key_press(
            Qt.Key.Key_Down, count, "scrollBarMaximum", Qt.Orientation.Vertical
        )

    def left(self, count=1):
        self._key_press(
            Qt.Key.Key_Left, count, "scrollBarMinimum", Qt.Orientation.Horizontal
        )

    def right(self, count=1):
        self._key_press(
            Qt.Key.Key_Right, count, "scrollBarMaximum", Qt.Orientation.Horizontal
        )

    def top(self):
        self._key_press(Qt.Key.Key_Home)

    def bottom(self):
        self._key_press(Qt.Key.Key_End)

    def page_up(self, count=1):
        self._key_press(
            Qt.Key.Key_PageUp, count, "scrollBarMinimum", Qt.Orientation.Vertical
        )

    def page_down(self, count=1):
        self._key_press(
            Qt.Key.Key_PageDown, count, "scrollBarMaximum", Qt.Orientation.Vertical
        )

    def at_top(self):
        """Return True if scrolled to the very top."""
        return self.pos_px().y() == 0

    def at_bottom(self):
        """Return True if scrolled to the very bottom."""
        frame = self._widget.page().currentFrame()
        return self.pos_px().y() >= frame.scrollBarMaximum(Qt.Orientation.Vertical)
class WebKitHistoryPrivate(browsertab.AbstractHistoryPrivate):

    """History-related methods which are not part of the extension API."""

    _history: QWebHistory

    def __init__(self, tab: "WebKitTab") -> None:
        self._tab = tab
        # Filled in later by the tab; the cast silences the type checker.
        self._history = cast(QWebHistory, None)

    def serialize(self):
        """Serialize the underlying QWebHistory."""
        return qtutils.serialize(self._history)

    def deserialize(self, data):
        """Restore the underlying QWebHistory from serialized data."""
        qtutils.deserialize(data, self._history)

    def load_items(self, items):
        """Load history items, restoring per-item user data, zoom and scroll."""
        if items:
            # Announce the URL we are going to end up on.
            self._tab.before_load_started.emit(items[-1].url)
        stream, _data, user_data = tabhistory.serialize(items)
        qtutils.deserialize_stream(stream, self._history)
        for i, data in enumerate(user_data):
            self._history.itemAt(i).setUserData(data)
        cur_data = self._history.currentItem().userData()
        if cur_data is not None:
            if "zoom" in cur_data:
                self._tab.zoom.set_factor(cur_data["zoom"])
            if "scroll-pos" in cur_data and self._tab.scroller.pos_px() == QPoint(0, 0):
                # Only restore scroll position if the page wasn't scrolled
                # yet; defer via the event loop so the page is rendered.
                QTimer.singleShot(
                    0,
                    functools.partial(
                        self._tab.scroller.to_point, cur_data["scroll-pos"]
                    ),
                )
class WebKitHistory(browsertab.AbstractHistory):

    """QtWebKit implementations related to page history.

    Thin delegation layer over the QWebHistory in self._history.
    """

    def __init__(self, tab):
        super().__init__(tab)
        self.private_api = WebKitHistoryPrivate(tab)

    def __len__(self):
        return len(self._history)

    def __iter__(self):
        return iter(self._history.items())

    def current_idx(self):
        """Return the index of the current history item."""
        return self._history.currentItemIndex()

    def current_item(self):
        """Return the current history item."""
        return self._history.currentItem()

    def can_go_back(self):
        return self._history.canGoBack()

    def can_go_forward(self):
        return self._history.canGoForward()

    def _item_at(self, i):
        return self._history.itemAt(i)

    def _go_to_item(self, item):
        # Announce the URL before actually navigating.
        self._tab.before_load_started.emit(item.url())
        self._history.goToItem(item)

    def back_items(self):
        """Return all history items before the current one."""
        return self._history.backItems(self._history.count())

    def forward_items(self):
        """Return all history items after the current one."""
        return self._history.forwardItems(self._history.count())
class WebKitElements(browsertab.AbstractElements):

    """QtWebKit implementations related to elements on the page."""

    _tab: "WebKitTab"
    _widget: webview.WebView

    def find_css(self, selector, callback, error_cb, *, only_visible=False):
        """Find all elements matching *selector* and pass them to *callback*.

        *error_cb* is unused: the synchronous QtWebKit API cannot fail
        asynchronously. Searches the main frame and all child frames.
        """
        utils.unused(error_cb)
        mainframe = self._widget.page().mainFrame()
        if mainframe is None:
            raise browsertab.WebTabError("No frame focused!")
        elems = []
        frames = webkitelem.get_child_frames(mainframe)
        for f in frames:
            frame_elems = cast(Iterable[QWebElement], f.findAllElements(selector))
            for elem in frame_elems:
                elems.append(webkitelem.WebKitElement(elem, tab=self._tab))
        if only_visible:
            # pylint: disable=protected-access
            elems = [e for e in elems if e._is_visible(mainframe)]
            # pylint: enable=protected-access
        callback(elems)

    def find_id(self, elem_id, callback):
        """Find the element with DOM id *elem_id* and pass it (or None) to *callback*."""
        def find_id_cb(elems):
            """Call the real callback with the found elements."""
            if not elems:
                callback(None)
            else:
                callback(elems[0])

        # Escape non-alphanumeric characters in the selector
        # https://www.w3.org/TR/CSS2/syndata.html#value-def-identifier
        elem_id = re.sub(r"[^a-zA-Z0-9_-]", r"\\\g<0>", elem_id)
        self.find_css("#" + elem_id, find_id_cb, error_cb=lambda exc: None)

    def find_focused(self, callback):
        """Pass the currently focused element (or None) to *callback*."""
        frame = cast(Optional[QWebFrame], self._widget.page().currentFrame())
        if frame is None:
            callback(None)
            return
        elem = frame.findFirstElement("*:focus")
        if elem.isNull():
            callback(None)
        else:
            callback(webkitelem.WebKitElement(elem, tab=self._tab))

    def find_at_pos(self, pos, callback):
        """Pass the element at view position *pos* (or None) to *callback*."""
        assert pos.x() >= 0
        assert pos.y() >= 0
        frame = cast(Optional[QWebFrame], self._widget.page().frameAt(pos))
        if frame is None:
            # This happens when we click inside the webview, but not actually
            # on the QWebPage - for example when clicking the scrollbar
            # sometimes.
            log.webview.debug("Hit test at {} but frame is None!".format(pos))
            callback(None)
            return
        # You'd think we have to subtract frame.geometry().topLeft() from the
        # position, but it seems QWebFrame::hitTestContent wants a position
        # relative to the QWebView, not to the frame. This makes no sense to
        # me, but it works this way.
        hitresult = frame.hitTestContent(pos)
        if hitresult.isNull():
            # For some reason, the whole hit result can be null sometimes (e.g.
            # on doodle menu links).
            log.webview.debug("Hit test result is null!")
            callback(None)
            return
        try:
            elem = webkitelem.WebKitElement(hitresult.element(), tab=self._tab)
        except webkitelem.IsNullError:
            # For some reason, the hit result element can be a null element
            # sometimes (e.g. when clicking the timetable fields on
            # https://www.sbb.ch/ ).
            log.webview.debug("Hit test result element is null!")
            callback(None)
            return
        callback(elem)
class WebKitAudio(browsertab.AbstractAudio):

    """Dummy handling of audio status for QtWebKit."""

    def set_muted(self, muted: bool, override: bool = False) -> None:
        # QtWebKit offers no mute API at all, so this always fails.
        raise browsertab.WebTabError("Muting is not supported on QtWebKit!")

    def is_muted(self):
        # Never muted, since muting is unsupported (see set_muted).
        return False

    def is_recently_audible(self):
        # Audibility tracking is likewise unsupported on QtWebKit.
        return False
class WebKitTabPrivate(browsertab.AbstractTabPrivate):

    """QtWebKit-related methods which aren't part of the public API."""

    _widget: webview.WebView

    def networkaccessmanager(self):
        """Return the QNetworkAccessManager of this tab's page."""
        return self._widget.page().networkAccessManager()

    def clear_ssl_errors(self):
        """Forget all SSL errors previously accepted for this tab."""
        self.networkaccessmanager().clear_all_ssl_errors()

    def event_target(self):
        """Return the widget that should receive synthesized input events."""
        return self._widget

    def shutdown(self):
        self._widget.shutdown()

    def run_js_sync(self, code):
        """Run JavaScript synchronously on the main frame and return the result."""
        document_element = self._widget.page().mainFrame().documentElement()
        result = document_element.evaluateJavaScript(code)
        return result

    def _init_inspector(self, splitter, win_id, parent=None):
        """Create the QtWebKit developer-tools inspector for this tab."""
        return webkitinspector.WebKitInspector(splitter, win_id, parent)
class WebKitTab(browsertab.AbstractTab):

    """A QtWebKit tab in the browser."""

    _widget: webview.WebView

    def __init__(self, *, win_id, mode_manager, private, parent=None):
        super().__init__(
            win_id=win_id, mode_manager=mode_manager, private=private, parent=parent
        )
        widget = webview.WebView(
            win_id=win_id, tab_id=self.tab_id, private=private, tab=self
        )
        # Private browsing must be enabled on the widget's settings before use.
        if private:
            self._make_private(widget)
        self.history = WebKitHistory(tab=self)
        self.scroller = WebKitScroller(tab=self, parent=self)
        self.caret = WebKitCaret(mode_manager=mode_manager, tab=self, parent=self)
        self.zoom = WebKitZoom(tab=self, parent=self)
        self.search = WebKitSearch(tab=self, parent=self)
        self.printing = WebKitPrinting(tab=self, parent=self)
        self.elements = WebKitElements(tab=self)
        self.action = WebKitAction(tab=self)
        self.audio = WebKitAudio(tab=self, parent=self)
        self.private_api = WebKitTabPrivate(mode_manager=mode_manager, tab=self)
        # We're assigning settings in _set_widget
        self.settings = webkitsettings.WebKitSettings(settings=None)
        self._set_widget(widget)
        self._connect_signals()
        self.backend = usertypes.Backend.QtWebKit

    def _install_event_filter(self):
        self._widget.installEventFilter(self._tab_event_filter)

    def _make_private(self, widget):
        """Turn on private browsing for the given widget's settings."""
        settings = widget.settings()
        settings.setAttribute(QWebSettings.WebAttribute.PrivateBrowsingEnabled, True)

    def load_url(self, url):
        self._load_url_prepare(url)
        self._widget.load(url)

    def url(self, *, requested=False):
        """Return the current URL, or the originally requested one."""
        frame = self._widget.page().mainFrame()
        if requested:
            return frame.requestedUrl()
        else:
            return frame.url()

    def dump_async(self, callback, *, plain=False):
        """Pass the main frame's content (HTML or plain text) to *callback*."""
        frame = self._widget.page().mainFrame()
        if plain:
            callback(frame.toPlainText())
        else:
            callback(frame.toHtml())

    def run_js_async(self, code, callback=None, *, world=None):
        """Run JS; QtWebKit only supports the jseval world, others are ignored."""
        if world is not None and world != usertypes.JsWorld.jseval:
            log.webview.warning("Ignoring world ID {}".format(world))
        result = self.private_api.run_js_sync(code)
        if callback is not None:
            callback(result)

    def icon(self):
        return self._widget.icon()

    def reload(self, *, force=False):
        """Reload the page; *force* bypasses the cache."""
        if force:
            action = QWebPage.WebAction.ReloadAndBypassCache
        else:
            action = QWebPage.WebAction.Reload
        self._widget.triggerPageAction(action)

    def stop(self):
        self._widget.stop()

    def title(self):
        return self._widget.title()

    def renderer_process_pid(self) -> Optional[int]:
        # QtWebKit renders in-process, so there is no separate renderer PID.
        return None

    @pyqtSlot()
    def _on_history_trigger(self):
        """Record the current page in the global browsing history."""
        url = self.url()
        requested_url = self.url(requested=True)
        self.history_item_triggered.emit(url, requested_url, self.title())

    def set_html(self, html, base_url=QUrl()):
        self._widget.setHtml(html, base_url)

    @pyqtSlot()
    def _on_load_started(self):
        super()._on_load_started()
        nam = self._widget.page().networkAccessManager()
        assert isinstance(nam, networkmanager.NetworkManager), nam
        # Reset so netrc credentials are only tried once per load.
        nam.netrc_used = False
        # Make sure the icon is cleared when navigating to a page without one.
        self.icon_changed.emit(QIcon())

    @pyqtSlot(bool)
    def _on_load_finished(self, ok: bool) -> None:
        super()._on_load_finished(ok)
        self._update_load_status(ok)

    @pyqtSlot()
    def _on_frame_load_finished(self):
        """Make sure we emit an appropriate status when loading finished.

        While Qt has a bool "ok" attribute for loadFinished, it always is True
        when using error pages... See
        https://github.com/qutebrowser/qutebrowser/issues/84
        """
        page = self._widget.page()
        assert isinstance(page, webpage.BrowserPage), page
        self._on_load_finished(not page.error_occurred)

    @pyqtSlot()
    def _on_webkit_icon_changed(self):
        """Emit iconChanged with a QIcon like QWebEngineView does."""
        if sip.isdeleted(self._widget):
            log.webview.debug("Got _on_webkit_icon_changed for deleted view!")
            return
        self.icon_changed.emit(self._widget.icon())

    @pyqtSlot(QWebFrame)
    def _on_frame_created(self, frame):
        """Connect the contentsSizeChanged signal of each frame."""
        # FIXME:qtwebengine those could theoretically regress:
        # https://github.com/qutebrowser/qutebrowser/issues/152
        # https://github.com/qutebrowser/qutebrowser/issues/263
        frame.contentsSizeChanged.connect(self._on_contents_size_changed)

    @pyqtSlot(QSize)
    def _on_contents_size_changed(self, size):
        self.contents_size_changed.emit(QSizeF(size))

    @pyqtSlot(usertypes.NavigationRequest)
    def _on_navigation_request(self, navigation):
        """Redirect link clicks to another tab when an open target is set."""
        super()._on_navigation_request(navigation)
        if not navigation.accepted:
            return
        log.webview.debug(
            "target {} override {}".format(
                self.data.open_target, self.data.override_target
            )
        )
        # A one-shot override takes precedence over the sticky open target.
        if self.data.override_target is not None:
            target = self.data.override_target
            self.data.override_target = None
        else:
            target = self.data.open_target
        if (
            navigation.navigation_type == navigation.Type.link_clicked
            and target != usertypes.ClickTarget.normal
        ):
            # Load in the target tab instead and cancel navigation here.
            tab = shared.get_tab(self.win_id, target)
            tab.load_url(navigation.url)
            self.data.open_target = usertypes.ClickTarget.normal
            navigation.accepted = False
        if navigation.is_main_frame:
            self.settings.update_for_url(navigation.url)

    @pyqtSlot("QNetworkReply*")
    def _on_ssl_errors(self, reply):
        # Remember hosts with certificate errors so they're marked insecure.
        self._insecure_hosts.add(reply.url().host())

    def _connect_signals(self):
        """Wire page/frame/view signals to the AbstractTab API."""
        view = self._widget
        page = view.page()
        frame = page.mainFrame()
        page.windowCloseRequested.connect(  # type: ignore[attr-defined]
            self.window_close_requested
        )
        page.linkHovered.connect(  # type: ignore[attr-defined]
            self.link_hovered
        )
        page.loadProgress.connect(  # type: ignore[attr-defined]
            self._on_load_progress
        )
        frame.loadStarted.connect(  # type: ignore[attr-defined]
            self._on_load_started
        )
        view.scroll_pos_changed.connect(self.scroller.perc_changed)
        view.titleChanged.connect(  # type: ignore[attr-defined]
            self.title_changed
        )
        view.urlChanged.connect(  # type: ignore[attr-defined]
            self._on_url_changed
        )
        view.shutting_down.connect(self.shutting_down)
        page.networkAccessManager().sslErrors.connect(self._on_ssl_errors)
        frame.loadFinished.connect(  # type: ignore[attr-defined]
            self._on_frame_load_finished
        )
        view.iconChanged.connect(  # type: ignore[attr-defined]
            self._on_webkit_icon_changed
        )
        page.frameCreated.connect(  # type: ignore[attr-defined]
            self._on_frame_created
        )
        frame.contentsSizeChanged.connect(  # type: ignore[attr-defined]
            self._on_contents_size_changed
        )
        frame.initialLayoutCompleted.connect(  # type: ignore[attr-defined]
            self._on_history_trigger
        )
        page.navigation_request.connect(  # type: ignore[attr-defined]
            self._on_navigation_request
        )
|
mako | compat | import sys
import time
# Interpreter/platform feature flags used throughout this compat layer.
py3k = sys.version_info >= (3, 0)
py33 = sys.version_info >= (3, 3)
py2k = sys.version_info < (3,)
py26 = sys.version_info >= (2, 6)
py27 = sys.version_info >= (2, 7)
jython = sys.platform.startswith("java")
win32 = sys.platform.startswith("win")
pypy = hasattr(sys, "pypy_version_info")
if py3k:
    # create a "getargspec" from getfullargspec(), which is not deprecated
    # in Py3K; getargspec() has started to emit warnings as of Py3.5.
    # As of Py3.4, now they are trying to move from getfullargspec()
    # to "signature()", but getfullargspec() is not deprecated, so stick
    # with that for now.
    import collections

    # Matches the first four fields of inspect.getargspec()'s return value.
    ArgSpec = collections.namedtuple(
        "ArgSpec", ["args", "varargs", "keywords", "defaults"]
    )
    from inspect import getfullargspec as inspect_getfullargspec

    def inspect_getargspec(func):
        """Return an ArgSpec(args, varargs, keywords, defaults) for *func*."""
        return ArgSpec(*inspect_getfullargspec(func)[0:4])

else:
    from inspect import getargspec as inspect_getargspec  # noqa
if py3k:
    # Python 3: native str is text; these aliases map py2-style names onto
    # the py3 standard library.
    import builtins as compat_builtins
    from html.entities import codepoint2name, name2codepoint
    from io import StringIO
    from urllib.parse import quote_plus, unquote_plus

    string_types = (str,)
    binary_type = bytes
    text_type = str
    from io import BytesIO as byte_buffer

    def u(s):
        """Return *s* as text (a no-op on Python 3)."""
        return s

    def b(s):
        """Encode *s* to bytes using latin-1 (byte-transparent)."""
        return s.encode("latin-1")

    def octal(lit):
        """Evaluate an octal literal given without its prefix, e.g. octal("777")."""
        return eval("0o" + lit)

else:
    import __builtin__ as compat_builtins  # noqa

    try:
        from cStringIO import StringIO
    except ImportError:
        # FIX: was a bare "except:". Only an import failure should select the
        # pure-Python fallback; a bare except would also hide unrelated errors.
        from StringIO import StringIO

    byte_buffer = StringIO
    from urllib import quote_plus, unquote_plus  # noqa
    from htmlentitydefs import codepoint2name, name2codepoint  # noqa

    string_types = (basestring,)  # noqa
    binary_type = str
    text_type = unicode  # noqa

    def u(s):
        """Decode *s* from utf-8 to unicode text."""
        return unicode(s, "utf-8")  # noqa

    def b(s):
        """Return *s* as bytes (a no-op on Python 2, where str is bytes)."""
        return s

    def octal(lit):
        """Evaluate an octal literal given without its prefix, e.g. octal("777")."""
        return eval("0" + lit)
if py33:
    from importlib import machinery

    def load_module(module_id, path):
        """Load and return the module at *path* under the name *module_id*."""
        # NOTE(review): SourceFileLoader.load_module() is deprecated in newer
        # Pythons in favour of exec_module(); retained here for compat parity.
        return machinery.SourceFileLoader(module_id, path).load_module()

else:
    import imp

    def load_module(module_id, path):
        """Load and return the module at *path* under the name *module_id*."""
        fp = open(path, "rb")
        try:
            return imp.load_source(module_id, path, fp)
        finally:
            # Always close the file, even if load_source() raises.
            fp.close()
if py3k:

    def reraise(tp, value, tb=None, cause=None):
        """Re-raise *value* with traceback *tb*, optionally chaining *cause*."""
        if cause is not None:
            value.__cause__ = cause
        # Avoid appending a duplicate frame when the traceback already matches.
        if value.__traceback__ is not tb:
            raise value.with_traceback(tb)
        raise value

else:
    # Python 2's three-argument raise is a syntax error on py3, so it must be
    # compiled via exec() to keep this module importable on both.
    exec("def reraise(tp, value, tb=None, cause=None):\n" " raise tp, value, tb\n")


def exception_as():
    """Return the exception instance currently being handled."""
    return sys.exc_info()[1]
# Thread support: fall back to the dummy modules on interpreters without
# real threads.
try:
    import threading

    if py3k:
        import _thread as thread
    else:
        import thread
except ImportError:
    import dummy_threading as threading  # noqa

    if py3k:
        import _dummy_thread as thread
    else:
        import dummy_thread as thread  # noqa

# Timer used for template performance measurement.
if win32 or jython:
    # FIX: time.clock() was deprecated in Python 3.3 and removed in 3.8, so
    # "time_func = time.clock" raises AttributeError at import time on modern
    # interpreters. Prefer perf_counter() when it exists (3.3+); only fall
    # back to clock() on old interpreters where perf_counter is absent.
    time_func = getattr(time, "perf_counter", None) or time.clock
else:
    time_func = time.time
try:
    from functools import partial
except ImportError:
    # FIX: was a bare "except:". functools.partial exists on every supported
    # interpreter; only an actual ImportError should select this fallback,
    # and a bare except would also swallow unrelated errors (KeyboardInterrupt
    # included on py2).

    def partial(func, *args, **keywords):
        """Minimal pure-Python reimplementation of functools.partial."""

        def newfunc(*fargs, **fkeywords):
            # Call-time keywords override the partially-applied ones.
            newkeywords = keywords.copy()
            newkeywords.update(fkeywords)
            return func(*(args + fargs), **newkeywords)

        return newfunc
# py2.4-era shim retained: "all" is simply the builtin on every supported
# interpreter, and json is importable everywhere we run.
all = all
import json  # noqa


def exception_name(exc):
    """Return the class name of the given exception instance."""
    cls = exc.__class__
    return cls.__name__
try:
    from inspect import CO_VARARGS, CO_VARKEYWORDS

    def inspect_func_args(fn):
        """Return (args, varargs, varkw, defaults) for *fn*.

        Reads the function's code object directly instead of going through
        inspect.getargspec(), which is deprecated/removed on newer Pythons.
        """
        if py3k:
            co = fn.__code__
        else:
            co = fn.func_code
        nargs = co.co_argcount
        names = co.co_varnames
        args = list(names[:nargs])
        varargs = None
        if co.co_flags & CO_VARARGS:
            # The *args name follows the positional names in co_varnames.
            varargs = co.co_varnames[nargs]
            nargs = nargs + 1
        varkw = None
        if co.co_flags & CO_VARKEYWORDS:
            # The **kwargs name comes after *args (if present).
            varkw = co.co_varnames[nargs]
        if py3k:
            return args, varargs, varkw, fn.__defaults__
        else:
            return args, varargs, varkw, fn.func_defaults

except ImportError:
    import inspect

    def inspect_func_args(fn):
        """Fallback: delegate to inspect.getargspec()."""
        return inspect.getargspec(fn)
if py3k:
    # callable() was removed in Python 3.0-3.1; emulate it via __call__.
    def callable(fn):
        """Return True if *fn* appears to be callable."""
        return hasattr(fn, "__call__")

else:
    # On Python 2 the builtin exists; re-export it under the module namespace.
    callable = callable
################################################
# cross-compatible metaclass implementation
# Copyright (c) 2010-2012 Benjamin Peterson


def with_metaclass(meta, base=object):
    """Create a base class with a metaclass.

    Works on both py2 and py3 by instantiating the metaclass directly,
    side-stepping the differing class-statement syntaxes.
    """
    name = "%sBase" % meta.__name__
    return meta(name, (base,), {})


################################################
def arg_stringname(func_arg):
    """Gets the string name of a kwarg or vararg

    In Python3.4 a function's args are
    of _ast.arg type not _ast.name
    """
    return func_arg.arg if hasattr(func_arg, "arg") else str(func_arg)
|
gui | MainWindow | """
Copyright 2008, 2009, 2011 Free Software Foundation, Inc.
This file is part of GNU Radio
SPDX-License-Identifier: GPL-2.0-or-later
"""
import logging
import os
from gi.repository import Gdk, GObject, Gtk
from ..core import Messages
from . import Actions, Bars, Utils
from .BlockTreeWindow import BlockTreeWindow
from .Console import Console
from .Constants import DEFAULT_CONSOLE_WINDOW_WIDTH, NEW_FLOGRAPH_TITLE
from .Dialogs import MessageDialogWrapper, TextDisplay
from .Notebook import Notebook, Page
from .VariableEditor import VariableEditor
# Module-level logger for the GRC main-window GUI code.
log = logging.getLogger(__name__)
############################################################
# Main window
############################################################
class MainWindow(Gtk.ApplicationWindow):
    """The topmost window with menus, the tool bar, and other major windows."""

    # Constants the action handler can use to indicate which panel visibility to change.
    BLOCKS = 0
    CONSOLE = 1
    VARIABLES = 2

    def __init__(self, app, platform):
        """
        MainWindow constructor

        Setup the menu, toolbar, flow graph editor notebook, block selection window...
        """
        Gtk.ApplicationWindow.__init__(
            self, title="GNU Radio Companion", application=app
        )
        log.debug("__init__()")
        self._platform = platform
        self.app = app
        self.config = platform.config
        # Add all "win" actions to the local
        for x in Actions.get_actions():
            if x.startswith("win."):
                self.add_action(Actions.actions[x])
        # Setup window
        vbox = Gtk.VBox()
        self.add(vbox)
        icon_theme = Gtk.IconTheme.get_default()
        icon = icon_theme.lookup_icon("gnuradio-grc", 48, 0)
        if not icon:
            # Set default window icon
            self.set_icon_from_file(
                os.path.dirname(os.path.abspath(__file__)) + "/icon.png"
            )
        else:
            # Use gnuradio icon
            self.set_icon(icon.load_icon())
        # Create the menu bar and toolbar
        log.debug("Creating menu")
        # NOTE(review): generate_modes appears unused below — confirm whether
        # the menu construction was meant to consume it.
        generate_modes = platform.get_generate_options()
        # This needs to be replaced
        # Have an option for either the application menu or this menu
        self.menu = Bars.Menu()
        self.menu_bar = Gtk.MenuBar.new_from_model(self.menu)
        vbox.pack_start(self.menu_bar, False, False, 0)
        self.tool_bar = Bars.Toolbar()
        self.tool_bar.set_hexpand(True)
        # Show the toolbar
        self.tool_bar.show()
        vbox.pack_start(self.tool_bar, False, False, 0)
        # Main parent container for the different panels
        self.main = Gtk.HPaned()  # (orientation=Gtk.Orientation.HORIZONTAL)
        vbox.pack_start(self.main, True, True, 0)
        # Create the notebook
        self.notebook = Notebook()
        self.page_to_be_closed = None
        self.current_page = None  # type: Page
        # Create the console window
        self.console = Console()
        # Create the block tree and variable panels
        self.btwin = BlockTreeWindow(platform)
        self.btwin.connect("create_new_block", self._add_block_to_current_flow_graph)
        self.vars = VariableEditor()
        self.vars.connect("create_new_block", self._add_block_to_current_flow_graph)
        self.vars.connect("remove_block", self._remove_block_from_current_flow_graph)
        # Figure out which place to put the variable editor
        self.left = Gtk.VPaned()  # orientation=Gtk.Orientation.VERTICAL)
        self.right = Gtk.VPaned()  # orientation=Gtk.Orientation.VERTICAL)
        # orientation=Gtk.Orientation.HORIZONTAL)
        self.left_subpanel = Gtk.HPaned()
        self.variable_panel_sidebar = self.config.variable_editor_sidebar()
        if self.variable_panel_sidebar:
            # Sidebar layout: variables share the right pane with the blocks.
            self.left.pack1(self.notebook)
            self.left.pack2(self.console, False)
            self.right.pack1(self.btwin)
            self.right.pack2(self.vars, False)
        else:
            # Put the variable editor in a panel with the console
            self.left.pack1(self.notebook)
            self.left_subpanel.pack1(self.console, shrink=False)
            self.left_subpanel.pack2(self.vars, resize=False, shrink=True)
            self.left.pack2(self.left_subpanel, False)
            # Create the right panel
            self.right.pack1(self.btwin)
        self.main.pack1(self.left)
        self.main.pack2(self.right, False)
        # Load preferences and show the main window
        self.resize(*self.config.main_window_size())
        self.main.set_position(self.config.blocks_window_position())
        self.left.set_position(self.config.console_window_position())
        if self.variable_panel_sidebar:
            self.right.set_position(self.config.variable_editor_position(sidebar=True))
        else:
            self.left_subpanel.set_position(self.config.variable_editor_position())
        self.show_all()
        log.debug("Main window ready")

    ############################################################
    # Event Handlers
    ############################################################

    def _add_block_to_current_flow_graph(self, widget, key):
        """Signal handler: add the block identified by *key* to the active flow graph."""
        self.current_flow_graph.add_new_block(key)

    def _remove_block_from_current_flow_graph(self, widget, key):
        """Signal handler: remove the block identified by *key* from the active flow graph."""
        block = self.current_flow_graph.get_block(key)
        self.current_flow_graph.remove_element(block)

    def _quit(self, window, event):
        """
        Handle the delete event from the main window.
        Generated by pressing X to close, alt+f4, or right click+close.
        This method in turns calls the state handler to quit.

        Returns:
            true
        """
        Actions.APPLICATION_QUIT()
        return True

    def update_panel_visibility(self, panel, visibility=True):
        """
        Handles changing visibility of panels.

        Args:
            panel: one of BLOCKS, CONSOLE or VARIABLES
            visibility: True to show the panel, False to hide it
        """
        # Set the visibility for the requested panel, then update the containers if they need
        # to be hidden as well.
        if panel == self.BLOCKS:
            if visibility:
                self.btwin.show()
            else:
                self.btwin.hide()
        elif panel == self.CONSOLE:
            if visibility:
                self.console.show()
            else:
                self.console.hide()
        elif panel == self.VARIABLES:
            if visibility:
                self.vars.show()
            else:
                self.vars.hide()
        else:
            return
        if self.variable_panel_sidebar:
            # If both the variable editor and block panels are hidden, hide the right container
            if not (self.btwin.get_property("visible")) and not (
                self.vars.get_property("visible")
            ):
                self.right.hide()
            else:
                self.right.show()
        else:
            if not (self.btwin.get_property("visible")):
                self.right.hide()
            else:
                self.right.show()
            if not (self.vars.get_property("visible")) and not (
                self.console.get_property("visible")
            ):
                self.left_subpanel.hide()
            else:
                self.left_subpanel.show()

    ############################################################
    # Console Window
    ############################################################

    @property
    def current_page(self):
        # Delegated to the notebook, which tracks the active page.
        return self.notebook.current_page

    @current_page.setter
    def current_page(self, page):
        self.notebook.current_page = page

    def add_console_line(self, line):
        """
        Place line at the end of the text buffer, then scroll its window all the way down.

        Args:
            line: the new text
        """
        self.console.add_line(line)

    ############################################################
    # Pages: create and close
    ############################################################

    def new_page(self, file_path="", show=False):
        """
        Create a new notebook page.
        Set the tab to be selected.

        Args:
            file_path: optional file to load into the flow graph
            show: true if the page should be shown after loading
        """
        # if the file is already open, show the open page and return
        if file_path and file_path in self._get_files():  # already open
            page = self.notebook.get_nth_page(self._get_files().index(file_path))
            self._set_page(page)
            return
        try:  # try to load from file
            if file_path:
                Messages.send_start_load(file_path)
            flow_graph = self._platform.make_flow_graph()
            flow_graph.grc_file_path = file_path
            # print flow_graph
            page = Page(
                self,
                flow_graph=flow_graph,
                file_path=file_path,
            )
            # NOTE(review): two-argument getattr raises AttributeError if
            # Messages has no "flowgraph_error" attribute — confirm the
            # attribute is always defined, or add a None default.
            if getattr(Messages, "flowgraph_error") is not None:
                Messages.send(
                    ">>> Check: {}\n>>> FlowGraph Error: {}\n".format(
                        str(Messages.flowgraph_error_file),
                        str(Messages.flowgraph_error),
                    )
                )
            if file_path:
                Messages.send_end_load()
        except Exception as e:  # return on failure
            Messages.send_fail_load(e)
            if isinstance(e, KeyError) and str(e) == "'options'":
                # This error is unrecoverable, so crash gracefully
                exit(-1)
            return
        # add this page to the notebook
        self.notebook.append_page(page, page.tab)
        self.notebook.set_tab_reorderable(page, True)
        # only show if blank or manual
        if not file_path or show:
            self._set_page(page)

    def close_pages(self):
        """
        Close all the pages in this notebook.

        Returns:
            true if all closed
        """
        open_files = [file for file in self._get_files() if file]  # filter blank files
        open_file = self.current_page.file_path
        # close each page, saved pages first so prompts come last
        for page in sorted(self.get_pages(), key=lambda p: p.saved):
            self.page_to_be_closed = page
            closed = self.close_page(False)
            if not closed:
                break
        if self.notebook.get_n_pages():
            # at least one page refused to close (e.g. user cancelled)
            return False
        # save state before closing
        self.config.set_open_files(open_files)
        self.config.file_open(open_file)
        self.config.main_window_size(self.get_size())
        self.config.console_window_position(self.left.get_position())
        self.config.blocks_window_position(self.main.get_position())
        if self.variable_panel_sidebar:
            self.config.variable_editor_position(
                self.right.get_position(), sidebar=True
            )
        else:
            self.config.variable_editor_position(self.left_subpanel.get_position())
        self.config.save()
        return True

    def close_page(self, ensure=True):
        """
        Close the current page.
        If the notebook becomes empty, and ensure is true,
        call new page upon exit to ensure that at least one page exists.

        Args:
            ensure: boolean
        """
        if not self.page_to_be_closed:
            self.page_to_be_closed = self.current_page
        # show the page if it has an executing flow graph or is unsaved
        if self.page_to_be_closed.process or not self.page_to_be_closed.saved:
            self._set_page(self.page_to_be_closed)
        # unsaved? ask the user
        if not self.page_to_be_closed.saved:
            response = (
                self._save_changes()
            )  # return value can be OK, CLOSE, CANCEL, DELETE_EVENT, or NONE
            if response == Gtk.ResponseType.OK:
                Actions.FLOW_GRAPH_SAVE()  # try to save
                if not self.page_to_be_closed.saved:  # still unsaved?
                    self.page_to_be_closed = (
                        None  # set the page to be closed back to None
                    )
                    return False
            elif response != Gtk.ResponseType.CLOSE:
                self.page_to_be_closed = None
                return False
        # stop the flow graph if executing
        if self.page_to_be_closed.process:
            Actions.FLOW_GRAPH_KILL()
        # remove the page
        self.notebook.remove_page(self.notebook.page_num(self.page_to_be_closed))
        if ensure and self.notebook.get_n_pages() == 0:
            self.new_page()  # no pages, make a new one
        self.page_to_be_closed = None  # set the page to be closed back to None
        return True

    ############################################################
    # Misc
    ############################################################

    def update(self):
        """
        Set the title of the main window.
        Set the titles on the page tabs.
        Show/hide the console window.
        """
        page = self.current_page
        basename = os.path.basename(page.file_path)
        dirname = os.path.dirname(page.file_path)
        Gtk.Window.set_title(
            self,
            "".join(
                (
                    "*" if not page.saved else "",
                    basename if basename else NEW_FLOGRAPH_TITLE,
                    "(read only)" if page.get_read_only() else "",
                    " - ",
                    dirname if dirname else self._platform.config.name,
                )
            ),
        )
        # set tab titles
        for page in self.get_pages():
            file_name = os.path.splitext(os.path.basename(page.file_path))[0]
            page.set_markup(
                '<span foreground="{foreground}">{title}{ro}</span>'.format(
                    foreground="black" if page.saved else "red",
                    ro=" (ro)" if page.get_read_only() else "",
                    title=Utils.encode(file_name or NEW_FLOGRAPH_TITLE),
                )
            )
            fpath = page.file_path
            if not fpath:
                fpath = "(unsaved)"
            page.set_tooltip(fpath)
        # show/hide notebook tabs
        self.notebook.set_show_tabs(len(self.get_pages()) > 1)
        # Need to update the variable window when changing
        self.vars.update_gui(self.current_flow_graph.blocks)

    def update_pages(self):
        """
        Forces a reload of all the pages in this notebook.
        """
        for page in self.get_pages():
            success = page.flow_graph.reload()
            # NOTE(review): the inline comment and the condition disagree —
            # saved is cleared when reload() succeeds. Confirm reload()'s
            # return-value semantics before "fixing" either side.
            if success:  # Only set saved if errors occurred during import
                page.saved = False

    @property
    def current_flow_graph(self):
        # Convenience accessor: flow graph of the active page.
        return self.current_page.flow_graph

    def get_focus_flag(self):
        """
        Get the focus flag from the current page.

        Returns:
            the focus flag
        """
        return self.current_page.drawing_area.get_focus_flag()

    ############################################################
    # Helpers
    ############################################################

    def _set_page(self, page):
        """
        Set the current page.

        Args:
            page: the page widget
        """
        self.current_page = page
        self.notebook.set_current_page(self.notebook.page_num(self.current_page))

    def _save_changes(self):
        """
        Save changes to flow graph?

        Returns:
            the response_id (see buttons variable below)
        """
        buttons = (
            "Close without saving",
            Gtk.ResponseType.CLOSE,
            Gtk.STOCK_CANCEL,
            Gtk.ResponseType.CANCEL,
            Gtk.STOCK_SAVE,
            Gtk.ResponseType.OK,
        )
        return MessageDialogWrapper(
            self,
            Gtk.MessageType.QUESTION,
            Gtk.ButtonsType.NONE,
            "Unsaved Changes!",
            "Would you like to save changes before closing?",
            Gtk.ResponseType.OK,
            buttons,
        ).run_and_destroy()

    def _get_files(self):
        """
        Get the file names for all the pages, in order.

        Returns:
            list of file paths
        """
        return [page.file_path for page in self.get_pages()]

    def get_pages(self):
        """
        Get a list of all pages in the notebook.

        Returns:
            list of pages
        """
        return [
            self.notebook.get_nth_page(page_num)
            for page_num in range(self.notebook.get_n_pages())
        ]
|
renderers | sms_renderer | from apps.alerts.incident_appearance.renderers.base_renderer import (
AlertBaseRenderer,
AlertGroupBaseRenderer,
)
from apps.alerts.incident_appearance.renderers.constants import DEFAULT_BACKUP_TITLE
from apps.alerts.incident_appearance.templaters import AlertSmsTemplater
from common.utils import str_or_backup
class AlertSmsRenderer(AlertBaseRenderer):
    """Renders a single alert's fields for SMS delivery."""

    @property
    def templater_class(self):
        # Templater producing the SMS-specific alert representation.
        return AlertSmsTemplater
class AlertGroupSmsRenderer(AlertGroupBaseRenderer):
    """Renders an alert group as a single SMS message string."""

    @property
    def alert_renderer_class(self):
        return AlertSmsRenderer

    def render(self):
        """Build and return the SMS text for this alert group."""
        templated_alert = self.alert_renderer.templated_alert
        # Fall back to a default title when the templated title is empty.
        title = str_or_backup(templated_alert.title, DEFAULT_BACKUP_TITLE)
        # NOTE(review): the first two f-strings concatenate with no separator,
        # producing e.g. 'Alert group #12"Title" from ...' — confirm whether a
        # space between the group number and the quoted title is intended
        # before changing this runtime string.
        return (
            f"Grafana OnCall: Alert group #{self.alert_group.inside_organization_number}"
            f'"{title}" from stack: "{self.alert_group.channel.organization.stack_slug}", '
            f"integration: {self.alert_group.channel.short_name}, "
            f"alerts registered: {self.alert_group.alerts.count()}."
        )
|
mylar | parseit | # This file is part of Mylar.
#
# Mylar is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Mylar is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Mylar. If not, see <http://www.gnu.org/licenses/>.
import datetime
import re
import sys
from decimal import Decimal
from time import strptime
import helpers
import logger
import mylar
import urllib2
from bs4 import BeautifulSoup, UnicodeDammit
from HTMLParser import HTMLParseError
def GCDScraper(ComicName, ComicYear, Total, ComicID, quickmatch=None):
    """Search comics.org (Grand Comics Database) for a matching series.

    Scrapes the GCD advanced-search results and accepts a row that matches
    on cleaned name, start year (with a one-year grace), and issue count
    (with a +/-1 tolerance). On failure, retries recursively with several
    name variations (spelled-out numbers, leading "the", ':', '-', "and").

    Args:
        ComicName: series title to search for.
        ComicYear: series start year (string-like).
        Total: issue count reported by the other data source (CV).
        ComicID: identifier passed through to GCDdetails.
        quickmatch: if "yes", only report "Match"/"No Match" instead of
            fetching full series details.

    Returns:
        GCDdetails(...) result on a confirmed match, "Match"/"No Match"
        in quickmatch mode, or "No Match" when nothing matched.
    """
    NOWyr = datetime.date.today().year
    if datetime.date.today().month == 12:
        # December issues often already carry next year's cover date.
        NOWyr = NOWyr + 1
        logger.fdebug(
            "We're in December, incremented search Year to increase search results: "
            + str(NOWyr)
        )
    comicnm = ComicName.encode("utf-8").strip()
    comicyr = ComicYear
    comicis = Total
    comicid = ComicID
    # print ( "comicname: " + str(comicnm) )
    # print ( "comicyear: " + str(comicyr) )
    # print ( "comichave: " + str(comicis) )
    # print ( "comicid: " + str(comicid) )
    # URL-encode the name: literal '+' first, then spaces become '+'.
    comicnm_1 = re.sub("\+", "%2B", comicnm)
    comicnm = re.sub(" ", "+", comicnm_1)
    input = (
        "http://www.comics.org/search/advanced/process/?target=series&method=icontains&logic=False&order2=date&order3=&start_date="
        + str(comicyr)
        + "-01-01&end_date="
        + str(NOWyr)
        + "-12-31&series="
        + str(comicnm)
        + "&is_indexed=None"
    )
    response = urllib2.urlopen(input)
    soup = BeautifulSoup(response)
    # Result rows alternate between 'listing_even' and 'listing_odd' classes.
    cnt1 = len(soup.findAll("tr", {"class": "listing_even"}))
    cnt2 = len(soup.findAll("tr", {"class": "listing_odd"}))
    cnt = int(cnt1 + cnt2)
    # print (str(cnt) + " results")
    resultName = []
    resultID = []
    resultYear = []
    resultIssues = []
    resultURL = None
    n_odd = -1
    n_even = -1
    n = 0
    while n < cnt:
        # Rows are interleaved even/odd, so keep a separate index per class.
        if n % 2 == 0:
            n_even += 1
            resultp = soup.findAll("tr", {"class": "listing_even"})[n_even]
        else:
            n_odd += 1
            resultp = soup.findAll("tr", {"class": "listing_odd"})[n_odd]
        rtp = resultp("a")[1]
        resultName.append(helpers.cleanName(rtp.findNext(text=True)))
        # print ( "Comic Name: " + str(resultName[n]) )
        fip = resultp("a", href=True)[1]
        resultID.append(fip["href"])
        # print ( "ID: " + str(resultID[n]) )
        subtxt3 = resultp("td")[3]
        resultYear.append(subtxt3.findNext(text=True))
        resultYear[n] = resultYear[n].replace(" ", "")
        subtxt4 = resultp("td")[4]
        resultIssues.append(helpers.cleanName(subtxt4.findNext(text=True)))
        # Trim the issue-count cell down to the digits before the word "issue".
        resiss = resultIssues[n].find("issue")
        resiss = int(resiss)
        resultIssues[n] = resultIssues[n].replace("", "")[:resiss]
        resultIssues[n] = resultIssues[n].replace(" ", "")
        # print ( "Year: " + str(resultYear[n]) )
        # print ( "Issues: " + str(resultIssues[n]) )
        # Compare names with punctuation and spacing stripped on both sides.
        CleanComicName = re.sub(
            "[\,\.\:\;'\[\]\(\)\!\@\#\$\%\^\&\*\-\_\+\=\?\/]", "", comicnm
        )
        CleanComicName = re.sub(" ", "", CleanComicName).lower()
        CleanResultName = re.sub(
            "[\,\.\:\;'\[\]\(\)\!\@\#\$\%\^\&\*\-\_\+\=\?\/]", "", resultName[n]
        )
        CleanResultName = re.sub(" ", "", CleanResultName).lower()
        # print ("CleanComicName: " + str(CleanComicName))
        # print ("CleanResultName: " + str(CleanResultName))
        if CleanResultName == CleanComicName or CleanResultName[3:] == CleanComicName:
            # if resultName[n].lower() == helpers.cleanName(str(ComicName)).lower():
            # print ("n:" + str(n) + "...matched by name to Mylar!")
            # this has been seen in a few instances already, so trying to adjust.
            # when the series year is 2011, in gcd it might be 2012 due to publication
            # dates overlapping between Dec/11 and Jan/12. Let's accept a match with a
            # 1 year grace space, and then pull in the first issue to see the actual pub
            # date and if coincides with the other date..match it.
            if resultYear[n] == ComicYear or resultYear[n] == str(int(ComicYear) + 1):
                # print ("n:" + str(n) + "...matched by year to Mylar!")
                # print ( "Year: " + str(resultYear[n]) )
                # Occasionally there are discrepancies in comic count between
                # GCD and CV. 99% it's CV not updating to the newest issue as fast
                # as GCD does. Therefore, let's increase the CV count by 1 to get it
                # to match, any more variation could cause incorrect matching.
                # ie. witchblade on GCD says 159 issues, CV states 161.
                if (
                    int(resultIssues[n]) == int(Total)
                    or int(resultIssues[n]) == int(Total) + 1
                    or (int(resultIssues[n]) + 1) == int(Total)
                ):
                    # print ("initial issue match..continuing.")
                    # Remember which source is one issue ahead of the other.
                    if int(resultIssues[n]) == int(Total) + 1:
                        issvariation = "cv"
                    elif int(resultIssues[n]) + 1 == int(Total):
                        issvariation = "gcd"
                    else:
                        issvariation = "no"
                    # print ("n:" + str(n) + "...matched by issues to Mylar!")
                    # print ("complete match!...proceeding")
                    TotalIssues = resultIssues[n]
                    resultURL = str(resultID[n])
                    rptxt = resultp("td")[6]
                    resultPublished = rptxt.findNext(text=True)
                    # print ("Series Published: " + str(resultPublished))
                    break
        n += 1
    # it's possible that comicvine would return a comic name incorrectly, or gcd
    # has the wrong title and won't match 100%...
    # (ie. The Flash-2011 on comicvine is Flash-2011 on gcd)
    # this section is to account for variations in spelling, punctuation, etc/
    basnumbs = {
        "one": 1,
        "two": 2,
        "three": 3,
        "four": 4,
        "five": 5,
        "six": 6,
        "seven": 7,
        "eight": 8,
        "nine": 9,
        "ten": 10,
        "eleven": 11,
        "twelve": 12,
    }
    if resultURL is None:
        # search for number as text, and change to numeric
        for numbs in basnumbs:
            # print ("numbs:" + str(numbs))
            if numbs in ComicName.lower():
                numconv = basnumbs[numbs]
                # print ("numconv: " + str(numconv))
                ComicNm = re.sub(str(numbs), str(numconv), ComicName.lower())
                # print ("comicname-reVISED:" + str(ComicNm))
                return GCDScraper(ComicNm, ComicYear, Total, ComicID)
                break  # NOTE(review): unreachable after the return above.
        # Each retry below returns immediately, so only the first applicable
        # transformation is ever attempted.
        if ComicName.lower().startswith("the "):
            ComicName = ComicName[4:]
            return GCDScraper(ComicName, ComicYear, Total, ComicID)
        if ":" in ComicName:
            ComicName = re.sub(":", "", ComicName)
            return GCDScraper(ComicName, ComicYear, Total, ComicID)
        if "-" in ComicName:
            ComicName = re.sub("-", " ", ComicName)
            return GCDScraper(ComicName, ComicYear, Total, ComicID)
        if "and" in ComicName.lower():
            ComicName = ComicName.replace("and", "&")
            return GCDScraper(ComicName, ComicYear, Total, ComicID)
        if not quickmatch:
            return "No Match"
    # vari_loop = 0
    if quickmatch == "yes":
        if resultURL is None:
            return "No Match"
        else:
            return "Match"
    # NOTE(review): if quickmatch is truthy-but-not-"yes" and no match was
    # found, TotalIssues/issvariation/resultPublished are unbound here.
    return GCDdetails(
        comseries=None,
        resultURL=resultURL,
        vari_loop=0,
        ComicID=ComicID,
        TotalIssues=TotalIssues,
        issvariation=issvariation,
        resultPublished=resultPublished,
    )
def GCDdetails(
    comseries, resultURL, vari_loop, ComicID, TotalIssues, issvariation, resultPublished
):
    """Fetch per-issue details for a matched GCD series.

    Walks the comics.org series page(s) and the 'details/' issue table,
    building a de-duplicated list of (issue number, YYYY-MM date) entries.

    Args:
        comseries: dict with key "comseries" (list of volume dicts) when
            iterating multiple volumes; None for the single-series case.
        resultURL: GCD series path (e.g. "/series/1234/").
        vari_loop: number of volume variations to walk; 0 = single series,
            99 is treated as 1.
        ComicID: series id; ids starting with "G" get per-issue IssueIDs.
        TotalIssues: running issue total (incremented per volume).
        issvariation: "cv"/"gcd"/"no" issue-count variation flag, echoed
            into the result.
        resultPublished: publication-range string (recomputed for the
            single-series path; set to "Unknown" when vari_loop > 1).

    Returns:
        gcdinfo dict with keys gcdchoice, gcdvariation, totalissues,
        ComicImage, resultPublished, SeriesYear, GCDComicID — or None if
        the series page could not be decoded.
    """
    gcdinfo = {}
    gcdchoice = []
    gcount = 0
    i = 0
    # datemonth = {'one':1,'two':2,'three':3,'four':4,'five':5,'six':6,'seven':7,'eight':8,'nine':9,'ten':10,'eleven':$
    # #search for number as text, and change to numeric
    # for numbs in basnumbs:
    #     #print ("numbs:" + str(numbs))
    #     if numbs in ComicName.lower():
    #         numconv = basnumbs[numbs]
    #         #print ("numconv: " + str(numconv))
    if vari_loop > 1:
        resultPublished = "Unknown"
    if vari_loop == 99:
        vari_loop = 1
    while i <= vari_loop:
        if vari_loop > 0:
            # Multi-volume path: take the next volume from comseries.
            try:
                boong = comseries["comseries"][i]
            except IndexError:
                break
            resultURL = boong["comseriesID"]
            ComicID = boong["comicid"]
            TotalIssues += int(boong["comseriesIssues"])
        else:
            resultURL = resultURL
            # if we're here - it means it's a mismatched name.
            # let's pull down the publication date as it'll be blank otherwise
            inputMIS = "http://www.comics.org" + str(resultURL)
            resp = urllib2.urlopen(inputMIS)
            # soup = BeautifulSoup ( resp )
            try:
                soup = BeautifulSoup(urllib2.urlopen(inputMIS))
            except UnicodeDecodeError:
                logger.info(
                    "I've detected your system is using: " + sys.stdout.encoding
                )
                logger.info(
                    "unable to parse properly due to utf-8 problem, ignoring wrong symbols"
                )
                try:
                    soup = BeautifulSoup(urllib2.urlopen(inputMIS)).decode(
                        "utf-8", "ignore"
                    )
                except UnicodeDecodeError:
                    logger.info("not working...aborting. Tell Evilhero.")
                    return
            # If CV doesn't have the Series Year (Stupid)...Let's store the Comics.org stated year just in case.
            pyearit = soup.find("div", {"class": "item_data"})
            pyeartxt = pyearit.find(text=re.compile(r"Series"))
            pyearst = pyeartxt.index("Series")
            # Grab the 4-digit year (plus separator) just before "Series".
            ParseYear = pyeartxt[int(pyearst) - 5 : int(pyearst)]
            parsed = soup.find("div", {"id": "series_data"})
            # recent structure changes - need to adjust now
            subtxt3 = parsed.find("dd", {"id": "publication_dates"})
            resultPublished = subtxt3.findNext(text=True).rstrip()
            # print ("pubdate:" + str(resultPublished))
            parsfind = parsed.findAll("dt", {"class": "long"})
            seriesloop = len(parsfind)  # NOTE(review): computed but not read below.
            resultFormat = ""
            for pf in parsfind:
                if "Publishing Format:" in pf.findNext(text=True):
                    subtxt9 = pf.find("dd", {"id": "series_format"})
                    resultFormat = subtxt9.findNext(text=True).rstrip()
                    continue
            # the caveat - if a series is ongoing but only has 1 issue published at a particular point in time,
            # resultPublished will return just the date and not the word 'Present' which dictates on the main
            # page if a series is Continuing / Ended .
            if resultFormat != "":
                if (
                    "ongoing series" in resultFormat.lower()
                    and "was" not in resultFormat.lower()
                    and "present" not in resultPublished.lower()
                ):
                    resultPublished = resultPublished + " - Present"
                if "limited series" in resultFormat.lower() and "?" in resultPublished:
                    resultPublished = resultPublished + " (Limited Series)"
            coverst = soup.find("div", {"id": "series_cover"})
            # NOTE(review): comparing a parse result to an int; on Python 2 this
            # is only True when coverst is None (cover div missing).
            if coverst < 0:
                gcdcover = "None"
            else:
                subcoverst = coverst("img", src=True)[0]
                gcdcover = subcoverst["src"]
        # print ("resultURL:" + str(resultURL))
        # print ("comicID:" + str(ComicID))
        input2 = "http://www.comics.org" + str(resultURL) + "details/"
        resp = urllib2.urlopen(input2)
        soup = BeautifulSoup(resp)
        # for newer comics, on-sale date has complete date...
        # for older comics, pub.date is to be used
        # type = soup.find(text=' On-sale date ')
        type = soup.find(text=" Pub. Date ")  # NOTE(review): shadows builtin `type`.
        if type:
            # print ("on-sale date detected....adjusting")
            datetype = "pub"
        else:
            # print ("pub date defaulting")
            datetype = "on-sale"  # NOTE(review): datetype is set but not read below.
        # Issue rows alternate between row_even_False and row_even_True classes.
        cnt1 = len(soup.findAll("tr", {"class": "row_even_False"}))
        cnt2 = len(soup.findAll("tr", {"class": "row_even_True"}))
        cnt = int(cnt1 + cnt2)
        # print (str(cnt) + " Issues in Total (this may be wrong due to alternate prints, etc")
        n_odd = -1
        n_even = -1
        n = 0
        PI = "1.00"
        altcount = 0
        PrevYRMO = "0000-00"
        while n < cnt:
            if n % 2 == 0:
                n_odd += 1
                parsed = soup.findAll("tr", {"class": "row_even_False"})[n_odd]
                ntype = "odd"
            else:
                n_even += 1
                ntype = "even"
                parsed = soup.findAll("tr", {"class": "row_even_True"})[n_even]
            subtxt3 = parsed.find("a")
            ParseIssue = subtxt3.findNext(text=True)
            fid = parsed("a", href=True)[0]
            resultGID = fid["href"]
            # Strip "/issue/" prefix and trailing slash to get the bare id.
            resultID = resultGID[7:-1]
            if "," in ParseIssue:
                ParseIssue = re.sub("\,", "", ParseIssue)
            variant = "no"
            if (
                "Vol" in ParseIssue
                or "[" in ParseIssue
                or "a" in ParseIssue
                or "b" in ParseIssue
                or "c" in ParseIssue
            ):
                m = re.findall("[^\[\]]+", ParseIssue)
                # ^^ takes care of []
                # if it's a decimal - variant ...whoo-boy is messed.
                if "." in m[0]:
                    dec_chk = m[0]
                    # if it's a digit before and after decimal, assume decimal issue
                    dec_st = dec_chk.find(".")
                    dec_b4 = dec_chk[:dec_st]
                    dec_ad = dec_chk[dec_st + 1 :]
                    dec_ad = re.sub("\s", "", dec_ad)
                    if dec_b4.isdigit() and dec_ad.isdigit():
                        # logger.fdebug("Alternate decimal issue...*Whew* glad I caught that")
                        ParseIssue = dec_b4 + "." + dec_ad
                    else:
                        # logger.fdebug("it's a decimal, but there's no digits before or after decimal")
                        # not a decimal issue, drop it down to the regex below.
                        ParseIssue = re.sub("[^0-9]", " ", dec_chk)
                else:
                    ParseIssue = re.sub("[^0-9]", " ", m[0])
                # ^^ removes everything but the digits from the remaining non-brackets
                logger.fdebug("variant cover detected : " + str(ParseIssue))
                variant = "yes"
                altcount = 1
            isslen = ParseIssue.find(" ")
            if isslen < 0:
                # logger.fdebug("just digits left..using " + str(ParseIssue))
                isslen == 0  # NOTE(review): comparison, not assignment — a no-op.
                isschk = ParseIssue
                # logger.fdebug("setting ParseIssue to isschk: " + str(isschk))
            else:
                # logger.fdebug("parse issue is " + str(ParseIssue))
                # logger.fdebug("more than digits left - first space detected at position : " + str(isslen))
                # if 'isslen' exists, it means that it's an alternative cover.
                # however, if ONLY alternate covers exist of an issue it won't work.
                # let's use the FIRST record, and ignore all other covers for the given issue.
                isschk = ParseIssue[:isslen]
                # logger.fdebug("Parsed Issue#: " + str(isschk))
            ParseIssue = re.sub("\s", "", ParseIssue)
            # check if decimal or '1/2' exists or not, and store decimal results
            halfchk = "no"
            if "." in isschk:
                isschk_find = isschk.find(".")
                isschk_b4dec = isschk[:isschk_find]
                isschk_decval = isschk[isschk_find + 1 :]
                # logger.fdebug("decimal detected for " + str(isschk))
                # logger.fdebug("isschk_decval is " + str(isschk_decval))
                if len(isschk_decval) == 1:
                    # Normalize one-digit decimals, e.g. "1.5" -> "1.50".
                    ParseIssue = isschk_b4dec + "." + str(int(isschk_decval) * 10)
            elif "/" in isschk:
                # Half issues ("1/2") are stored as 0.50.
                ParseIssue = "0.50"
                isslen = 0
                halfchk = "yes"
            else:
                isschk_decval = ".00"
                ParseIssue = ParseIssue + isschk_decval
            if variant == "yes":
                # logger.fdebug("alternate cover detected - skipping/ignoring.")
                altcount = 1
            # in order to get the compare right, let's decimialize the string to '.00'.
            # if halfchk == "yes": pass
            # else:
            #     ParseIssue = ParseIssue + isschk_decval
            datematch = "false"
            if not any(d.get("GCDIssue", None) == str(ParseIssue) for d in gcdchoice):
                # logger.fdebug("preparing to add issue to db : " + str(ParseIssue))
                pass
            else:
                # logger.fdebug("2 identical issue #'s have been found...determining if it's intentional")
                # get current issue & publication date.
                # logger.fdebug("Issue #:" + str(ParseIssue))
                # logger.fdebug("IssueDate: " + str(gcdinfo['ComicDate']))
                # get conflicting issue from tuple
                for d in gcdchoice:
                    if str(d["GCDIssue"]) == str(ParseIssue):
                        # logger.fdebug("Issue # already in tuple - checking IssueDate:" + str(d['GCDDate']) )
                        if str(d["GCDDate"]) == str(gcdinfo["ComicDate"]):
                            # logger.fdebug("Issue #'s and dates match...skipping.")
                            datematch = "true"
                        else:
                            # logger.fdebug("Issue#'s match but different publication dates, not skipping.")
                            datematch = "false"
            if datematch == "false":
                gcdinfo["ComicIssue"] = ParseIssue
                # --- let's use pubdate.
                # try publicationd date first
                ParseDate = GettheDate(parsed, PrevYRMO)
                ParseDate = ParseDate.replace(" ", "")
                PrevYRMO = ParseDate
                gcdinfo["ComicDate"] = ParseDate
                # ^^ will retrieve date #
                # logger.fdebug("adding: " + str(gcdinfo['ComicIssue']) + " - date: " + str(ParseDate))
                if ComicID[:1] == "G":
                    gcdchoice.append(
                        {
                            "GCDid": ComicID,
                            "IssueID": resultID,
                            "GCDIssue": gcdinfo["ComicIssue"],
                            "GCDDate": gcdinfo["ComicDate"],
                        }
                    )
                    gcount += 1
                else:
                    gcdchoice.append(
                        {
                            "GCDid": ComicID,
                            "GCDIssue": gcdinfo["ComicIssue"],
                            "GCDDate": gcdinfo["ComicDate"],
                        }
                    )
                gcdinfo["gcdchoice"] = gcdchoice
            altcount = 0
            n += 1
        i += 1
    gcdinfo["gcdvariation"] = issvariation
    if ComicID[:1] == "G":
        gcdinfo["totalissues"] = gcount
    else:
        gcdinfo["totalissues"] = TotalIssues
    # NOTE(review): gcdcover/ParseYear are only set on the vari_loop == 0
    # path above — confirm the multi-volume path cannot reach here first.
    gcdinfo["ComicImage"] = gcdcover
    gcdinfo["resultPublished"] = resultPublished
    gcdinfo["SeriesYear"] = ParseYear
    gcdinfo["GCDComicID"] = resultURL.split("/")[0]
    return gcdinfo
## -- end (GCD) -- ##
def GettheDate(parsed, PrevYRMO):
    """Derive a YYYY-MM date string for one GCD issue-table row.

    Args:
        parsed: issue-table <tr> element; the publication date is read from
            cell 1, the on-sale date from cell 2.
        PrevYRMO: "YYYY-MM" of the previously processed issue, or "0000-00".

    Returns:
        A "YYYY-MM" string (or "0000-00" when no date can be determined).
    """
    # --- let's use pubdate.
    # try publicationd date first
    # logger.fdebug("parsed:" + str(parsed))
    subtxt1 = parsed("td")[1]
    ParseDate = subtxt1.findNext(text=True).rstrip()
    pformat = "pub"
    if ParseDate is None or ParseDate == "":
        # No publication date — fall back to the on-sale date column.
        subtxt1 = parsed("td")[2]
        ParseDate = subtxt1.findNext(text=True)
        pformat = "on-sale"
        if len(ParseDate) < 7:
            ParseDate = "0000-00"  # invalid on-sale date format , drop it 0000-00 to avoid errors
    basmonths = {
        "january": "01",
        "february": "02",
        "march": "03",
        "april": "04",
        "may": "05",
        "june": "06",
        "july": "07",
        "august": "08",
        "september": "09",
        "october": "10",
        "november": "11",
        "december": "12",
    }
    pdlen = len(ParseDate)
    pdfind = ParseDate.find(" ", 2)
    # logger.fdebug("length: " + str(pdlen) + "....first space @ pos " + str(pdfind))
    # logger.fdebug("this should be the year: " + str(ParseDate[pdfind+1:pdlen-1]))
    if pformat == "on-sale":
        pass  # date is in correct format...
    else:
        if ParseDate[pdfind + 1 : pdlen - 1].isdigit():
            # assume valid date.
            # search for number as text, and change to numeric
            for numbs in basmonths:
                if numbs in ParseDate.lower():
                    pconv = basmonths[numbs]
                    # NOTE(review): "/s" is a literal slash-s, not the \s
                    # whitespace class — this re.sub never strips anything;
                    # confirm whether r"\s" was intended.
                    ParseYear = re.sub("/s", "", ParseDate[-5:])
                    ParseDate = str(ParseYear) + "-" + str(pconv)
                    # logger.fdebug("!success - Publication date: " + str(ParseDate))
                    break
        # some comics are messed with pub.dates and have Spring/Summer/Fall/Winter
        else:
            baseseasons = {"spring": "03", "summer": "06", "fall": "09", "winter": "12"}
            for seas in baseseasons:
                if seas in ParseDate.lower():
                    sconv = baseseasons[seas]
                    ParseYear = re.sub("/s", "", ParseDate[-5:])
                    ParseDate = str(ParseYear) + "-" + str(sconv)
                    break
    # #try key date
    # subtxt1 = parsed('td')[2]
    # ParseDate = subtxt1.findNext(text=True)
    # #logger.fdebug("no pub.date detected, attempting to use on-sale date: " + str(ParseDate))
    # if (ParseDate) < 7:
    # #logger.fdebug("Invalid on-sale date - less than 7 characters. Trying Key date")
    # subtxt3 = parsed('td')[0]
    # ParseDate = subtxt3.findNext(text=True)
    # if ParseDate == ' ':
    # increment previous month by one and throw it in until it's populated properly.
    # NOTE(review): the `if ParseDate == ' ':` guard above is commented out,
    # so this previous-month fallback runs unconditionally and overwrites any
    # date parsed earlier in this function — verify against upstream history
    # whether the guard should be live.
    if PrevYRMO == "0000-00":
        ParseDate = "0000-00"
    else:
        PrevYR = str(PrevYRMO)[:4]
        PrevMO = str(PrevYRMO)[5:]
        # let's increment the month now (if it's 12th month, up the year and hit Jan.)
        if int(PrevMO) == 12:
            PrevYR = int(PrevYR) + 1
            PrevMO = 1
        else:
            PrevMO = int(PrevMO) + 1
        if int(PrevMO) < 10:
            PrevMO = "0" + str(PrevMO)
        ParseDate = str(PrevYR) + "-" + str(PrevMO)
    # logger.fdebug("parseDAte:" + str(ParseDate))
    return ParseDate
def GCDAdd(gcdcomicid):
    """Fetch series metadata from comics.org for each given GCD series id.

    Args:
        gcdcomicid: iterable of GCD series ids.

    Returns:
        dict with key "serieschoice" -> list of per-series dicts holding
        name, publication date, issue count, publisher and cover URL.
    """
    serieschoice = []
    series = {}
    logger.fdebug("I'm trying to find these GCD comicid's:" + str(gcdcomicid))
    for gcdid in gcdcomicid:
        logger.fdebug("looking at gcdid:" + str(gcdid))
        input2 = "http://www.comics.org/series/" + str(gcdid)
        logger.fdebug("---url: " + str(input2))
        resp = urllib2.urlopen(input2)
        soup = BeautifulSoup(resp)
        logger.fdebug("SeriesName section...")
        parsen = soup.find("span", {"id": "series_name"})
        # logger.fdebug("series name (UNPARSED): " + str(parsen))
        subpar = parsen("a")[0]
        resultName = subpar.findNext(text=True)
        logger.fdebug("ComicName: " + str(resultName))
        # covers-start
        logger.fdebug("Covers section...")
        coverst = soup.find("div", {"id": "series_cover"})
        # NOTE(review): comparing a parse result to an int; on Python 2 this
        # is only True when coverst is None (cover div missing).
        if coverst < 0:
            gcdcover = "None"
            logger.fdebug("unable to find any covers - setting to None")
        else:
            subcoverst = coverst("img", src=True)[0]
            # logger.fdebug("cover (UNPARSED) : " + str(subcoverst))
            gcdcover = subcoverst["src"]
        logger.fdebug("Cover: " + str(gcdcover))
        # covers end
        # publisher start
        logger.fdebug("Publisher section...")
        try:
            pubst = soup.find("div", {"class": "item_data"})
            catchit = pubst("a")[0]
        except (IndexError, TypeError):
            # Fall back to the older page layout when item_data is absent.
            pubst = soup.findAll("div", {"class": "left"})[1]
            catchit = pubst.find("a")
        publisher = catchit.findNext(text=True)
        logger.fdebug("Publisher: " + str(publisher))
        # publisher end
        parsed = soup.find("div", {"id": "series_data"})
        # logger.fdebug("series_data: " + str(parsed))
        # print ("parse:" + str(parsed))
        subtxt3 = parsed.find("dd", {"id": "publication_dates"})
        # logger.fdebug("publication_dates: " + str(subtxt3))
        pubdate = subtxt3.findNext(text=True).rstrip()
        logger.fdebug("pubdate:" + str(pubdate))
        subtxt4 = parsed.find("dd", {"id": "issues_published"})
        noiss = subtxt4.findNext(text=True)
        # Split "NN issues (#a - #b)" into the count and the numbering range.
        lenwho = len(noiss)
        lent = noiss.find(" ", 2)
        lenf = noiss.find("(")
        stringit = noiss[lenf:lenwho]
        stringout = noiss[:lent]
        noissues = stringout.rstrip(" \t\r\n\0")
        numbering = stringit.rstrip(" \t\r\n\0")
        logger.fdebug("noissues:" + str(noissues))
        logger.fdebug("numbering:" + str(numbering))
        serieschoice.append(
            {
                "ComicID": gcdid,
                "ComicName": resultName,
                "ComicYear": pubdate,
                "ComicIssues": noissues,
                "ComicPublisher": publisher,
                "ComicCover": gcdcover,
            }
        )
    series["serieschoice"] = serieschoice
    return series
def ComChk(ComicName, ComicYear, ComicPublisher, Total, ComicID):
    """Run several comics.org searches for a series and collect candidates.

    Builds up to four variants of the series name (as-is, minus the last
    word, punctuation stripped, leading "the" removed), queries GCD for
    each, and de-duplicates result rows by GCD series id.

    Args:
        ComicName: series title.
        ComicYear: series start year (lower bound of the search window).
        ComicPublisher: publisher name; kept in the query URL only for the
            big publishers (DC/Marvel/Image/IDW).
        Total: issue count (echoed via comicis; not used in the query).
        ComicID: identifier copied into each candidate row.

    Returns:
        (comchoice, totalcount): dict with key "comchkchoice" -> candidate
        list, and the total number of result rows seen across all runs.
    """
    comchkchoice = []
    comchoice = {}
    NOWyr = datetime.date.today().year
    if datetime.date.today().month == 12:
        # December issues often already carry next year's cover date.
        NOWyr = NOWyr + 1
        logger.fdebug(
            "We're in December, incremented search Year to increase search results: "
            + str(NOWyr)
        )
    comicnm = ComicName.encode("utf-8").strip()
    comicyr = ComicYear
    comicis = Total
    comicid = ComicID
    comicpub = ComicPublisher.encode("utf-8").strip()
    # print ("...comchk parser initialization...")
    # print ( "comicname: " + str(comicnm) )
    # print ( "comicyear: " + str(comicyr) )
    # print ( "comichave: " + str(comicis) )
    # print ( "comicpub: " + str(comicpub) )
    # print ( "comicid: " + str(comicid) )
    # do 3 runs at the comics.org search to get the best results
    comicrun = []
    # &pub_name=DC
    # have to remove the spaces from Publisher or else will not work (ie. DC Comics vs DC will not match)
    # take the 1st word ;)
    # comicpub = comicpub.split()[0]
    # if it's not one of the BIG publisher's it might fail - so let's increase the odds.
    pubbiggies = ["DC", "Marvel", "Image", "IDW"]
    uhuh = "no"
    for pb in pubbiggies:
        if pb in comicpub:
            # keep publisher in url if a biggie.
            uhuh = "yes"
            # print (" publisher match : " + str(comicpub))
            conv_pub = comicpub.split()[0]
            # print (" converted publisher to : " + str(conv_pub))
    # 1st run setup - leave it all as it is.
    comicrun.append(comicnm)
    cruncnt = 0
    # 2nd run setup - remove the last character and do a broad search (keep year or else will blow up)
    if len(str(comicnm).split()) > 2:
        comicrun.append(" ".join(comicnm.split(" ")[:-1]))
        cruncnt += 1
    # to increase the likely hood of matches and to get a broader scope...
    # lets remove extra characters
    if re.sub("[\.\,\:]", "", comicnm) != comicnm:
        comicrun.append(re.sub("[\.\,\:]", "", comicnm))
        cruncnt += 1
    # one more addition - if the title contains a 'the', remove it ;)
    if comicnm.lower().startswith("the"):
        comicrun.append(comicnm[4:].strip())
        cruncnt += 1
    totalcount = 0
    cr = 0
    # print ("cruncnt is " + str(cruncnt))
    while cr <= cruncnt:
        # print ("cr is " + str(cr))
        comicnm = comicrun[cr]
        # leaving spaces in will screw up the search...let's take care of it
        comicnm = re.sub(" ", "+", comicnm)
        # print ("comicnm: " + str(comicnm))
        if uhuh == "yes":
            publink = "&pub_name=" + str(conv_pub)
        if uhuh == "no":
            publink = "&pub_name="
        input = (
            "http://www.comics.org/search/advanced/process/?target=series&method=icontains&logic=False&keywords=&order1=series&order2=date&order3=&start_date="
            + str(comicyr)
            + "-01-01&end_date="
            + str(NOWyr)
            + "-12-31"
            + "&title=&feature=&job_number=&pages=&script=&pencils=&inks=&colors=&letters=&story_editing=&genre=&characters=&synopsis=&reprint_notes=&story_reprinted=None&notes="
            + str(publink)
            + "&pub_notes=&brand=&brand_notes=&indicia_publisher=&is_surrogate=None&ind_pub_notes=&series="
            + str(comicnm)
            + "&series_year_began=&series_notes=&tracking_notes=&issue_count=&is_comics=None&format=&color=&dimensions=&paper_stock=&binding=&publishing_format=&issues=&volume=&issue_title=&variant_name=&issue_date=&indicia_frequency=&price=&issue_pages=&issue_editing=&isbn=&barcode=&issue_notes=&issue_reprinted=None&is_indexed=None"
        )
        response = urllib2.urlopen(input)
        soup = BeautifulSoup(response)
        # Result rows alternate between 'listing_even' and 'listing_odd'.
        cnt1 = len(soup.findAll("tr", {"class": "listing_even"}))
        cnt2 = len(soup.findAll("tr", {"class": "listing_odd"}))
        cnt = int(cnt1 + cnt2)
        # print ("cnt1: " + str(cnt1))
        # print ("cnt2: " + str(cnt2))
        # print (str(cnt) + " results")
        resultName = []
        resultID = []
        resultYear = []
        resultIssues = []
        resultPublisher = []
        resultURL = None
        n_odd = -1
        n_even = -1
        n = 0
        while n < cnt:
            if n % 2 == 0:
                n_even += 1
                resultp = soup.findAll("tr", {"class": "listing_even"})[n_even]
            else:
                n_odd += 1
                resultp = soup.findAll("tr", {"class": "listing_odd"})[n_odd]
            rtp = resultp("a")[1]
            rtpit = rtp.findNext(text=True)
            rtpthis = rtpit.encode("utf-8").strip()
            resultName.append(helpers.cleanName(rtpthis))
            # print ( "Comic Name: " + str(resultName[n]) )
            pub = resultp("a")[0]
            pubit = pub.findNext(text=True)
            # pubthis = u' '.join(pubit).encode('utf-8').strip()
            pubthis = pubit.encode("utf-8").strip()
            resultPublisher.append(pubthis)
            # print ( "Publisher: " + str(resultPublisher[n]) )
            fip = resultp("a", href=True)[1]
            resultID.append(fip["href"])
            # print ( "ID: " + str(resultID[n]) )
            subtxt3 = resultp("td")[3]
            resultYear.append(subtxt3.findNext(text=True))
            resultYear[n] = resultYear[n].replace(" ", "")
            subtxt4 = resultp("td")[4]
            resultIssues.append(helpers.cleanName(subtxt4.findNext(text=True)))
            # Trim the issue-count cell down to the digits before "issue".
            resiss = resultIssues[n].find("issue")
            resiss = int(resiss)
            resultIssues[n] = resultIssues[n].replace("", "")[:resiss]
            resultIssues[n] = resultIssues[n].replace(" ", "")
            # print ( "Year: " + str(resultYear[n]) )
            # print ( "Issues: " + str(resultIssues[n]) )
            # print ("comchkchoice: " + str(comchkchoice))
            # Only add a row the first time its GCD series id shows up.
            if not any(d.get("GCDID", None) == str(resultID[n]) for d in comchkchoice):
                # print ( str(resultID[n]) + " not in DB...adding.")
                comchkchoice.append(
                    {
                        "ComicID": str(comicid),
                        "ComicName": resultName[n],
                        "GCDID": str(resultID[n]).split("/")[2],
                        "ComicYear": str(resultYear[n]),
                        "ComicPublisher": resultPublisher[n],
                        "ComicURL": "http://www.comics.org" + str(resultID[n]),
                        "ComicIssues": str(resultIssues[n]),
                    }
                )
            # else:
            # print ( str(resultID[n]) + " already in DB...skipping" )
            n += 1
        cr += 1
        totalcount = totalcount + cnt
    comchoice["comchkchoice"] = comchkchoice
    return comchoice, totalcount
def decode_html(html_string):
    """Decode a raw HTML byte string to unicode via bs4's charset sniffing.

    Args:
        html_string: raw HTML (byte string) of unknown encoding.

    Returns:
        The unicode form of *html_string* as detected by UnicodeDammit.

    Raises:
        UnicodeDecodeError: if no usable encoding could be detected.
    """
    converted = UnicodeDammit(html_string)
    if not converted.unicode:
        # UnicodeDecodeError requires (encoding, object, start, end, reason);
        # the previous two-argument call raised TypeError instead of the
        # intended exception. Build it properly so callers can catch it.
        raise UnicodeDecodeError(
            "unknown",
            html_string,
            0,
            len(html_string),
            "Failed to detect encoding, tried [%s]"
            % ", ".join(converted.triedEncodings),
        )
    # print converted.originalEncoding
    return converted.unicode
def annualCheck(gcomicid, comicid, comicname, comicyear):
    """Search comics.org for an Annual series matching an already-matched comic.

    NOTE(review): this function appears unfinished — it always falls through
    to a bare ``return`` (None), and has the issues flagged inline below.

    Args:
        gcomicid: GCD comic id of the matched series.
        comicid: internal comic id.
        comicname: series title (searched as "<name> annual").
        comicyear: series year used to bound the search.
    """
    # will only work if we already matched for gcd.
    # search for <comicname> annual
    # grab annual listing that hits on comicyear (seriesyear)
    # grab results :)
    print("GcomicID: " + str(gcomicid))
    print("comicID: " + str(comicid))
    print("comicname: " + comicname)
    print("comicyear: " + str(comicyear))
    comicnm = comicname.encode("utf-8").strip()
    # URL-encode the "<name> annual" query string.
    comicnm_1 = re.sub("\+", "%2B", comicnm + " annual")
    comicnm = re.sub(" ", "+", comicnm_1)
    input = (
        "http://www.comics.org/search/advanced/process/?target=series&method=icontains&logic=False&order2=date&order3=&start_date="
        + str(comicyear)
        + "-01-01&end_date="
        + str(comicyear)
        + "-12-31&series="
        + str(comicnm)
        + "&is_indexed=None"
    )
    response = urllib2.urlopen(input)
    soup = BeautifulSoup(response)
    cnt1 = len(soup.findAll("tr", {"class": "listing_even"}))
    cnt2 = len(soup.findAll("tr", {"class": "listing_odd"}))
    cnt = int(cnt1 + cnt2)
    print(str(cnt) + " results")
    resultName = []
    resultID = []
    resultYear = []
    resultIssues = []
    resultURL = None
    n_odd = -1
    n_even = -1
    n = 0
    while n < cnt:
        if n % 2 == 0:
            n_even += 1
            resultp = soup.findAll("tr", {"class": "listing_even"})[n_even]
        else:
            n_odd += 1
            resultp = soup.findAll("tr", {"class": "listing_odd"})[n_odd]
        rtp = resultp("a")[1]
        # NOTE(review): re.sub expects a string but rtp is a parse-tree
        # element, and the string result would have no findNext() — confirm
        # this code path was ever exercised.
        rtp1 = re.sub("Annual", "", rtp)
        resultName.append(helpers.cleanName(rtp1.findNext(text=True)))
        print("Comic Name: " + str(resultName[n]))
        fip = resultp("a", href=True)[1]
        resultID.append(fip["href"])
        print("ID: " + str(resultID[n]))
        subtxt3 = resultp("td")[3]
        resultYear.append(subtxt3.findNext(text=True))
        resultYear[n] = resultYear[n].replace(" ", "")
        subtxt4 = resultp("td")[4]
        resultIssues.append(helpers.cleanName(subtxt4.findNext(text=True)))
        resiss = resultIssues[n].find("issue")
        resiss = int(resiss)
        resultIssues[n] = resultIssues[n].replace("", "")[:resiss]
        resultIssues[n] = resultIssues[n].replace(" ", "")
        print("Year: " + str(resultYear[n]))
        print("Issues: " + str(resultIssues[n]))
        CleanComicName = re.sub(
            "[\,\.\:\;'\[\]\(\)\!\@\#\$\%\^\&\*\-\_\+\=\?\/]", "", comicnm
        )
        CleanComicName = re.sub(" ", "", CleanComicName).lower()
        CleanResultName = re.sub(
            "[\,\.\:\;'\[\]\(\)\!\@\#\$\%\^\&\*\-\_\+\=\?\/]", "", resultName[n]
        )
        CleanResultName = re.sub(" ", "", CleanResultName).lower()
        print("CleanComicName: " + str(CleanComicName))
        print("CleanResultName: " + str(CleanResultName))
        if CleanResultName == CleanComicName or CleanResultName[3:] == CleanComicName:
            # if resultName[n].lower() == helpers.cleanName(str(ComicName)).lower():
            # print ("n:" + str(n) + "...matched by name to Mylar!")
            # NOTE(review): ComicYear is undefined in this function — the
            # parameter is lowercase `comicyear`; this branch would raise
            # NameError when reached.
            if resultYear[n] == ComicYear or resultYear[n] == str(int(ComicYear) + 1):
                print("n:" + str(n) + "...matched by year to Mylar!")
                print("Year: " + str(resultYear[n]))
                TotalIssues = resultIssues[n]
                resultURL = str(resultID[n])
                rptxt = resultp("td")[6]
                resultPublished = rptxt.findNext(text=True)
                # print ("Series Published: " + str(resultPublished))
                break
        n += 1
    return
|
markupsafe | _native | # -*- coding: utf-8 -*-
"""
markupsafe._native
~~~~~~~~~~~~~~~~~~
Native Python implementation the C module is not compiled.
:copyright: (c) 2010 by Armin Ronacher.
:license: BSD, see LICENSE for more details.
"""
from markupsafe import Markup
from markupsafe._compat import text_type
def escape(s):
    """Convert the characters &, <, >, ' and " in string s to HTML-safe
    sequences. Use this if you need to display text that might contain
    such characters in HTML. Marks return value as markup string.
    """
    # Objects that know how to render themselves as HTML are trusted as-is.
    if hasattr(s, "__html__"):
        return s.__html__()
    # Each character was previously "replaced" with itself, which made this
    # function a no-op; substitute the proper HTML entities instead.
    # '&' must be replaced first so the other entities are not double-escaped.
    return Markup(
        text_type(s)
        .replace("&", "&amp;")
        .replace(">", "&gt;")
        .replace("<", "&lt;")
        .replace("'", "&#39;")
        .replace('"', "&#34;")
    )
def escape_silent(s):
    """Like :func:`escape` but converts `None` into an empty
    markup string.
    """
    return Markup() if s is None else escape(s)
def soft_unicode(s):
    """Make a string unicode if it isn't already. That way a markup
    string is not converted back to unicode.
    """
    # Already-unicode input (including Markup) is passed through untouched.
    if isinstance(s, text_type):
        return s
    return text_type(s)
|
ordered-model | serializer | from common.api_helpers.exceptions import BadRequest
from rest_framework import serializers
class OrderedModelSerializer(serializers.ModelSerializer):
    """Ordered model serializer to be used in public API.

    Exposes the underlying model's ``order`` attribute as a ``position``
    field, and repositions instances on create/update using either a
    "move to" (default) or a "swap" (``manual_order=True``) strategy.
    """

    # "position" reads and writes the ordered model's "order" attribute.
    position = serializers.IntegerField(required=False, source="order")
    # manual_order=True is intended for use by Terraform provider only, and is not a documented feature.
    manual_order = serializers.BooleanField(default=False, write_only=True)

    class Meta:
        fields = ["position", "manual_order"]

    def create(self, validated_data):
        """Create the instance, then move it to the requested position."""
        # Strip the ordering controls so they never reach the model layer.
        use_swap = validated_data.pop("manual_order", False)
        target = validated_data.pop("order", None)
        # Instances are always created at the end of the list, and then moved
        # to the desired position by _adjust_order.
        instance = super().create(validated_data)
        if target is not None:
            self._adjust_order(instance, use_swap, target, created=True)
        return instance

    def update(self, instance, validated_data):
        """Reposition the instance if requested, then apply field updates."""
        use_swap = validated_data.pop("manual_order", False)
        target = validated_data.pop("order", None)
        if target is not None:
            self._adjust_order(instance, use_swap, target, created=False)
        return super().update(instance, validated_data)

    @staticmethod
    def _adjust_order(instance, manual_order, order, created):
        """Move or swap ``instance`` to ``order``, validating the target.

        Raises:
            BadRequest: when ``order`` is outside the PositiveIntegerField range.
        """
        # order == -1 means "move to the end of the list" — public API only,
        # not supported for the Terraform (swap) path.
        if order == -1 and not manual_order:
            if created:
                # A freshly created instance already sits at the end.
                return
            order = instance.max_order()
            # max_order() can't be None here because at least one instance
            # exists — the one we are moving.
            assert order is not None
        # Valid range of a Django PositiveIntegerField.
        # https://docs.djangoproject.com/en/4.1/ref/models/fields/#positiveintegerfield
        if order < 0 or order > 2147483647:
            raise BadRequest(detail="Invalid value for position field")
        # Orders are swapped instead of moved when using Terraform, because
        # Terraform may issue concurrent create/update/delete requests: "move
        # to" is not deterministic under concurrency, whereas the result of
        # concurrent "swap" operations does not depend on request order.
        if manual_order:
            instance.swap(order)
        else:
            instance.to(order)
|
gui | StateCache | """
Copyright 2007 Free Software Foundation, Inc.
This file is part of GNU Radio
SPDX-License-Identifier: GPL-2.0-or-later
"""
from . import Actions
from .Constants import STATE_CACHE_SIZE
class StateCache(object):
    """
    The state cache is an interface to a list to record data/states and to revert to previous states.
    States are recorded into the list in a circular fassion by using an index for the current state,
    and counters for the range where states are stored.
    """

    def __init__(self, initial_state):
        """
        StateCache constructor.

        Args:
            initial_state: the initial state (nested data)
        """
        # Fixed-size ring buffer of states plus undo/redo counters.
        self.states = [None] * STATE_CACHE_SIZE  # fill states
        self.current_state_index = 0
        self.num_prev_states = 0
        self.num_next_states = 0
        self.states[0] = initial_state
        self.update_actions()

    def save_new_state(self, state):
        """
        Save a new state.
        Place the new state at the next index and add one to the number of previous states.

        Args:
            state: the new state
        """
        self.current_state_index = (self.current_state_index + 1) % STATE_CACHE_SIZE
        self.states[self.current_state_index] = state
        # Cap the undo depth at the buffer size minus one.
        self.num_prev_states = min(self.num_prev_states + 1, STATE_CACHE_SIZE - 1)
        # Any redo history is invalidated by a new state.
        self.num_next_states = 0
        self.update_actions()

    def get_current_state(self):
        """
        Get the state at the current index.

        Returns:
            the current state (nested data)
        """
        self.update_actions()
        return self.states[self.current_state_index]

    def get_prev_state(self):
        """
        Get the previous state and decrement the current index.

        Returns:
            the previous state or None
        """
        if self.num_prev_states <= 0:
            return None
        # Step backwards around the ring buffer.
        self.current_state_index = (self.current_state_index - 1) % STATE_CACHE_SIZE
        self.num_next_states += 1
        self.num_prev_states -= 1
        return self.get_current_state()

    def get_next_state(self):
        """
        Get the next state and increment the current index.

        Returns:
            the next state or None
        """
        if self.num_next_states <= 0:
            return None
        # Step forwards around the ring buffer.
        self.current_state_index = (self.current_state_index + 1) % STATE_CACHE_SIZE
        self.num_next_states -= 1
        self.num_prev_states += 1
        return self.get_current_state()

    def update_actions(self):
        """
        Update the undo and redo actions based on the number of next and prev states.
        """
        Actions.FLOW_GRAPH_REDO.set_enabled(self.num_next_states != 0)
        Actions.FLOW_GRAPH_UNDO.set_enabled(self.num_prev_states != 0)
|
libs | singleapplication | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
#######################################################################
#
# VidCutter - media cutter & joiner
#
# copyright © 2018 Pete Alexandrou
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
#######################################################################
import os
import sys
import vidcutter
from PyQt5.QtCore import (
QDir,
QFileInfo,
QProcessEnvironment,
QSettings,
Qt,
QTextStream,
pyqtSignal,
)
from PyQt5.QtNetwork import QLocalServer, QLocalSocket
from PyQt5.QtWidgets import QApplication
class SingleApplication(QApplication):
    """QApplication subclass that enforces a single running instance per appid.

    Uses a QLocalSocket/QLocalServer pair: if a server with this appid is
    already listening, the new process forwards its first existing file
    argument and exits; otherwise this process becomes the server and emits
    ``messageReceived`` for every line later instances send it.
    """

    # Emitted once per newline-terminated message received from a
    # subsequently launched instance (typically a file path to open).
    messageReceived = pyqtSignal(str)

    def __init__(self, appid, *argv):
        super(SingleApplication, self).__init__(*argv)
        self._appid = appid
        self._activationWindow = None
        self._activateOnMessage = False
        # Probe for an already running instance by connecting to its local
        # server; waitForConnected() tells us whether one is reachable.
        self._outSocket = QLocalSocket()
        self._outSocket.connectToServer(self._appid)
        self._isRunning = self._outSocket.waitForConnected()
        self._outStream = None
        self._inSocket = None
        self._inStream = None
        self._server = None
        self.settings = QSettings(
            SingleApplication.getSettingsPath(), QSettings.IniFormat
        )
        # Single-instance behavior can be switched off via the settings file.
        self.singleInstance = self.settings.value("singleInstance", "on", type=str) in {
            "on",
            "true",
        }
        if self._isRunning and self.singleInstance:
            # Another instance is running: hand over the first existing file
            # from our command line, then terminate this process.
            self._outStream = QTextStream(self._outSocket)
            for a in argv[0][1:]:
                a = os.path.join(os.getcwd(), a)
                if os.path.isfile(a):
                    self.sendMessage(a)
                    break
            sys.exit(0)
        else:
            # No reachable instance. A ConnectionRefusedError presumably means
            # a stale server socket was left behind — clean it up first.
            error = self._outSocket.error()
            if error == QLocalSocket.ConnectionRefusedError:
                self.close()
                QLocalServer.removeServer(self._appid)
            self._outSocket = None
            # Become the primary instance: listen for future invocations.
            self._server = QLocalServer()
            self._server.listen(self._appid)
            self._server.newConnection.connect(self._onNewConnection)

    def close(self):
        """Disconnect both local sockets and stop the server, if present."""
        if self._inSocket:
            self._inSocket.disconnectFromServer()
        if self._outSocket:
            self._outSocket.disconnectFromServer()
        if self._server:
            self._server.close()

    @staticmethod
    def getSettingsPath() -> str:
        """Return the platform-specific path of the vidcutter.ini settings file."""
        if sys.platform == "win32":
            settings_path = os.path.join(
                QDir.homePath(), "AppData", "Local", "vidcutter"
            )
        elif sys.platform == "darwin":
            settings_path = os.path.join(
                QDir.homePath(), "Library", "Preferences", "vidcutter"
            )
        else:
            # Linux: a module living under /app/ indicates a Flatpak sandbox.
            if QFileInfo(__file__).absolutePath().startswith("/app/"):
                settings_path = QProcessEnvironment.systemEnvironment().value(
                    "XDG_CONFIG_HOME", ""
                )
                if not len(settings_path):
                    # No XDG_CONFIG_HOME: use the Flatpak per-app config dir.
                    settings_path = os.path.join(
                        QDir.homePath(),
                        ".var",
                        "app",
                        vidcutter.__desktopid__,
                        "config",
                    )
            else:
                settings_path = os.path.join(QDir.homePath(), ".config", "vidcutter")
        return os.path.join(settings_path, "vidcutter.ini")

    def isRunning(self):
        # True if another instance was already running when we started.
        return self._isRunning

    def appid(self):
        return self._appid

    def activationWindow(self):
        return self._activationWindow

    def setActivationWindow(self, activationWindow, activateOnMessage=True):
        """Register the window to raise when a message arrives (if enabled)."""
        self._activationWindow = activationWindow
        self._activateOnMessage = activateOnMessage

    def activateWindow(self):
        """Un-minimize, raise and focus the registered activation window."""
        if not self._activationWindow:
            return
        self._activationWindow.setWindowState(
            self._activationWindow.windowState() & ~Qt.WindowMinimized
        )
        self._activationWindow.raise_()
        self._activationWindow.activateWindow()

    def sendMessage(self, msg):
        """Send *msg* to the primary instance; return True once bytes are written."""
        if not self._outStream:
            return False
        # noinspection PyUnresolvedReferences
        self._outStream << msg << "\n"
        self._outStream.flush()
        return self._outSocket.waitForBytesWritten()

    def _onNewConnection(self):
        # Accept the next pending client, replacing any previous connection.
        if self._inSocket:
            self._inSocket.readyRead.disconnect(self._onReadyRead)
        self._inSocket = self._server.nextPendingConnection()
        if not self._inSocket:
            return
        self._inStream = QTextStream(self._inSocket)
        self._inSocket.readyRead.connect(self._onReadyRead)
        if self._activateOnMessage:
            self.activateWindow()

    def _onReadyRead(self):
        # Drain all complete lines, emitting one signal per message.
        while True:
            msg = self._inStream.readLine()
            if not msg:
                break
            self.messageReceived.emit(msg)
|
borg | logger | """logging facilities
The way to use this is as follows:
* each module declares its own logger, using:
from .logger import create_logger
logger = create_logger()
* then each module uses logger.info/warning/debug/etc according to the
level it believes is appropriate:
logger.debug('debugging info for developers or power users')
logger.info('normal, informational output')
logger.warning('warn about a non-fatal error or sth else')
logger.error('a fatal error')
... and so on. see the `logging documentation
<https://docs.python.org/3/howto/logging.html#when-to-use-logging>`_
for more information
* console interaction happens on stderr, that includes interactive
reporting functions like `help`, `info` and `list`
* ...except ``input()`` is special, because we can't control the
stream it is using, unfortunately. we assume that it won't clutter
stdout, because interaction would be broken then anyways
* what is output on INFO level is additionally controlled by commandline
flags
Logging setup is a bit complicated in borg, as it needs to work under misc. conditions:
- purely local, not client/server (easy)
- client/server: RemoteRepository ("borg serve" process) writes log records into a global
queue, which is then sent to the client side by the main serve loop (via the RPC protocol,
either over ssh stdout, more directly via process stdout without ssh [used in the tests]
or via a socket. On the client side, the log records are fed into the clientside logging
system. When remote_repo.close() is called, server side must send all queued log records
via the RPC channel before returning the close() call's return value (as the client will
then shut down the connection).
- progress output is always given as json to the logger (including the plain text inside
the json), but then formatted by the logging system's formatter as either plain text or
json depending on the cli args given (--log-json?).
- tests: potentially running in parallel via pytest-xdist, capturing borg output into a
given stream.
- logging might be short-lived (e.g. when invoking a single borg command via the cli)
or long-lived (e.g. borg serve --socket or when running the tests)
- logging is global and exists only once per process.
"""
import inspect
import json
import logging
import logging.config
import logging.handlers # needed for handlers defined there being configurable in logging.conf file
import os
import queue
import sys
import time
import warnings
from typing import Optional
logging_debugging_path: Optional[str] = None  # if set, write borg.logger debugging log to path/borg-*.log

# set True by setup_logging(); LazyLogger raises if used before then
configured = False

# log records written by "borg serve" code, forwarded to the client via the RPC channel
borg_serve_log_queue: queue.SimpleQueue = queue.SimpleQueue()
class BorgQueueHandler(logging.handlers.QueueHandler):
    """borg serve writes log record dicts to a borg_serve_log_queue"""

    def prepare(self, record: logging.LogRecord) -> dict:
        # Flatten the record into the kwargs of the LogRecord constructor so
        # the client side can rebuild an equivalent record from the dict.
        return {
            "name": record.name,
            "level": record.levelno,
            "pathname": record.pathname,
            "lineno": record.lineno,
            "msg": record.msg,
            "args": record.args,
            "exc_info": record.exc_info,
            "func": record.funcName,
            "sinfo": record.stack_info,
        }
class StderrHandler(logging.StreamHandler):
    """
    StreamHandler variant that resolves sys.stderr at emit time.

    Unlike StreamHandler(sys.stderr), this follows reassignments of
    sys.stderr made after the handler was constructed.
    """

    def __init__(self, stream=None):
        # Deliberately skip StreamHandler.__init__ so no stream object is
        # captured; the property below looks up sys.stderr dynamically.
        logging.Handler.__init__(self)

    @property
    def stream(self):
        return sys.stderr
class TextProgressFormatter(logging.Formatter):
    def format(self, record: logging.LogRecord) -> str:
        # Progress is always logged as json; the human-readable line lives
        # under the "message" key of the payload.
        payload = json.loads(record.msg)
        return str(payload["message"])
class JSONProgressFormatter(logging.Formatter):
    def format(self, record: logging.LogRecord) -> str:
        # record.msg already contains the json progress payload - pass it on.
        return str(record.msg)
# use something like this to ignore warnings:
# warnings.filterwarnings('ignore', r'... regex for warning message to ignore ...')
def _log_warning(message, category, filename, lineno, file=None, line=None):
    """warnings.showwarning replacement: route warnings into the logging system.

    The signature matches warnings.showwarning; *file* and *line* are accepted
    but unused because output goes to a logger instead of a stream.
    """
    # Bug fix: include the real origin (filename:lineno) instead of the
    # hard-coded "(unknown)" — the log record itself will point at this
    # function, so msg is the only place carrying the warning's true source.
    msg = f"{filename}:{lineno}: {category.__name__}: {message}"
    logger = create_logger(__name__)
    # Note: the warning will look like coming from here,
    # but msg contains info about where it really comes from
    logger.warning(msg)
def remove_handlers(logger):
    """Flush, close and detach every handler currently attached to *logger*."""
    # Iterate over a copy: removeHandler mutates logger.handlers.
    for handler in list(logger.handlers):
        handler.flush()
        handler.close()
        logger.removeHandler(handler)
def flush_logging():
    """Flush all handlers of the progress logger and of the root logger.

    Especially important for "borg serve" RemoteRepository logging: all
    queued log output must be pushed through the ssh / socket connection
    before it is closed.
    """
    for logger_name in ("borg.output.progress", ""):
        for handler in logging.getLogger(logger_name).handlers:
            handler.flush()
def setup_logging(
    stream=None,
    conf_fname=None,
    env_var="BORG_LOGGING_CONF",
    level="info",
    is_serve=False,
    log_json=False,
    func=None,
):
    """setup logging module according to the arguments provided

    if conf_fname is given (or the config file name can be determined via
    the env_var, if given): load this logging configuration.

    otherwise, set up a stream handler logger on stderr (by default, if no
    stream is provided).

    is_serve: are we setting up the logging for "borg serve"?
    log_json: emit json log records instead of plain text?
    func: label written into the debugging logs (when logging_debugging_path is set)

    Returns None when a config file was used, else the handler installed on
    the root logger.
    """
    global configured
    err_msg = None
    if env_var:
        conf_fname = os.environ.get(env_var, conf_fname)
    if conf_fname:
        try:
            conf_fname = os.path.abspath(conf_fname)
            # we open the conf file here to be able to give a reasonable
            # error message in case of failure (if we give the filename to
            # fileConfig(), it silently ignores unreadable files and gives
            # unhelpful error msgs like "No section: 'formatters'"):
            with open(conf_fname) as f:
                logging.config.fileConfig(f)
            configured = True
            logger = logging.getLogger(__name__)
            logger.debug(f'using logging configuration read from "{conf_fname}"')
            warnings.showwarning = _log_warning
            return None
        except Exception as err:  # XXX be more precise
            # remember the problem and fall through to the builtin fallback;
            # the message is logged later, once logging actually works.
            err_msg = str(err)
    # if we did not / not successfully load a logging configuration, fallback to this:
    level = level.upper()
    fmt = "%(message)s"
    formatter = JsonFormatter(fmt) if log_json else logging.Formatter(fmt)
    SHandler = StderrHandler if stream is None else logging.StreamHandler
    # "borg serve" must not write to its std streams directly: records go into
    # the global queue and are forwarded to the client via the RPC channel.
    handler = BorgQueueHandler(borg_serve_log_queue) if is_serve else SHandler(stream)
    handler.setFormatter(formatter)
    logger = logging.getLogger()
    remove_handlers(logger)
    logger.setLevel(level)
    if logging_debugging_path is not None:
        # add an addtl. root handler for debugging purposes
        log_fname = os.path.join(logging_debugging_path, f"borg-{'serve' if is_serve else 'client'}-root.log")
        handler2 = logging.StreamHandler(open(log_fname, "a"))
        handler2.setFormatter(formatter)
        logger.addHandler(handler2)
        logger.warning(f"--- {func} ---")  # only handler2 shall get this
    logger.addHandler(handler)  # do this late, so handler is not added while debug handler is set up
    # separate logger (non-propagating) for progress output, with its own formatter
    bop_formatter = JSONProgressFormatter() if log_json else TextProgressFormatter()
    bop_handler = BorgQueueHandler(borg_serve_log_queue) if is_serve else SHandler(stream)
    bop_handler.setFormatter(bop_formatter)
    bop_logger = logging.getLogger("borg.output.progress")
    remove_handlers(bop_logger)
    bop_logger.setLevel("INFO")
    bop_logger.propagate = False
    if logging_debugging_path is not None:
        # add an addtl. progress handler for debugging purposes
        log_fname = os.path.join(
            logging_debugging_path,
            f"borg-{'serve' if is_serve else 'client'}-progress.log",
        )
        bop_handler2 = logging.StreamHandler(open(log_fname, "a"))
        bop_handler2.setFormatter(bop_formatter)
        bop_logger.addHandler(bop_handler2)
        json_dict = dict(
            message=f"--- {func} ---",
            operation=0,
            msgid="",
            type="progress_message",
            finished=False,
            time=time.time(),
        )
        bop_logger.warning(json.dumps(json_dict))  # only bop_handler2 shall get this
    bop_logger.addHandler(bop_handler)  # do this late, so bop_handler is not added while debug handler is set up
    configured = True
    logger = logging.getLogger(__name__)
    if err_msg:
        logger.warning(f'setup_logging for "{conf_fname}" failed with "{err_msg}".')
    logger.debug("using builtin fallback logging configuration")
    warnings.showwarning = _log_warning
    return handler
def find_parent_module():
    """Return the name of the first calling module outside this one.

    Falls back to this module's name (__name__) if the caller cannot be
    determined.
    """
    try:
        caller_frame = inspect.currentframe().f_back
        caller_module = inspect.getmodule(caller_frame)
        # Walk outward past frames with no module or belonging to this module.
        while caller_module is None or caller_module.__name__ == __name__:
            caller_frame = caller_frame.f_back
            caller_module = inspect.getmodule(caller_frame)
        return caller_module.__name__
    except AttributeError:
        # Ran out of frames (frame became None) without finding a caller:
        # fall back to our own module name.
        return __name__
class LazyLogger:
    """Defer logging.getLogger() until the first actual logging call.

    Loggers are typically created at module level, i.e. at import time and
    thus before setup_logging() ran; this wrapper resolves the real logger
    lazily and raises if logging is used before it was configured.
    """

    def __init__(self, name=None):
        self.__name = name or find_parent_module()
        self.__real_logger = None

    @property
    def __logger(self):
        if self.__real_logger is None:
            if not configured:
                raise Exception("tried to call a logger before setup_logging() was called")
            self.__real_logger = logging.getLogger(self.__name)
            if self.__name.startswith("borg.debug.") and self.__real_logger.level == logging.NOTSET:
                self.__real_logger.setLevel("WARNING")
        return self.__real_logger

    @staticmethod
    def __move_msgid(kw):
        # Our logging calls accept msgid=...; the stdlib logger wants custom
        # attributes inside the `extra` dict instead.
        if "msgid" in kw:
            kw.setdefault("extra", {})["msgid"] = kw.pop("msgid")

    def getChild(self, suffix):
        return LazyLogger(self.__name + "." + suffix)

    def setLevel(self, *args, **kw):
        return self.__logger.setLevel(*args, **kw)

    def log(self, *args, **kw):
        self.__move_msgid(kw)
        return self.__logger.log(*args, **kw)

    def exception(self, *args, **kw):
        self.__move_msgid(kw)
        return self.__logger.exception(*args, **kw)

    def debug(self, *args, **kw):
        self.__move_msgid(kw)
        return self.__logger.debug(*args, **kw)

    def info(self, *args, **kw):
        self.__move_msgid(kw)
        return self.__logger.info(*args, **kw)

    def warning(self, *args, **kw):
        self.__move_msgid(kw)
        return self.__logger.warning(*args, **kw)

    def error(self, *args, **kw):
        self.__move_msgid(kw)
        return self.__logger.error(*args, **kw)

    def critical(self, *args, **kw):
        self.__move_msgid(kw)
        return self.__logger.critical(*args, **kw)
def create_logger(name: str = None) -> LazyLogger:
    """Return a LazyLogger for *name* (default: the calling module's name).

    This is really a shortcut for ``logger = logging.getLogger(__name__)``,
    used to avoid errors and provide a more standard API.

    The logger must be created lazily because this is usually called at
    module level — i.e. at import time, BEFORE setup_logging() ran. Laziness
    lets the setup happen first; calling any logger method before
    setup_logging() raises an exception.
    """
    return LazyLogger(name)
class JsonFormatter(logging.Formatter):
    # Record attributes copied into the json output (when truthy).
    # msgid is an attribute we made up in Borg to expose a non-changing
    # handle for log messages.
    RECORD_ATTRIBUTES = (
        "levelname",
        "name",
        "message",
        "msgid",
    )

    # Other attributes exist but are not very useful here:
    # processName, process, relativeCreated, stack_info, thread, threadName.
    # msg == message; *args* are the unformatted logger arguments, only
    # useful once sanitized (JSON serializable) and fixed message IDs are
    # assigned; exc_info/exc_text are folded into the message anyway.
    def format(self, record):
        # Let the base class render record.message from msg/args first.
        super().format(record)
        data = {
            "type": "log_message",
            "time": record.created,
            "message": "",
            "levelname": "CRITICAL",
        }
        data.update(
            (attr, value)
            for attr in self.RECORD_ATTRIBUTES
            if (value := getattr(record, attr, None))
        )
        return json.dumps(data)
|
printdlg | panels | # -*- coding: utf-8 -*-
#
# Copyright (C) 2016 by Ihor E. Novikov
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
import wal
from sk1 import _
from sk1.printing import printout, prn_events
from sk1.printing.generic import MONOCHROME_MODE
from sk1.resources import get_icon, icons
# (width, height) spacer packed between panel elements for padding.
SPACER = (10, 10)
class FLabeledPanel(wal.VPanel):
    """Vertical panel with an upper-cased caption, a content area populated
    by build(), and a horizontal separator line at the bottom."""

    def __init__(self, parent, label="CAPTION"):
        self.title = label.upper()
        wal.VPanel.__init__(self, parent)
        row = wal.HPanel(self)
        row.pack(SPACER)
        self.cont = wal.VPanel(row)
        caption = wal.Label(self.cont, self.title, fontsize=1 if wal.IS_WX4 else 3)
        self.cont.pack(caption, padding=5, align_center=False)
        # Subclasses fill self.cont here, before it is packed into the row.
        self.build()
        row.pack(self.cont, fill=True, expand=True)
        row.pack(SPACER)
        self.pack(row, fill=True)
        self.pack(SPACER)
        self.pack(wal.HLine(self), fill=True)

    def build(self):
        """Hook for subclasses: populate self.cont with controls."""
        pass
class CopiesPanel(FLabeledPanel):
    """Panel controlling the number of copies and the collate/reverse flags."""

    def __init__(self, parent, printer, printout):
        self.printer = printer
        self.printout = printout
        # Indicator bitmaps keyed by "<collate><reverse>" flag string,
        # e.g. "10" = collate on, reverse off.
        self.icons = {
            "00": get_icon(icons.PD_PRINT_COPIES_00, size=wal.DEF_SIZE),
            "10": get_icon(icons.PD_PRINT_COPIES_10, size=wal.DEF_SIZE),
            "01": get_icon(icons.PD_PRINT_COPIES_01, size=wal.DEF_SIZE),
            "11": get_icon(icons.PD_PRINT_COPIES_11, size=wal.DEF_SIZE),
        }
        FLabeledPanel.__init__(self, parent, _("Copies"))
        self.copies_changed()
        prn_events.connect(prn_events.PRINTER_CHANGED, self.on_printer_change)
        prn_events.connect(prn_events.PRINTOUT_MODIFIED, self.copies_changed)

    def build(self):
        """Create the copies spinner, the indicator bitmap and the checkboxes."""
        hpanel = wal.HPanel(self)
        title = _("Number of copies:")
        hpanel.pack(wal.Label(hpanel, title), padding=5)
        self.num_copies = wal.IntSpin(
            hpanel, 1, (1, 9999), onchange=self.copies_changed
        )
        hpanel.pack(self.num_copies)
        self.cont.pack(hpanel)
        self.indicator = wal.Bitmap(hpanel, self.icons["00"])
        self.cont.pack(self.indicator, padding=5)
        hpanel = wal.HPanel(self)
        self.collate = wal.Checkbox(hpanel, _("Collate"), onclick=self.flag_changed)
        hpanel.pack(self.collate)
        hpanel.pack(SPACER)
        self.reverse = wal.Checkbox(hpanel, _("Reverse"), onclick=self.flag_changed)
        hpanel.pack(self.reverse)
        self.cont.pack(hpanel)

    def copies_changed(self):
        """Sync control enabled-state with the copy count and page count."""
        copies = self.num_copies.get_value()
        pages = self.printout.get_num_print_pages()
        # Collate/reverse only make sense for multi-page output.
        state = False
        if pages > 1:
            state = True
        ctrls = [self.collate, self.reverse, self.indicator]
        for item in ctrls:
            item.set_enable(state)
        if not state:
            self.collate.set_value(False)
            self.reverse.set_value(False)
        # A single copy cannot be collated.
        if copies == 1:
            self.collate.set_value(False)
            self.collate.set_enable(False)
        self.update()

    def flag_changed(self):
        """Refresh the indicator bitmap after a checkbox toggle."""
        icon_key = str(int(self.collate.get_value()))
        icon_key += str(int(self.reverse.get_value()))
        self.indicator.set_bitmap(self.icons[icon_key])
        self.update()

    def on_printer_change(self, printer):
        """React to printer switching; virtual printers support one copy only."""
        self.printer = printer
        if self.printer and self.printer.is_virtual():
            self.num_copies.set_enable(False)
            self.num_copies.set_value(1)
            self.collate.set_enable(False)
            self.collate.set_value(False)
        self.update()

    def update(self):
        """Push the current UI values into the printer/printout objects."""
        if self.printer and not self.printer.is_virtual():
            self.num_copies.set_enable(True)
            self.printer.set_copies(self.num_copies.get_value())
            self.printer.set_collate(self.collate.get_value())
        self.printout.set_reverse(self.reverse.get_value())
class PageRangePanel(FLabeledPanel):
    """Panel selecting which pages to print: all / selection / current / range."""

    def __init__(self, parent, printout):
        self.printout = printout
        FLabeledPanel.__init__(self, parent, _("Page range"))

    def build(self):
        """Create the four range radio buttons and the page-list entry."""
        grid = wal.GridPanel(self.cont, 8, 1, 5, 15)
        grid.add_growable_col(0)
        self.all_opt = wal.Radiobutton(grid, _("All"), group=True, onclick=self.update)
        self.sel_opt = wal.Radiobutton(grid, _("Selection"), onclick=self.update)
        self.cpage_opt = wal.Radiobutton(grid, _("Current page"), onclick=self.update)
        self.pages_opt = wal.Radiobutton(grid, _("Pages:"), onclick=self.update)
        self.pages_entry = wal.Entry(grid, "1", onchange=self.pages_changed)
        grid.pack(self.all_opt)
        grid.pack(self.sel_opt)
        grid.pack(self.cpage_opt)
        grid.pack(self.pages_opt)
        grid.pack(self.pages_entry, fill=True)
        title = _("Enter page numbers or page ranges.")
        title += "\n" + _("For example: 1,2,5-6")
        grid.pack(wal.Label(self, title, fontsize=-1))
        self.cont.pack(grid, fill=True, padding_all=5)
        self.pages_entry.set_enable(False)
        self.all_opt.set_value(True)
        # Options not applicable to this document are greyed out.
        if not self.printout.is_selection():
            self.sel_opt.set_enable(False)
        if not self.printout.get_num_pages() > 1:
            self.cpage_opt.set_enable(False)
            self.pages_opt.set_enable(False)

    def pages_changed(self):
        """Sanitize the entry text: keep only digits, commas and dashes."""
        txt = self.pages_entry.get_value()
        pos = self.pages_entry.get_cursor_pos()
        chars = ",0123456789-"
        res = ""
        # Empty or "0" input falls back to page 1.
        if not txt or txt == "0":
            res = "1"
            txt = ""
        for item in txt:
            if item in chars:
                res += item
        if txt == res:
            # Input was already clean: keep the caret position and propagate.
            self.pages_entry.set_value(res)
            self.pages_entry.set_cursor_pos(pos)
            self.update()
        else:
            self.pages_entry.set_value(res)

    def get_page_range(self):
        """Parse the entry text into a list of zero-based page indexes.

        Accepts comma-separated page numbers and "a-b" ranges (1-based);
        pages outside the document are silently dropped.
        """
        txt = self.pages_entry.get_value()
        vals = txt.split(",")
        ret = []
        pages_range = range(self.printout.get_num_pages())
        for item in vals:
            if not item:
                continue
            if "-" in item:
                rngs = item.split("-")
                int_rngs = []
                for rng in rngs:
                    if rng:
                        int_rngs.append(int(rng) - 1)
                if len(int_rngs) == 1:
                    # Degenerate range like "5-": treat as a single page.
                    if int_rngs[0] in pages_range:
                        ret.append(int_rngs[0])
                elif len(int_rngs) > 1:
                    pages = range(int_rngs[0], int_rngs[-1] + 1)
                    for page in pages:
                        if page in pages_range:
                            ret.append(page)
            else:
                val = int(item) - 1
                if val in pages_range:
                    ret.append(val)
        return ret

    def update(self):
        """Propagate the selected range mode (and page list) to the printout."""
        self.pages_entry.set_enable(self.pages_opt.get_value())
        print_range = printout.PRINT_ALL
        page_range = []
        if self.all_opt.get_value():
            print_range = printout.PRINT_ALL
        elif self.sel_opt.get_value():
            print_range = printout.PRINT_SELECTION
        elif self.cpage_opt.get_value():
            print_range = printout.PRINT_CURRENT_PAGE
        elif self.pages_opt.get_value():
            print_range = printout.PRINT_PAGE_RANGE
            page_range = self.get_page_range()
        self.printout.set_print_range(print_range, page_range)
        prn_events.emit(prn_events.PRINTOUT_MODIFIED)
class PrintModePanel(FLabeledPanel):
    """Panel toggling between monochrome and color print modes."""

    def __init__(self, parent, printer):
        self.printer = printer
        FLabeledPanel.__init__(self, parent, _("Print mode"))
        self.on_printer_changed(self.printer)
        prn_events.connect(prn_events.PRINTER_CHANGED, self.on_printer_changed)
        prn_events.connect(prn_events.PRINTER_MODIFIED, self.on_printer_modified)

    def build(self):
        """Create the mono/color radio buttons with their preview bitmaps."""
        grid = wal.GridPanel(self.cont, 2, 3, 5, 5)
        self.mono_opt = wal.Radiobutton(
            grid, _("Monochrome"), group=True, onclick=self.update
        )
        icon = get_icon(icons.PD_PRINTMODE_MONO, size=wal.DEF_SIZE)
        self.mono_bmp = wal.Bitmap(grid, icon)
        grid.pack(SPACER)
        grid.pack(self.mono_bmp)
        grid.pack(self.mono_opt)
        self.color_opt = wal.Radiobutton(grid, _("Color"), onclick=self.update)
        icon = get_icon(icons.PD_PRINTMODE_COLOR, size=wal.DEF_SIZE)
        self.color_bmp = wal.Bitmap(grid, icon)
        grid.pack(SPACER)
        grid.pack(self.color_bmp)
        grid.pack(self.color_opt)
        self.cont.pack(grid, align_center=False)

    def update(self):
        """Push the selected mode to the printer and notify listeners."""
        # color mode == "monochrome not selected"
        self.printer.set_color_mode(self.mono_opt.get_value() is False)
        prn_events.emit(prn_events.PRINTER_MODIFIED)

    def on_printer_changed(self, printer):
        self.printer = printer
        # The color option is only available on color-capable printers.
        self.color_opt.set_enable(self.printer.is_color())
        self.on_printer_modified()

    def on_printer_modified(self):
        # Reflect the printer's current mode in the radio buttons.
        if self.printer.color_mode == MONOCHROME_MODE:
            self.mono_opt.set_value(True)
        else:
            self.color_opt.set_value(True)
class PrinterPanel(FLabeledPanel):
    """Panel with the printer selector combo box and the Print button."""

    # Guards against change events fired while the combo is still being built.
    ready_flag = False

    def __init__(self, parent, dlg, printsys):
        self.printsys = printsys
        self.dlg = dlg
        self.printer = self.printsys.get_default_printer()
        FLabeledPanel.__init__(self, parent, _("Printer"))

    def build(self):
        """Create the printer combo (preselecting the default) and the Print button."""
        plist = self.printsys.get_printer_names()
        self.prn_list = wal.Combolist(
            self.cont, items=plist, onchange=self.on_printer_change
        )
        self.prn_list.set_active(plist.index(self.printer.get_name()))
        self.cont.pack(self.prn_list, fill=True, expand=True)
        self.cont.pack(SPACER)
        hpanel = wal.HPanel(self.cont)
        hpanel.pack((1, 1), fill=True, expand=True)
        self.print_btn = wal.Button(hpanel, _("Print"), onclick=self.dlg.on_print)
        hpanel.pack(self.print_btn)
        self.cont.pack(hpanel, fill=True)
        self.ready_flag = True

    def on_printer_change(self):
        """Resolve the newly selected printer and broadcast PRINTER_CHANGED."""
        if not self.ready_flag:
            return
        name = self.prn_list.get_active_value()
        self.printer = self.printsys.get_printer_by_name(name)
        prn_events.emit(prn_events.PRINTER_CHANGED, self.printer)
|
PyObjCTest | test_nscolor | import array
import sys
from AppKit import *
from PyObjCTools.TestSupport import *
# Py2/Py3 compatibility shim: Python 3 has no builtin `unicode`,
# so alias it to `str` for the isinstance assertions below.
try:
    unicode
except NameError:
    unicode = str
class TestRegressions(TestCase):
    """Regression tests for the NSColor bindings."""

    def testQualifiersInSignature(self):
        # Passing None for all out-parameters must not crash.
        NSColor.redColor().getRed_green_blue_alpha_(None, None, None, None)

    def testMethods(self):
        self.assertResultIsBOOL(NSColor.ignoresAlpha)
        self.assertArgIsBOOL(NSColor.setIgnoresAlpha_, 0)
        space = NSColorSpace.adobeRGB1998ColorSpace()
        color = NSColor.colorWithColorSpace_components_count_(
            space, (0.1, 0.2, 0.3, 0.4), 4
        )
        self.assertIsInstance(color, NSColor)
        # Out-parameters (passed as None) come back as a tuple of floats.
        color = NSColor.colorWithCalibratedRed_green_blue_alpha_(0, 0, 0, 0)
        r, g, b, a = color.getRed_green_blue_alpha_(None, None, None, None)
        self.assertIsInstance(r, float)
        self.assertIsInstance(g, float)
        self.assertIsInstance(b, float)
        self.assertIsInstance(a, float)
        color = NSColor.colorWithCalibratedHue_saturation_brightness_alpha_(
            0.1, 0.2, 0.3, 0.4
        )
        h, s, b, a = color.getHue_saturation_brightness_alpha_(None, None, None, None)
        self.assertIsInstance(h, float)
        self.assertIsInstance(s, float)
        self.assertIsInstance(b, float)
        self.assertIsInstance(a, float)
        color = NSColor.colorWithCalibratedWhite_alpha_(0.1, 0.2)
        w, a = color.getWhite_alpha_(None, None)
        self.assertIsInstance(w, float)
        self.assertIsInstance(a, float)
        color = NSColor.colorWithDeviceCyan_magenta_yellow_black_alpha_(1, 1, 1, 1, 1)
        c, m, y, b, a = color.getCyan_magenta_yellow_black_alpha_(
            None, None, None, None, None
        )
        self.assertIsInstance(c, float)
        self.assertIsInstance(m, float)
        self.assertIsInstance(y, float)
        self.assertIsInstance(b, float)
        self.assertIsInstance(a, float)
        # Pick the array element size by pointer width: doubles on 64-bit,
        # floats on 32-bit builds.
        if sys.maxsize > 2**32:
            a = array.array("d", [0] * 6)
        else:
            a = array.array("f", [0] * 6)
        # getComponents_ fills the caller-provided buffer; only the first
        # component is checked here.
        v = color.getComponents_(a)
        self.assertEqual(a[0], 1.0)

    def testConstants(self):
        self.assertIsInstance(NSSystemColorsDidChangeNotification, unicode)
        self.assertEqual(NSAppKitVersionNumberWithPatternColorLeakFix, 641.0)
# main() is provided by PyObjCTools.TestSupport (star import above).
if __name__ == "__main__":
    main()
|
versions | 46a278193a94_enable_millisecond_precision_in_mysql_ | """Enable millisecond precision in MySQL datetime
Revision ID: 46a278193a94
Revises: 4d3c1b59d011
Create Date: 2022-11-01 23:27:44.620893
"""
from alembic import op # noqa: I001
from sqlalchemy.dialects import mysql
# revision identifiers, used by Alembic.
revision = "46a278193a94"  # this migration
down_revision = "4d3c1b59d011"  # parent migration
branch_labels = None
depends_on = None
def upgrade():
    """Widen every plain DATETIME column to DATETIME(6) on MySQL.

    No-op for other database backends.
    """
    conn = op.get_bind()
    if str(conn.engine.url).startswith("mysql"):
        # Find all datetime columns in the current schema that still have
        # whole-second precision.
        get_columns = "SELECT `TABLE_NAME`, `COLUMN_NAME` FROM `information_schema`.`COLUMNS` WHERE `table_schema`=DATABASE() AND `DATA_TYPE`='datetime' AND `COLUMN_TYPE`='datetime';"
        for table_name, column_name in conn.execute(get_columns).fetchall():
            op.alter_column(
                table_name=table_name,
                column_name=column_name,
                type_=mysql.DATETIME(fsp=6),
            )
def downgrade():
    """Revert DATETIME(6) columns back to whole-second DATETIME on MySQL.

    No-op for other database backends.
    """
    conn = op.get_bind()
    if str(conn.engine.url).startswith("mysql"):
        # Find all datetime columns that were widened to microsecond precision.
        get_columns = "SELECT `TABLE_NAME`, `COLUMN_NAME` FROM `information_schema`.`COLUMNS` WHERE `table_schema`=DATABASE() AND `DATA_TYPE`='datetime' AND `COLUMN_TYPE`='datetime(6)';"
        for table_name, column_name in conn.execute(get_columns).fetchall():
            op.alter_column(
                table_name=table_name,
                column_name=column_name,
                type_=mysql.DATETIME(fsp=0),
            )
|
core | Connection | """
Copyright 2008-2015 Free Software Foundation, Inc.
This file is part of GNU Radio
SPDX-License-Identifier: GPL-2.0-or-later
"""
from .base import Element
from .Constants import ALIASES_OF
from .utils.descriptors import lazy_property
class Connection(Element):
    is_connection = True

    def __init__(self, parent, source, sink):
        """
        Make a new connection given the parent and 2 ports.

        Args:
            parent: the flow graph this connection belongs to
            source: a port (any direction)
            sink: a port (any direction)
        @throws Error cannot make connection

        Returns:
            a new connection
        """
        Element.__init__(self, parent)
        # Ports may arrive in either order - normalize to (source, sink).
        if not source.is_source:
            source, sink = sink, source
        if not source.is_source:
            raise ValueError("Connection could not isolate source")
        if not sink.is_sink:
            raise ValueError("Connection could not isolate sink")
        self.source_port = source
        self.sink_port = sink

    def __str__(self):
        return "Connection (\n\t{}\n\t\t{}\n\t{}\n\t\t{}\n)".format(
            self.source_block, self.source_port, self.sink_block, self.sink_port
        )

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return NotImplemented
        return (
            self.source_port == other.source_port and self.sink_port == other.sink_port
        )

    def __hash__(self):
        return hash((self.source_port, self.sink_port))

    def __iter__(self):
        return iter((self.source_port, self.sink_port))

    @lazy_property
    def source_block(self):
        return self.source_port.parent_block

    @lazy_property
    def sink_block(self):
        return self.sink_port.parent_block

    @lazy_property
    def type(self):
        # (source domain, sink domain) - key into the connection templates.
        return self.source_port.domain, self.sink_port.domain

    @property
    def enabled(self):
        """
        Get the enabled state of this connection.

        Returns:
            true if source and sink blocks are enabled
        """
        return self.source_block.enabled and self.sink_block.enabled

    def validate(self):
        """
        Validate the connection: the domains must be connectable and the
        port IO types and sizes must match.
        """
        Element.validate(self)
        platform = self.parent_platform
        if self.type not in platform.connection_templates:
            self.add_error_message(
                'No connection known between domains "{}" and "{}"'.format(*self.type)
            )
        source_dtype = self.source_port.dtype
        sink_dtype = self.sink_port.dtype
        # Differing dtypes are still acceptable when aliased to each other.
        dtypes_compatible = (
            source_dtype == sink_dtype
            or source_dtype in ALIASES_OF.get(sink_dtype, set())
        )
        if not dtypes_compatible:
            self.add_error_message(
                'Source IO type "{}" does not match sink IO type "{}".'.format(
                    source_dtype, sink_dtype
                )
            )
        source_size = self.source_port.item_size
        sink_size = self.sink_port.item_size
        if source_size != sink_size:
            self.add_error_message(
                'Source IO size "{}" does not match sink IO size "{}".'.format(
                    source_size, sink_size
                )
            )

    ##############################################
    # Import/Export Methods
    ##############################################
    def export_data(self):
        """
        Export this connection's info.

        Returns:
            a tuple of (source block name, source port key,
            sink block name, sink port key)
        """
        return (
            self.source_block.name,
            self.source_port.key,
            self.sink_block.name,
            self.sink_port.key,
        )
|
TDTest | DrawHatchTest | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
import os
import unittest
import FreeCAD
class DrawHatchTest(unittest.TestCase):
    """Exercise TechDraw hatch creation on a simple page/view setup."""

    def setUp(self):
        """Creates a page and view"""
        self.path = os.path.dirname(os.path.abspath(__file__))
        print("TDHatch path: " + self.path)
        # os.path.join is platform-safe, unlike manual "/" concatenation
        templateFileSpec = os.path.join(self.path, "TestTemplate.svg")
        FreeCAD.newDocument("TDHatch")
        FreeCAD.setActiveDocument("TDHatch")
        FreeCAD.ActiveDocument = FreeCAD.getDocument("TDHatch")
        # make source feature
        box = FreeCAD.ActiveDocument.addObject("Part::Box", "Box")
        # make a page
        self.page = FreeCAD.ActiveDocument.addObject("TechDraw::DrawPage", "Page")
        FreeCAD.ActiveDocument.addObject("TechDraw::DrawSVGTemplate", "Template")
        FreeCAD.ActiveDocument.Template.Template = templateFileSpec
        FreeCAD.ActiveDocument.Page.Template = FreeCAD.ActiveDocument.Template
        self.page.Scale = 5.0
        # page.ViewObject.show()  # unit tests run in console mode
        # make Views
        self.view = FreeCAD.ActiveDocument.addObject("TechDraw::DrawViewPart", "View")
        FreeCAD.ActiveDocument.View.Source = [box]
        self.page.addView(self.view)
        FreeCAD.ActiveDocument.recompute()

    def tearDown(self):
        """Close the temporary document created in setUp."""
        FreeCAD.closeDocument("TDHatch")

    def testMakeHatchCase(self):
        """Tests if hatch area can be added to view"""
        # make hatch
        print("making hatch")
        hatch = FreeCAD.ActiveDocument.addObject("TechDraw::DrawHatch", "Hatch")
        hatch.Source = (self.view, ["Face0"])
        hatchFileSpec = os.path.join(self.path, "TestHatch.svg")
        # comment out to use default from preferences
        hatch.HatchPattern = hatchFileSpec
        print("finished hatch")
        FreeCAD.ActiveDocument.recompute()
        # assertIn gives a clearer failure message than assertTrue(x in y)
        self.assertIn("Up-to-date", hatch.State)
# Allow running this test module directly from the command line.
if __name__ == "__main__":
    unittest.main()
|
event | model | import datetime
import sys
from pathlib import Path
from photonix.photos.utils.metadata import PhotoMetadata, parse_datetime
class EventModel:
    # Model metadata consumed by the classifier scheduler.
    version = 20210505
    approx_ram_mb = 120
    max_num_workers = 2
    def predict(self, image_file):
        """Return a one-element list with an event name for the photo, or [].

        Reads the capture date from the photo's metadata (trying several tag
        spellings) and maps it onto a fixed set of calendar events.
        """
        metadata = PhotoMetadata(image_file)
        date_taken = None
        # Metadata keys tried in decreasing order of reliability.
        possible_date_keys = [
            "Date/Time Original",
            "Date Time Original",
            "Date/Time",
            "Date Time",
            "GPS Date/Time",
            "Modify Date",
            "File Modification Date/Time",
        ]
        for date_key in possible_date_keys:
            date_taken = parse_datetime(metadata.get(date_key))
            if date_taken:
                # Calendar events keyed by date, within the photo's own year.
                events = {
                    datetime.date(date_taken.year, 12, 25): "Christmas Day",
                    datetime.date(date_taken.year, 10, 31): "Halloween",
                    datetime.date(date_taken.year, 2, 14): "Valentine's Day",
                    datetime.date(date_taken.year, 12, 31): "New Year Start",
                    datetime.date(date_taken.year, 1, 1): "New Year End",
                }
                if events.get(date_taken.date()):
                    if events.get(date_taken.date()).startswith("New Year"):
                        # Collapse the Dec 31 window into a single "New Year" tag.
                        start_of_day = datetime.datetime.combine(
                            datetime.date(date_taken.year, 12, 31),
                            datetime.datetime.min.time(),
                        )
                        end_of_day = start_of_day + datetime.timedelta(days=1)
                        # NOTE(review): for a Jan 1 photo this window is Dec 31 of
                        # the SAME year, so the photo falls outside it and gets
                        # "New Year End" below — confirm this is intended.
                        if (
                            start_of_day
                            <= date_taken.replace(tzinfo=None)
                            <= end_of_day
                        ):
                            return ["New Year"]
                    return [events.get(date_taken.date())]
        return []
def run_on_photo(photo_id):
    """Run the EventModel classifier on one photo and persist resulting tags.

    Returns a (photo, results) tuple; photo is falsy when the lookup failed.
    """
    # Make the package root importable before pulling in the runner helpers.
    sys.path.insert(0, str(Path(__file__).resolve().parent.parent))
    from photonix.classifiers.runners import (
        get_or_create_tag,
        results_for_model_on_photo,
    )

    photo, results = results_for_model_on_photo(EventModel(), photo_id)
    if photo:
        from photonix.photos.models import PhotoTag

        # Replace any previously computed event tags on this photo.
        photo.clear_tags(source="C", type="E")
        for event_name in results:
            event_tag = get_or_create_tag(
                library=photo.library, name=event_name, type="E", source="C"
            )
            PhotoTag(
                photo=photo, tag=event_tag, source="C", confidence=0.5, significance=0.5
            ).save()
    return photo, results
if __name__ == "__main__":
    # CLI entry point: classify a single image given as the only argument.
    if len(sys.argv) != 2:
        print("Argument required: image file path")
        # sys.exit() instead of the builtin exit(): the latter is provided by
        # the site module for interactive use and may be absent.
        sys.exit(1)
    _, results = run_on_photo(sys.argv[1])
    print(results)
|
scripts | msgfmt | #! /usr/bin/env python3
# Written by Martin v. Löwis <loewis@informatik.hu-berlin.de>
"""Generate binary message catalog from textual translation description.
This program converts a textual Uniforum-style message catalog (.po file) into
a binary GNU catalog (.mo file). This is essentially the same function as the
GNU msgfmt program, however, it is a simpler implementation.
Usage: msgfmt.py [OPTIONS] filename.po
Options:
-o file
--output-file=file
Specify the output file to write to. If omitted, output will go to a
file named filename.mo (based off the input file name).
-h
--help
Print this message and exit.
-V
--version
Display version information and exit.
"""
import array
import ast
import getopt
import os
import struct
import sys
from email.parser import HeaderParser
__version__ = "1.1"
# Global catalog of translations: filled by add(), serialized by generate(),
# cleared by reset().
MESSAGES = {}
def usage(code, msg=""):
    """Print the module help (plus an optional message) to stderr and exit.

    Args:
        code: process exit status handed to sys.exit().
        msg: optional extra message printed after the help text.
    """
    out = sys.stderr
    print(__doc__, file=out)
    if msg:
        print(msg, file=out)
    sys.exit(code)
def add(id, str, fuzzy):
    """Record one translation pair, skipping fuzzy or empty entries."""
    global MESSAGES
    # Fuzzy entries and empty translations are deliberately dropped.
    if str and not fuzzy:
        MESSAGES[id] = str
def generate():
    """Return the generated .mo catalog contents as bytes.

    Layout (GNU gettext .mo format): a 7-uint header, the key index, the
    value index, then the NUL-terminated keys followed by the NUL-terminated
    values. No hash table is emitted.
    """
    global MESSAGES
    # the keys are sorted in the .mo file
    keys = sorted(MESSAGES.keys())
    offsets = []
    ids = strs = b""
    for id in keys:
        # For each string, we need size and file offset. Each string is NUL
        # terminated; the NUL does not count into the size.
        offsets.append((len(ids), len(id), len(strs), len(MESSAGES[id])))
        ids += id + b"\0"
        strs += MESSAGES[id] + b"\0"
    # The header is 7 32-bit unsigned integers. We don't use hash tables, so
    # the keys start right after the index tables.
    keystart = 7 * 4 + 16 * len(keys)
    # and the values start after the keys
    valuestart = keystart + len(ids)
    koffsets = []
    voffsets = []
    # The string table first has the list of keys, then the list of values.
    # Each entry has first the size of the string, then the file offset.
    for o1, l1, o2, l2 in offsets:
        koffsets += [l1, o1 + keystart]
        voffsets += [l2, o2 + valuestart]
    offsets = koffsets + voffsets
    output = struct.pack(
        "Iiiiiii",
        0x950412DE,  # Magic
        0,  # Version
        len(keys),  # # of entries
        7 * 4,  # start of key index
        7 * 4 + len(keys) * 8,  # start of value index
        0,
        0,
    )  # size and offset of hash table
    # array.tostring() was removed in Python 3.9; tobytes() has existed since
    # 3.2, so the old try/except AttributeError fallback (and the dead
    # 'output = ""' str initializer) are gone.
    output += array.array("i", offsets).tobytes()
    output += ids
    output += strs
    return output
def reset():
    """Drop every accumulated translation (clears the module-level catalog)."""
    global MESSAGES
    # Clear in place so other references to the dict stay valid.
    MESSAGES.clear()
def make(filename, outfile):
    """Parse the .po catalog *filename* and write the binary .mo to *outfile*.

    If *filename* lacks a ".po" suffix it is appended; if *outfile* is None
    the output name is derived from the input name. Exits the process on an
    unreadable input file or a catalog syntax error.
    """
    ID = 1
    STR = 2
    # Compute .mo name from .po name and arguments
    if filename.endswith(".po"):
        infile = filename
    else:
        infile = filename + ".po"
    if outfile is None:
        outfile = os.path.splitext(infile)[0] + ".mo"
    try:
        # 'with' guarantees the input handle is closed (it was previously
        # left open until garbage collection).
        with open(infile, "rb") as infp:
            lines = infp.readlines()
    except IOError as msg:
        print(msg, file=sys.stderr)
        sys.exit(1)
    section = None
    fuzzy = 0
    # Start off assuming Latin-1, so everything decodes without failure,
    # until we know the exact encoding
    encoding = "latin-1"
    # Parse the catalog
    lno = 0
    for l in lines:
        l = l.decode(encoding)
        lno += 1
        # If we get a comment line after a msgstr, this is a new entry
        if l[0] == "#" and section == STR:
            add(msgid, msgstr, fuzzy)
            section = None
            fuzzy = 0
        # Record a fuzzy mark
        if l[:2] == "#," and "fuzzy" in l:
            fuzzy = 1
        # Skip comments
        if l[0] == "#":
            continue
        # Now we are in a msgid section, output previous section
        if l.startswith("msgid") and not l.startswith("msgid_plural"):
            if section == STR:
                add(msgid, msgstr, fuzzy)
                if not msgid:
                    # See whether there is an encoding declaration
                    p = HeaderParser()
                    charset = p.parsestr(msgstr.decode(encoding)).get_content_charset()
                    if charset:
                        encoding = charset
            section = ID
            l = l[5:]
            msgid = msgstr = b""
            is_plural = False
        # This is a message with plural forms
        elif l.startswith("msgid_plural"):
            if section != ID:
                print(
                    "msgid_plural not preceded by msgid on %s:%d" % (infile, lno),
                    file=sys.stderr,
                )
                sys.exit(1)
            l = l[12:]
            msgid += b"\0"  # separator of singular and plural
            is_plural = True
        # Now we are in a msgstr section
        elif l.startswith("msgstr"):
            section = STR
            if l.startswith("msgstr["):
                if not is_plural:
                    print(
                        "plural without msgid_plural on %s:%d" % (infile, lno),
                        file=sys.stderr,
                    )
                    sys.exit(1)
                l = l.split("]", 1)[1]
                if msgstr:
                    msgstr += b"\0"  # Separator of the various plural forms
            else:
                if is_plural:
                    print(
                        "indexed msgstr required for plural on %s:%d" % (infile, lno),
                        file=sys.stderr,
                    )
                    sys.exit(1)
                l = l[6:]
        # Skip empty lines
        l = l.strip()
        if not l:
            continue
        l = ast.literal_eval(l)
        if section == ID:
            msgid += l.encode(encoding)
        elif section == STR:
            msgstr += l.encode(encoding)
        else:
            print("Syntax error on %s:%d" % (infile, lno), "before:", file=sys.stderr)
            print(l, file=sys.stderr)
            sys.exit(1)
    # Add last entry
    if section == STR:
        add(msgid, msgstr, fuzzy)
    # Compute output
    output = generate()
    try:
        # Context manager closes (and flushes) the output file deterministically.
        with open(outfile, "wb") as outfp:
            outfp.write(output)
    except IOError as msg:
        print(msg, file=sys.stderr)
def main():
    """Command-line driver: parse options, then convert each named catalog."""
    try:
        opts, args = getopt.getopt(
            sys.argv[1:], "hVo:", ["help", "version", "output-file="]
        )
    except getopt.error as msg:
        usage(1, msg)

    outfile = None
    # Option handling
    for flag, value in opts:
        if flag in ("-h", "--help"):
            usage(0)
        elif flag in ("-V", "--version"):
            print("msgfmt.py", __version__)
            sys.exit(0)
        elif flag in ("-o", "--output-file"):
            outfile = value

    if not args:
        print("No input file given", file=sys.stderr)
        print("Try `msgfmt --help' for more information.", file=sys.stderr)
        return

    for filename in args:
        make(filename, outfile)


if __name__ == "__main__":
    main()
|
lib | profiler | """Profiler tools for CherryPy.
CherryPy users
==============
You can profile any of your pages as follows::
from cherrypy.lib import profiler
class Root:
        p = profiler.Profiler("/path/to/profile/dir")
def index(self):
self.p.run(self._index)
index.exposed = True
def _index(self):
return "Hello, world!"
cherrypy.tree.mount(Root())
You can also turn on profiling for all requests
using the ``make_app`` function as WSGI middleware.
CherryPy developers
===================
This module can be used whenever you make changes to CherryPy,
to get a quick sanity-check on overall CP performance. Use the
``--profile`` flag when running the test suite. Then, use the ``serve()``
function to browse the results in a web browser. If you run this
module from the command line, it will call ``serve()`` for you.
"""
def new_func_strip_path(func_name):
    """Variant of pstats' func_strip_path keeping `__init__` modules' parents.

    Takes a (filename, line, name) tuple and returns it with the filename
    reduced to its basename — except for __init__.py files, where the parent
    package directory is kept so the report entry stays identifiable.
    """
    filename, line, name = func_name
    if not filename.endswith("__init__.py"):
        return os.path.basename(filename), line, name
    # Keep "<package>/__init__.py": basename of the parent dir plus the suffix.
    prefix, suffix = filename[:-12], filename[-12:]
    return os.path.basename(prefix) + suffix, line, name
# The profile/pstats modules may be packaged separately on some
# distributions; fall back to None so importing this module never fails and
# callers can emit a warning instead.
try:
    import profile
    import pstats
    # Monkey-patch pstats so report paths keep "<pkg>/__init__.py" readable.
    pstats.func_strip_path = new_func_strip_path
except ImportError:
    profile = None
    pstats = None
import os
import os.path
import sys
import warnings
from cherrypy._cpcompat import StringIO
_count = 0
class Profiler(object):
    """Dumps per-call profile data into a directory and renders simple reports."""

    def __init__(self, path=None):
        """Create a profiler writing .prof files under *path* (created if needed)."""
        if not path:
            path = os.path.join(os.path.dirname(__file__), "profile")
        self.path = path
        if not os.path.exists(path):
            os.makedirs(path)

    def run(self, func, *args, **params):
        """Dump profile data into self.path."""
        global _count
        c = _count = _count + 1
        path = os.path.join(self.path, "cp_%04d.prof" % c)
        prof = profile.Profile()
        result = prof.runcall(func, *args, **params)
        prof.dump_stats(path)
        return result

    def statfiles(self):
        """:rtype: list of available profiles."""
        return [
            f
            for f in os.listdir(self.path)
            if f.startswith("cp_") and f.endswith(".prof")
        ]

    def stats(self, filename, sortby="cumulative"):
        """:rtype stats(index): output of print_stats() for the given profile."""
        sio = StringIO()
        # pstats.Stats has accepted a 'stream' argument since Python 2.5; the
        # old stdout-redirection fallback for <= 2.4 was unreachable dead code
        # on every interpreter this module can run on, so it was removed.
        s = pstats.Stats(os.path.join(self.path, filename), stream=sio)
        s.strip_dirs()
        s.sort_stats(sortby)
        s.print_stats()
        response = sio.getvalue()
        sio.close()
        return response

    def index(self):
        return """<html>
        <head><title>CherryPy profile data</title></head>
        <frameset cols='200, 1*'>
            <frame src='menu' />
            <frame name='main' src='' />
        </frameset>
        </html>
        """

    index.exposed = True

    def menu(self):
        yield "<h2>Profiling runs</h2>"
        yield "<p>Click on one of the runs below to see profiling data.</p>"
        runs = self.statfiles()
        runs.sort()
        for i in runs:
            yield "<a href='report?filename=%s' target='main'>%s</a><br />" % (i, i)

    menu.exposed = True

    def report(self, filename):
        import cherrypy

        cherrypy.response.headers["Content-Type"] = "text/plain"
        return self.stats(filename)

    report.exposed = True
class ProfileAggregator(Profiler):
    """A Profiler variant that accumulates every run into one profile file."""

    def __init__(self, path=None):
        Profiler.__init__(self, path)
        global _count
        # One shared profile object (and one output file) for all requests.
        self.count = _count = _count + 1
        self.profiler = profile.Profile()

    def run(self, func, *args, **params):
        """Profile *func* into the single shared stats file and return its result."""
        statpath = os.path.join(self.path, "cp_%04d.prof" % self.count)
        retval = self.profiler.runcall(func, *args, **params)
        self.profiler.dump_stats(statpath)
        return retval
class make_app:
    """WSGI middleware that profiles every request handled by the wrapped app."""

    def __init__(self, nextapp, path=None, aggregate=False):
        """Make a WSGI middleware app which wraps 'nextapp' with profiling.

        nextapp
            the WSGI application to wrap, usually an instance of
            cherrypy.Application.

        path
            where to dump the profiling output.

        aggregate
            if True, profile data for all HTTP requests will go in
            a single file. If False (the default), each HTTP request will
            dump its profile data into a separate file.
        """
        if profile is None or pstats is None:
            msg = (
                "Your installation of Python does not have a profile "
                "module. If you're on Debian, try "
                "`sudo apt-get install python-profiler`. "
                "See http://www.cherrypy.org/wiki/ProfilingOnDebian "
                "for details."
            )
            warnings.warn(msg)

        self.nextapp = nextapp
        self.aggregate = aggregate
        # Aggregated mode funnels all requests into one profile file.
        profiler_cls = ProfileAggregator if aggregate else Profiler
        self.profiler = profiler_cls(path)

    def __call__(self, environ, start_response):
        def gather():
            # Materialize the response so the whole request is profiled.
            return list(self.nextapp(environ, start_response))

        return self.profiler.run(gather)
def serve(path=None, port=8080):
    """Serve the profile reports in *path* over HTTP on *port* via CherryPy."""
    if profile is None or pstats is None:
        msg = (
            "Your installation of Python does not have a profile module. "
            "If you're on Debian, try "
            "`sudo apt-get install python-profiler`. "
            "See http://www.cherrypy.org/wiki/ProfilingOnDebian "
            "for details."
        )
        warnings.warn(msg)

    import cherrypy

    cherrypy.config.update(
        {
            "environment": "production",
            "server.socket_port": int(port),
            "server.thread_pool": 10,
        }
    )
    cherrypy.quickstart(Profiler(path))


if __name__ == "__main__":
    serve(*tuple(sys.argv[1:]))
|
Cloud | ToolPathUploader | # Copyright (c) 2019 Ultimaker B.V.
# !/usr/bin/env python
# -*- coding: utf-8 -*-
from typing import Any, Callable, Dict, Optional, Tuple, cast
from PyQt6.QtNetwork import QNetworkReply, QNetworkRequest
from UM.Logger import Logger
from UM.TaskManagement.HttpRequestManager import HttpRequestManager
from ..Models.Http.CloudPrintJobResponse import CloudPrintJobResponse
class ToolPathUploader:
    """Class responsible for uploading meshes to the cloud in separate requests."""
    # The maximum amount of times to retry if the server returns one of the RETRY_HTTP_CODES
    MAX_RETRIES = 10
    # The HTTP codes that should trigger a retry.
    RETRY_HTTP_CODES = {500, 502, 503, 504}
    def __init__(
        self,
        http: HttpRequestManager,
        print_job: CloudPrintJobResponse,
        data: bytes,
        on_finished: Callable[[], Any],
        on_progress: Callable[[int], Any],
        on_error: Callable[[], Any],
    ) -> None:
        """Creates a mesh upload object.
        :param http: The network access manager that will handle the HTTP requests.
        :param print_job: The print job response that was returned by the cloud after registering the upload.
        :param data: The mesh bytes to be uploaded.
        :param on_finished: The method to be called when done.
        :param on_progress: The method to be called when the progress changes (receives a percentage 0-100).
        :param on_error: The method to be called when an error occurs.
        """
        self._http = http
        self._print_job = print_job
        self._data = data
        self._on_finished = on_finished
        self._on_progress = on_progress
        self._on_error = on_error
        # Retry bookkeeping for transient server-side HTTP failures.
        self._retries = 0
        self._finished = False
    @property
    def printJob(self):
        """Returns the print job for which this object was created."""
        return self._print_job
    def start(self) -> None:
        """Starts uploading the mesh."""
        if self._finished:
            # reset state so a finished uploader can be reused.
            self._retries = 0
            self._finished = False
        self._upload()
    def stop(self):
        """Stops uploading the mesh, marking it as finished."""
        Logger.log("i", "Finished uploading")
        self._finished = (
            True  # Signal to any ongoing retries that we should stop retrying.
        )
        self._on_finished()
    def _upload(self) -> None:
        """
        Uploads the print job to the cloud printer.

        :raises ValueError: when the upload was already marked finished
            (e.g. by a concurrent callback).
        """
        if self._finished:
            raise ValueError("The upload is already finished")
        Logger.log(
            "i",
            "Uploading print to {upload_url}".format(
                upload_url=self._print_job.upload_url
            ),
        )
        self._http.put(
            url=cast(str, self._print_job.upload_url),
            headers_dict={"Content-Type": cast(str, self._print_job.content_type)},
            data=self._data,
            callback=self._finishedCallback,
            error_callback=self._errorCallback,
            upload_progress_callback=self._progressCallback,
        )
    def _progressCallback(self, bytes_sent: int, bytes_total: int) -> None:
        """Handles an update to the upload progress
        :param bytes_sent: The amount of bytes sent in the current request.
        :param bytes_total: The amount of bytes to send in the current request.
        """
        Logger.debug("Cloud upload progress %s / %s", bytes_sent, bytes_total)
        if bytes_total:
            # NOTE(review): progress is computed against the full payload size
            # (len(self._data)), not against bytes_total — confirm intended.
            self._on_progress(int(bytes_sent / len(self._data) * 100))
    ## Handles an error uploading.
    def _errorCallback(
        self, reply: QNetworkReply, error: QNetworkReply.NetworkError
    ) -> None:
        """Handles an error uploading."""
        body = bytes(reply.readAll()).decode()
        Logger.log("e", "Received error while uploading: %s", body)
        # Mark the upload finished before notifying, so no retry can restart it.
        self.stop()
        self._on_error()
    def _finishedCallback(self, reply: QNetworkReply) -> None:
        """Checks whether a chunk of data was uploaded successfully, starting the next chunk if needed."""
        Logger.log(
            "i",
            "Finished callback %s %s",
            reply.attribute(QNetworkRequest.Attribute.HttpStatusCodeAttribute),
            reply.url().toString(),
        )
        status_code = reply.attribute(QNetworkRequest.Attribute.HttpStatusCodeAttribute)  # type: Optional[int]
        if not status_code:
            Logger.log("e", "Reply contained no status code.")
            self._errorCallback(reply, None)
            return
        # check if we should retry the last chunk
        if self._retries < self.MAX_RETRIES and status_code in self.RETRY_HTTP_CODES:
            self._retries += 1
            Logger.log(
                "i",
                "Retrying %s/%s request %s",
                self._retries,
                self.MAX_RETRIES,
                reply.url().toString(),
            )
            try:
                self._upload()
            except (
                ValueError
            ):  # Asynchronously it could have completed in the meanwhile.
                pass
            return
        # Http codes that are not to be retried are assumed to be errors.
        if status_code > 308:
            self._errorCallback(reply, None)
            return
        Logger.log(
            "d",
            "status_code: %s, Headers: %s, body: %s",
            status_code,
            [bytes(header).decode() for header in reply.rawHeaderList()],
            bytes(reply.readAll()).decode(),
        )
        self.stop()
|
Arch | ArchStairs | # ***************************************************************************
# * Copyright (c) 2013 Yorik van Havre <yorik@uncreated.net> *
# * *
# * This program is free software; you can redistribute it and/or modify *
# * it under the terms of the GNU Lesser General Public License (LGPL) *
# * as published by the Free Software Foundation; either version 2 of *
# * the License, or (at your option) any later version. *
# * for detail see the LICENCE text file. *
# * *
# * This program is distributed in the hope that it will be useful, *
# * but WITHOUT ANY WARRANTY; without even the implied warranty of *
# * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *
# * GNU Library General Public License for more details. *
# * *
# * You should have received a copy of the GNU Library General Public *
# * License along with this program; if not, write to the Free Software *
# * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 *
# * USA *
# * *
# ***************************************************************************
__title__ = "FreeCAD Arch Stairs"
__author__ = "Yorik van Havre"
__url__ = "https://www.freecad.org"
import math
import ArchComponent
import ArchPipe
import Draft
import DraftGeomUtils
import DraftVecUtils
import FreeCAD
import Part
from FreeCAD import Vector
# When the GUI is up, use the real Qt translation helpers; in console mode
# install no-op stand-ins so this module stays importable headless.
if FreeCAD.GuiUp:
    import FreeCADGui
    from draftutils.translate import translate
    from PySide.QtCore import QT_TRANSLATE_NOOP
else:
    # \cond
    def translate(ctxt, txt):
        # Identity fallback: return the untranslated text.
        return txt
    def QT_TRANSLATE_NOOP(ctxt, txt):
        # Identity fallback: acts as a marker only, no translation happens.
        return txt
    # \endcond
## @package ArchStairs
# \ingroup ARCH
# \brief The Stairs object and tools
#
# This module provides tools to build Stairs objects.
zeroMM = FreeCAD.Units.Quantity("0mm")
def makeStairs(
    baseobj=None, length=None, width=None, height=None, steps=None, name=None
):
    """makeStairs([baseobj],[length],[width],[height],[steps],[name]): creates a Stairs
    objects with given attributes."""
    if not FreeCAD.ActiveDocument:
        FreeCAD.Console.PrintError("No active document. Aborting\n")
        return
    # Arch preferences supply defaults for any dimension not given explicitly.
    p = FreeCAD.ParamGet("User parameter:BaseApp/Preferences/Mod/Arch")
    stairs = []
    additions = []
    label = name if name else translate("Arch", "Stairs")
    def setProperty(obj, length, width, height, steps):
        # Apply the given dimensions, falling back to the preference defaults.
        if length:
            obj.Length = length
        else:
            obj.Length = p.GetFloat("StairsLength", 4500.0)
        if width:
            obj.Width = width
        else:
            obj.Width = p.GetFloat("StairsWidth", 1000.0)
        if height:
            obj.Height = height
        else:
            obj.Height = p.GetFloat("StairsHeight", 3000.0)
        if steps:
            obj.NumberOfSteps = steps
        obj.Structure = "Massive"
        obj.StructureThickness = 150
        obj.DownSlabThickness = 150
        obj.UpSlabThickness = 150
        obj.RailingOffsetLeft = 60
        obj.RailingOffsetRight = 60
        obj.RailingHeightLeft = 900
        obj.RailingHeightRight = 900
    if baseobj:
        if not isinstance(baseobj, list):
            baseobj = [baseobj]
        lenSelection = len(baseobj)
        if lenSelection > 1:
            # Several base objects: create an extra 'master' staircase at index
            # 0 that aggregates the per-object segments as Additions.
            stair = FreeCAD.ActiveDocument.addObject("Part::FeaturePython", "Stairs")
            stair.Label = label
            _Stairs(stair)
            stairs.append(stair)
            stairs[0].Label = label
            i = 1
        else:
            i = 0
        for baseobjI in baseobj:
            stair = FreeCAD.ActiveDocument.addObject("Part::FeaturePython", "Stairs")
            stair.Label = label
            _Stairs(stair)
            stairs.append(stair)
            stairs[i].Label = label
            stairs[i].Base = baseobjI
            if len(baseobjI.Shape.Edges) > 1:
                stepsI = 1  # 'landing' if 'multi-edges' currently
            elif steps:
                stepsI = steps
            else:
                stepsI = 20
            setProperty(stairs[i], None, width, height, stepsI)
            if i > 1:
                # Later segments chain onto the previous one.
                additions.append(stairs[i])
                stairs[i].LastSegment = stairs[i - 1]
            else:
                if len(stairs) > 1:  # i.e. length >1, have a 'master' staircase created
                    stairs[0].Base = stairs[1]
            i += 1
        if lenSelection > 1:
            stairs[0].Additions = additions
    else:
        # No base object: create a single parametric staircase.
        obj = FreeCAD.ActiveDocument.addObject("Part::FeaturePython", "Stairs")
        obj.Label = label
        _Stairs(obj)
        setProperty(obj, length, width, height, steps)
        stairs.append(obj)
    if FreeCAD.GuiUp:
        if baseobj:
            for stair in stairs:
                _ViewProviderStairs(stair.ViewObject)
        else:
            _ViewProviderStairs(obj.ViewObject)
    if stairs:
        for stair in stairs:
            stair.recompute()
        makeRailing(stairs)
        # return stairs - all other functions expect one object as return value
        return stairs[0]
    else:
        obj.recompute()
        return obj
def makeRailing(stairs):
    "simple make Railing function testing"
    def makeRailingLorR(stairs, side="L"):
        # Walk the segments from last to first and attach one railing pipe to
        # the chosen side; stop early when a whole-run outline is found.
        for stair in reversed(stairs):
            if side == "L":
                outlineLR = stair.OutlineLeft
                outlineLRAll = stair.OutlineLeftAll
                stairRailingLR = "RailingLeft"
            elif side == "R":
                outlineLR = stair.OutlineRight
                outlineLRAll = stair.OutlineRightAll
                stairRailingLR = "RailingRight"
            if outlineLR or outlineLRAll:
                lrRail = ArchPipe.makePipe(
                    baseobj=None,
                    diameter=0,
                    length=0,
                    placement=None,
                    name=translate("Arch", "Railing"),
                )
                if outlineLRAll:
                    # Whole-run outline found: one railing covers everything.
                    setattr(stair, stairRailingLR, lrRail)
                    break
                elif outlineLR:
                    setattr(stair, stairRailingLR, lrRail)
    if stairs is None:
        # No argument: fall back to the current GUI selection.
        sel = FreeCADGui.Selection.getSelection()
        sel0 = sel[0]
        stairs = []
        # TODO currently consider 1st selected object, then would tackle multiple objects?
        if Draft.getType(sel[0]) == "Stairs":
            stairs.append(sel0)
            if Draft.getType(sel0.Base) == "Stairs":
                stairs.append(sel0.Base)
                additions = sel0.Additions
                for additionsI in additions:
                    if Draft.getType(additionsI) == "Stairs":
                        stairs.append(additionsI)
            else:
                # NOTE(review): sel[0] was already appended above, so this
                # branch appends the same object twice — confirm intended.
                stairs.append(sel[0])
        else:
            print("No Stairs object selected")
            return
    makeRailingLorR(stairs, "L")
    makeRailingLorR(stairs, "R")
class _CommandStairs:
    "the Arch Stairs command definition"

    def GetResources(self):
        """Return the icon, menu text, accelerator and tooltip of the command."""
        return {
            "Pixmap": "Arch_Stairs",
            "MenuText": QT_TRANSLATE_NOOP("Arch_Stairs", "Stairs"),
            "Accel": "S, R",
            "ToolTip": QT_TRANSLATE_NOOP("Arch_Stairs", "Creates a flight of stairs"),
        }

    def IsActive(self):
        """The command is available whenever a document is open."""
        # idiomatic 'is not None' replaces the original 'not ... is None'
        return FreeCAD.ActiveDocument is not None

    def Activated(self):
        """Create a Stairs object from the current selection (or a default one)."""
        p = FreeCAD.ParamGet("User parameter:BaseApp/Preferences/Mod/Arch")
        FreeCAD.ActiveDocument.openTransaction(translate("Arch", "Create Stairs"))
        FreeCADGui.addModule("Arch")
        selection = FreeCADGui.Selection.getSelection()
        if len(selection) > 0:
            # Build the "FreeCAD.ActiveDocument.<name>,..." argument string.
            # (The unused name list 'n' and the never-populated 'stairs'/
            # 'additions' locals of the original were removed.)
            nStr = ",".join(
                "FreeCAD.ActiveDocument." + obj.Name for obj in selection
            )
            FreeCADGui.doCommand("obj = Arch.makeStairs(baseobj=[" + nStr + "])")
        else:
            FreeCADGui.doCommand(
                "obj = Arch.makeStairs(steps=" + str(p.GetInt("StairsSteps", 17)) + ")"
            )
        FreeCADGui.addModule("Draft")
        FreeCAD.ActiveDocument.commitTransaction()
        FreeCAD.ActiveDocument.recompute()
class _Stairs(ArchComponent.Component):
"A stairs object"
    def __init__(self, obj):
        """Initialize the Stairs component and declare its properties on *obj*."""
        ArchComponent.Component.__init__(self, obj)
        self.setProperties(obj)
        obj.IfcType = "Stair"
def setProperties(self, obj):
# http://en.wikipedia.org/wiki/Stairs
pl = obj.PropertiesList
# base properties
if not "Length" in pl:
obj.addProperty(
"App::PropertyLength",
"Length",
"Stairs",
QT_TRANSLATE_NOOP(
"App::Property",
"The length of these stairs, if no baseline is defined",
),
)
if not "Width" in pl:
obj.addProperty(
"App::PropertyLength",
"Width",
"Stairs",
QT_TRANSLATE_NOOP("App::Property", "The width of these stairs"),
)
if not "Height" in pl:
obj.addProperty(
"App::PropertyLength",
"Height",
"Stairs",
QT_TRANSLATE_NOOP("App::Property", "The total height of these stairs"),
)
if not "Align" in pl:
obj.addProperty(
"App::PropertyEnumeration",
"Align",
"Stairs",
QT_TRANSLATE_NOOP(
"App::Property",
"The alignment of these stairs on their baseline, if applicable",
),
)
obj.Align = ["Left", "Right", "Center"]
# TODO - To be combined into Width when PropertyLengthList is available
if not "WidthOfLanding" in pl:
obj.addProperty(
"App::PropertyFloatList",
"WidthOfLanding",
"Stairs",
QT_TRANSLATE_NOOP(
"App::Property",
"The width of a Landing (Second edge and after - First edge follows Width property)",
),
)
# steps and risers properties
if not "NumberOfSteps" in pl:
obj.addProperty(
"App::PropertyInteger",
"NumberOfSteps",
"Steps",
QT_TRANSLATE_NOOP(
"App::Property", "The number of risers in these stairs"
),
)
if not "TreadDepth" in pl:
obj.addProperty(
"App::PropertyLength",
"TreadDepth",
"Steps",
QT_TRANSLATE_NOOP(
"App::Property", "The depth of the treads of these stairs"
),
)
obj.setEditorMode("TreadDepth", 1)
if not "RiserHeight" in pl:
obj.addProperty(
"App::PropertyLength",
"RiserHeight",
"Steps",
QT_TRANSLATE_NOOP(
"App::Property", "The height of the risers of these stairs"
),
)
obj.setEditorMode("RiserHeight", 1)
if not "Nosing" in pl:
obj.addProperty(
"App::PropertyLength",
"Nosing",
"Steps",
QT_TRANSLATE_NOOP("App::Property", "The size of the nosing"),
)
if not "TreadThickness" in pl:
obj.addProperty(
"App::PropertyLength",
"TreadThickness",
"Steps",
QT_TRANSLATE_NOOP("App::Property", "The thickness of the treads"),
)
if not "BlondelRatio" in pl:
obj.addProperty(
"App::PropertyFloat",
"BlondelRatio",
"Steps",
QT_TRANSLATE_NOOP(
"App::Property",
"The Blondel ratio indicates comfortable stairs and should be between 62 and 64cm or 24.5 and 25.5in",
),
)
obj.setEditorMode("BlondelRatio", 1)
if not "RiserThickness" in pl:
obj.addProperty(
"App::PropertyLength",
"RiserThickness",
"Steps",
QT_TRANSLATE_NOOP("App::Property", "The thickness of the risers"),
)
if not hasattr(obj, "LandingDepth"):
obj.addProperty(
"App::PropertyLength",
"LandingDepth",
"Steps",
QT_TRANSLATE_NOOP(
"App::Property", "The depth of the landing of these stairs"
),
)
if not hasattr(obj, "TreadDepthEnforce"):
obj.addProperty(
"App::PropertyLength",
"TreadDepthEnforce",
"Steps",
QT_TRANSLATE_NOOP(
"App::Property",
"The depth of the treads of these stairs - Enforced regardless of Length or edge's Length",
),
)
if not hasattr(obj, "RiserHeightEnforce"):
obj.addProperty(
"App::PropertyLength",
"RiserHeightEnforce",
"Steps",
QT_TRANSLATE_NOOP(
"App::Property",
"The height of the risers of these stairs - Enforced regardless of Height or edge's Height",
),
)
if not hasattr(obj, "Flight"):
obj.addProperty(
"App::PropertyEnumeration",
"Flight",
"Structure",
QT_TRANSLATE_NOOP(
"App::Property", "The direction of flight after landing"
),
)
obj.Flight = ["Straight", "HalfTurnLeft", "HalfTurnRight"]
# Segment and Parts properties
if not hasattr(obj, "LastSegment"):
obj.addProperty(
"App::PropertyLink",
"LastSegment",
"Segment and Parts",
"Last Segment (Flight or Landing) of Arch Stairs connecting to This Segment",
)
if not hasattr(obj, "AbsTop"):
obj.addProperty(
"App::PropertyVector",
"AbsTop",
"Segment and Parts",
QT_TRANSLATE_NOOP(
"App::Property",
"The 'absolute' top level of a flight of stairs leads to",
),
)
obj.setEditorMode("AbsTop", 1)
if not hasattr(obj, "OutlineLeft"):
obj.addProperty(
"App::PropertyVectorList",
"OutlineLeft",
"Segment and Parts",
QT_TRANSLATE_NOOP("App::Property", "The 'left outline' of stairs"),
) # Used for Outline of Railing
obj.setEditorMode("OutlineLeft", 1)
if not hasattr(obj, "OutlineRight"):
obj.addProperty(
"App::PropertyVectorList",
"OutlineRight",
"Segment and Parts",
QT_TRANSLATE_NOOP("App::Property", "The 'left outline' of stairs"),
)
obj.setEditorMode("OutlineRight", 1)
# Can't accept 'None' in list, need NaN
# if not hasattr(obj,"OutlineRailArcLeft"):
# obj.addProperty("App::PropertyVectorList","OutlineRailArcLeft","Segment and Parts",QT_TRANSLATE_NOOP("App::Property","The 'left outline' 'arc points' of stairs railing"))
# obj.setEditorMode("OutlineRailArcLeft",1)
# if not hasattr(obj,"OutlineRailArcRight"):
# obj.addProperty("App::PropertyVectorList","OutlineRailArcRight","Segment and Parts",QT_TRANSLATE_NOOP("App::Property","The 'right outline' 'arc points of stairs railing"))
# obj.setEditorMode("OutlineRailArcRight",1)
if not hasattr(self, "OutlineRailArcLeft"):
self.OutlineRailArcLeft = []
if not hasattr(self, "OutlineRailArcRight"):
self.OutlineRailArcRight = []
if not hasattr(obj, "RailingLeft"):
obj.addProperty(
"App::PropertyLinkHidden",
"RailingLeft",
"Segment and Parts",
"Name of Railing object (left) created",
)
if not hasattr(obj, "RailingRight"):
obj.addProperty(
"App::PropertyLinkHidden",
"RailingRight",
"Segment and Parts",
"Name of Railing object (right) created",
)
if not hasattr(obj, "OutlineLeftAll"):
obj.addProperty(
"App::PropertyVectorList",
"OutlineLeftAll",
"Segment and Parts",
QT_TRANSLATE_NOOP(
"App::Property", "The 'left outline' of all segments of stairs"
),
)
obj.setEditorMode("OutlineLeftAll", 1) # Used for Outline of Railing
if not hasattr(obj, "OutlineRightAll"):
obj.addProperty(
"App::PropertyVectorList",
"OutlineRightAll",
"Segment and Parts",
QT_TRANSLATE_NOOP(
"App::Property", "The 'right outline' of all segments of stairs"
),
)
obj.setEditorMode("OutlineRightAll", 1)
# Can't accept 'None' in list, need NaN
# if not hasattr(obj,"OutlineRailArcLeftAll"):
# obj.addProperty("App::PropertyVectorList","OutlineRailArcLeftAll","Segment and Parts",QT_TRANSLATE_NOOP("App::Property","The 'left outline' 'arc points' of all segments of stairs railing"))
# obj.setEditorMode("OutlineRailArcLeftAll",1) # Used for Outline of Railing
# if not hasattr(obj,"OutlineRailArcRightAll"):
# obj.addProperty("App::PropertyVectorList","OutlineRailArcRightAll","Segment and Parts",QT_TRANSLATE_NOOP("App::Property","The 'right outline' 'arc points' of all segments of stairs railing"))
# obj.setEditorMode("OutlineRailArcRightAll",1)
if not hasattr(self, "OutlineRailArcLeftAll"):
self.OutlineRailArcLeftAll = []
if not hasattr(self, "OutlineRailArcRightAll"):
self.OutlineRailArcRightAll = []
if not hasattr(obj, "RailingHeightLeft"):
obj.addProperty(
"App::PropertyLength",
"RailingHeightLeft",
"Segment and Parts",
"Height of Railing on Left hand side from Stairs or Landing ",
)
if not hasattr(obj, "RailingHeightRight"):
obj.addProperty(
"App::PropertyLength",
"RailingHeightRight",
"Segment and Parts",
"Height of Railing on Right hand side from Stairs or Landing ",
)
if not hasattr(obj, "RailingOffsetLeft"):
obj.addProperty(
"App::PropertyLength",
"RailingOffsetLeft",
"Segment and Parts",
"Offset of Railing on Left hand side from stairs or landing Edge ",
)
if not hasattr(obj, "RailingOffsetRight"):
obj.addProperty(
"App::PropertyLength",
"RailingOffsetRight",
"Segment and Parts",
"Offset of Railing on Right hand side from stairs or landing Edge ",
)
# structural properties
if not "Landings" in pl:
obj.addProperty(
"App::PropertyEnumeration",
"Landings",
"Structure",
QT_TRANSLATE_NOOP(
"App::Property", "The type of landings of these stairs"
),
)
obj.Landings = ["None", "At center", "At each corner"]
if not "Winders" in pl:
obj.addProperty(
"App::PropertyEnumeration",
"Winders",
"Structure",
QT_TRANSLATE_NOOP(
"App::Property", "The type of winders in these stairs"
),
)
obj.Winders = ["None", "All", "Corners strict", "Corners relaxed"]
if not "Structure" in pl:
obj.addProperty(
"App::PropertyEnumeration",
"Structure",
"Structure",
QT_TRANSLATE_NOOP(
"App::Property", "The type of structure of these stairs"
),
)
obj.Structure = ["None", "Massive", "One stringer", "Two stringers"]
if not "StructureThickness" in pl:
obj.addProperty(
"App::PropertyLength",
"StructureThickness",
"Structure",
QT_TRANSLATE_NOOP(
"App::Property",
"The thickness of the massive structure or of the stringers",
),
)
if not "StringerWidth" in pl:
obj.addProperty(
"App::PropertyLength",
"StringerWidth",
"Structure",
QT_TRANSLATE_NOOP("App::Property", "The width of the stringers"),
)
if not "StructureOffset" in pl:
obj.addProperty(
"App::PropertyLength",
"StructureOffset",
"Structure",
QT_TRANSLATE_NOOP(
"App::Property",
"The offset between the border of the stairs and the structure",
),
)
if not "StringerOverlap" in pl:
obj.addProperty(
"App::PropertyLength",
"StringerOverlap",
"Structure",
QT_TRANSLATE_NOOP(
"App::Property",
"The overlap of the stringers above the bottom of the treads",
),
)
if not "DownSlabThickness" in pl:
obj.addProperty(
"App::PropertyLength",
"DownSlabThickness",
"Structure",
QT_TRANSLATE_NOOP(
"App::Property", "The thickness of the lower floor slab"
),
)
if not "UpSlabThickness" in pl:
obj.addProperty(
"App::PropertyLength",
"UpSlabThickness",
"Structure",
QT_TRANSLATE_NOOP(
"App::Property", "The thickness of the upper floor slab"
),
)
if not "ConnectionDownStartStairs" in pl:
obj.addProperty(
"App::PropertyEnumeration",
"ConnectionDownStartStairs",
"Structure",
QT_TRANSLATE_NOOP(
"App::Property",
"The type of connection between the lower floor slab and the start of the stairs",
),
)
obj.ConnectionDownStartStairs = [
"HorizontalCut",
"VerticalCut",
"HorizontalVerticalCut",
]
if not "ConnectionEndStairsUp" in pl:
obj.addProperty(
"App::PropertyEnumeration",
"ConnectionEndStairsUp",
"Structure",
QT_TRANSLATE_NOOP(
"App::Property",
"The type of connection between the end of the stairs and the upper floor slab",
),
)
obj.ConnectionEndStairsUp = ["toFlightThickness", "toSlabThickness"]
self.Type = "Stairs"
def onDocumentRestored(self, obj):
    """Migrate a restored document's stairs object to the current property set."""
    ArchComponent.Component.onDocumentRestored(self, obj)
    self.setProperties(obj)
    # v0.18-era documents still carry the old outline-wire properties
    needs_0v18_migration = hasattr(obj, "OutlineWireLeft")
    if needs_0v18_migration:
        self.update_properties_0v18_to_0v20(obj)
    # v0.19 stored the railings by name (string) rather than as links
    if obj.getTypeIdOfProperty("RailingLeft") == "App::PropertyString":
        self.update_properties_0v19_to_0v20(obj)
def update_properties_0v18_to_0v20(self, obj):
    """Replace the v0.18 'OutlineWire*' properties by 'Railing*' links.

    The railing object is recovered from the dependents (InList) of the old
    outline-wire object; failures are deliberately ignored (best effort).
    """
    doc = FreeCAD.ActiveDocument
    for wireProp, railProp in (
        ("OutlineWireLeft", "RailingLeft"),
        ("OutlineWireRight", "RailingRight"),
    ):
        wireObject = doc.getObject(getattr(obj, wireProp))
        try:
            setattr(obj, railProp, wireObject.InList[0])
        except Exception:
            pass
    obj.removeProperty("OutlineWireLeft")
    obj.removeProperty("OutlineWireRight")
    self.update_properties_to_0v20(obj)
    from draftutils.messages import _wrn
    _wrn(
        "v0.20.3, "
        + obj.Label
        + ", "
        + translate(
            "Arch",
            "removed properties 'OutlineWireLeft' and 'OutlineWireRight', and added properties 'RailingLeft' and 'RailingRight'",
        )
    )
def update_properties_0v19_to_0v20(self, obj):
    """Convert the v0.19 string-typed 'Railing*' properties into link properties."""
    doc = FreeCAD.ActiveDocument
    # resolve the objects named by the old string properties first
    railingLeft = doc.getObject(obj.RailingLeft)
    railingRight = doc.getObject(obj.RailingRight)
    # drop the string properties, then let setProperties() recreate them as links
    obj.removeProperty("RailingLeft")
    obj.removeProperty("RailingRight")
    self.setProperties(obj)
    obj.RailingLeft = railingLeft
    obj.RailingRight = railingRight
    self.update_properties_to_0v20(obj)
    from draftutils.messages import _wrn
    _wrn(
        "v0.20.3, "
        + obj.Label
        + ", "
        + translate(
            "Arch",
            "changed the type of properties 'RailingLeft' and 'RailingRight'",
        )
    )
def update_properties_to_0v20(self, obj):
    """Finish the 0.20 migration: detach the railings from Additions and show them."""
    currentAdditions = obj.Additions
    # railings are now linked directly, so they must not stay in Additions
    for railing in (obj.RailingLeft, obj.RailingRight):
        if railing in currentAdditions:
            currentAdditions.remove(railing)
    obj.Additions = currentAdditions
    # make sure both railings are visible again
    for railing in (obj.RailingLeft, obj.RailingRight):
        if railing is not None:
            railing.Visibility = True
def execute(self, obj):
    """Construct the shape of the stairs.

    Dispatches on obj.Base (solid / single straight edge / single curved
    edge / multiple edges / no base) to the appropriate builder method,
    assembles the collected steps, risers and structures into a compound,
    then regenerates the railing wire objects from the stored outlines.
    """
    if self.clone(obj):
        return
    # part collectors filled by the make*() builder methods below
    self.steps = []
    self.risers = []
    self.pseudosteps = []
    self.structures = []
    pl = obj.Placement
    landings = 0  # ? Any use - paul 2018.7.15
    base = None
    # a solid Base shape is reused directly as the stairs shape
    if obj.Base:
        if hasattr(obj.Base, "Shape"):
            if obj.Base.Shape:
                if obj.Base.Shape.Solids:
                    base = obj.Base.Shape.copy()
    # special case NumberOfSteps = 1 : multi-edges landing
    if (
        (not base)
        and obj.Width.Value
        and obj.Height.Value
        and (obj.NumberOfSteps > 0)
    ):
        if obj.Base:
            if not hasattr(obj.Base, "Shape"):
                return
            if obj.Base.Shape.Solids:
                obj.Shape = obj.Base.Shape.copy()
                obj.Placement = FreeCAD.Placement(obj.Base.Placement).multiply(pl)
                obj.TreadDepth = 0.0
                obj.RiserHeight = 0.0
                return
            if not obj.Base.Shape.Edges:
                return
            if obj.Base.Shape.Faces:
                return
            if len(obj.Base.Shape.Edges) == 1:
                edge = obj.Base.Shape.Edges[0]
                if isinstance(edge.Curve, (Part.LineSegment, Part.Line)):
                    # preparing for multi-edges landing / segment staircase
                    if obj.NumberOfSteps > 1:
                        self.makeStraightStairsWithLanding(
                            obj, edge
                        )  # all cases use makeStraightStairsWithLanding()
                    # preparing for multi-edges landing / segment staircase
                    if obj.NumberOfSteps == 1:
                        # TODO - All use self.makeMultiEdgesLanding(obj,edges) ?
                        self.makeStraightLanding(obj, edge)
                    if obj.NumberOfSteps == 0:
                        pass  # Should delete the whole shape
                else:
                    if obj.Landings == "At center":
                        landings = 1
                        self.makeCurvedStairsWithLanding(obj, edge)
                    else:
                        self.makeCurvedStairs(obj, edge)
            elif len(obj.Base.Shape.Edges) >= 1:
                # if obj.NumberOfSteps == 1:
                # Sort the edges so each vertex tested of its tangent direction in order
                ## TODO - Found Part.sortEdges() occasionally return less edges then 'input'
                edges = Part.sortEdges(obj.Base.Shape.Edges)[0]
                self.makeMultiEdgesLanding(obj, edges)
        else:
            # no Base: build along a straight segment of obj.Length on the x-axis
            if not obj.Length.Value:
                return
            edge = Part.LineSegment(
                Vector(0, 0, 0), Vector(obj.Length.Value, 0, 0)
            ).toShape()
            self.makeStraightStairsWithLanding(obj, edge)
        if self.structures or self.steps or self.risers:
            base = Part.makeCompound(self.structures + self.steps + self.risers)
        elif self.pseudosteps:
            # only zero-thickness faces were produced: use them directly and stop
            shape = Part.makeCompound(self.pseudosteps)
            obj.Shape = shape
            obj.Placement = pl
            return
    base = self.processSubShapes(obj, base, pl)
    if base:
        if not base.isNull():
            obj.Shape = base
            obj.Placement = pl
    # regenerate the railing outline wires for any linked railing objects
    railingLeftObject, railWireL = None, None
    railingRightObject, railWireR = None, None
    doc = FreeCAD.ActiveDocument
    if obj.RailingLeft:
        railingLeftObject = obj.RailingLeft
        if obj.OutlineLeftAll:
            railWireL, NU = _Stairs.returnOutlineWireFace(
                obj.OutlineLeftAll, self.OutlineRailArcLeftAll, mode="notFaceAlso"
            )
        elif obj.OutlineLeft:
            railWireL, NU = _Stairs.returnOutlineWireFace(
                obj.OutlineLeft, self.OutlineRailArcLeft, mode="notFaceAlso"
            )
        else:
            print(" No obj.OutlineLeftAll or obj.OutlineLeft")
        if railWireL:
            if (
                Draft.getType(railingLeftObject.Base) != "Part::Feature"
            ):  # Base can have wrong type or be None.
                if railingLeftObject.Base:
                    doc.removeObject(railingLeftObject.Base.Name)
                railingLeftWireObject = doc.addObject(
                    "Part::Feature", "RailingWire"
                )
                railingLeftObject.Base = railingLeftWireObject
            # update the Base object shape
            railingLeftObject.Base.Shape = railWireL
        else:
            print(" No railWireL created ")
    if obj.RailingRight:
        railingRightObject = obj.RailingRight
        if obj.OutlineRightAll:
            railWireR, NU = _Stairs.returnOutlineWireFace(
                obj.OutlineRightAll, self.OutlineRailArcRightAll, mode="notFaceAlso"
            )
        elif obj.OutlineLeft:
            # NOTE(review): this fallback reads obj.OutlineLeft (not
            # obj.OutlineRight) while pairing it with the *right* arc points.
            # It looks like a copy-paste slip, but it is kept as-is pending
            # confirmation against the code that fills these outlines.
            railWireR, NU = _Stairs.returnOutlineWireFace(
                obj.OutlineLeft, self.OutlineRailArcRight, mode="notFaceAlso"
            )
        else:
            print(" No obj.OutlineRightAll or obj.OutlineLeft")
        if railWireR:
            if (
                Draft.getType(railingRightObject.Base) != "Part::Feature"
            ):  # Base can have wrong type or be None.
                if railingRightObject.Base:
                    doc.removeObject(railingRightObject.Base.Name)
                railingRightWireObject = doc.addObject(
                    "Part::Feature", "RailingWire"
                )
                railingRightObject.Base = railingRightWireObject
            # update the Base object shape
            railingRightObject.Base.Shape = railWireR
        else:
            # fixed: this diagnostic previously said "railWireL" although it
            # reports the right-hand side
            print(" No railWireR created ")
    # compute step data
    # if obj.NumberOfSteps > 1:
    if False:  # TODO - To be deleted
        l = obj.Length.Value
        h = obj.Height.Value
        if obj.Base:
            if hasattr(obj.Base, "Shape"):
                l = obj.Base.Shape.Length
                if obj.Base.Shape.BoundBox.ZLength:
                    h = obj.Base.Shape.BoundBox.ZLength
        if obj.LandingDepth:
            obj.TreadDepth = float(l - (landings * obj.LandingDepth.Value)) / (
                obj.NumberOfSteps - (1 + landings)
            )
        else:
            obj.TreadDepth = float(l - (landings * obj.Width.Value)) / (
                obj.NumberOfSteps - (1 + landings)
            )
        obj.RiserHeight = float(h) / obj.NumberOfSteps
        obj.BlondelRatio = obj.RiserHeight.Value * 2 + obj.TreadDepth.Value
@staticmethod
def align(basepoint, align, widthvec):
"moves a given basepoint according to the alignment"
if align == "Center":
basepoint = basepoint.add(DraftVecUtils.scale(widthvec, -0.5))
elif align == "Right":
basepoint = basepoint.add(DraftVecUtils.scale(widthvec, -1))
return basepoint
def makeMultiEdgesLanding(self, obj, edges):
    "builds a 'multi-edges' landing from edges"  # 'copying' from makeStraightLanding()
    # Outline the landing polygon over all edges.  Only the combined outline,
    # the base vertexes and the arc midpoints are used here; names ending in
    # "NU" (and the bare NU) are "not used" placeholders for the rest of the
    # returnOutlines() tuple.
    (
        outline,
        outlineL,
        outlineR,
        vBase1,
        outlineP1P2ClosedNU,
        outlineP3P4ClosedNU,
        NU,
        pArc,
        pArcL,
        pArcR,
    ) = self.returnOutlines(
        obj,
        edges,
        obj.Align,
        None,
        obj.Width,
        obj.WidthOfLanding,
        obj.TreadThickness,
        zeroMM,
        zeroMM,
        zeroMM,
        zeroMM,
        zeroMM,
        True,
    )
    # remember the 'absolute' top level so a following segment can chain onto it
    obj.AbsTop = vBase1[0]
    stepWire, stepFace = _Stairs.returnOutlineWireFace(
        outline, pArc, mode="faceAlso"
    )  # (outlinePoints, pArc, mode="wire or faceAlso")
    if obj.TreadThickness.Value:
        # extrude the landing face upwards into a solid tread
        step = stepFace.extrude(Vector(0, 0, abs(obj.TreadThickness.Value)))
        self.steps.append(step)
    else:
        # zero tread thickness: keep the bare face as a pseudo step
        self.pseudosteps.append(stepFace)
    if obj.StructureThickness.Value:
        # structure slab is the same face extruded downwards
        landingFace = stepFace
        struct = landingFace.extrude(
            Vector(0, 0, -abs(obj.StructureThickness.Value))
        )
        if struct:
            self.structures.append(struct)
    self.makeRailingOutline(obj, edges)
def makeRailingOutline(self, obj, edges):
    """Compute the left/right railing outlines over *edges* and hand them on.

    Only four items of the returnOutlines() result are needed here: the two
    side outlines and their corresponding arc midpoints.
    """
    outlines = self.returnOutlines(
        obj,
        edges,
        obj.Align,
        None,
        obj.Width,
        obj.WidthOfLanding,
        obj.TreadThickness,
        zeroMM,
        obj.RailingOffsetLeft,
        obj.RailingOffsetRight,
        obj.RailingHeightLeft,
        obj.RailingHeightRight,
        True,
    )
    railOutlineLeft = outlines[1]
    railOutlineRight = outlines[2]
    arcPointsLeft = outlines[8]
    arcPointsRight = outlines[9]
    self.connectRailingVector(
        obj, railOutlineLeft, railOutlineRight, arcPointsLeft, arcPointsRight
    )
@staticmethod
def returnOutlineWireFace(outlinePoints, pArc, mode="wire or faceAlso"):
    """Build a wire (and, if mode == "faceAlso", a face) from outline points.

    pArc holds one entry per segment: None for a straight segment, or the
    arc midpoint the segment must pass through.
    """
    if not any(pArc):
        # no arc anywhere: a plain polygon suffices
        # (note: any([0, '', False]) is False, so 'all-None' lands here)
        stepWire = Part.makePolygon(outlinePoints)
    else:
        # mixed straight/arc outline: build the wire segment by segment
        segmentEdges = []
        for idx in range(len(outlinePoints) - 1):
            start = outlinePoints[idx]
            end = outlinePoints[idx + 1]
            if pArc[idx] is None:
                segmentEdges.append(Part.LineSegment(start, end).toShape())
            else:
                segmentEdges.append(Part.Arc(start, pArc[idx], end).toShape())
        stepWire = Part.Wire(segmentEdges)
    stepFace = Part.Face(stepWire) if mode == "faceAlso" else None
    return stepWire, stepFace
@staticmethod  # obj become stairsObj
def returnOutlines(
    stairsObj,
    edges,
    align="Left",
    mode=None,
    widthFirstSegment=zeroMM,
    widthOtherSegments=[],
    treadThickness=zeroMM,
    railStartRiser=zeroMM,
    offsetHLeft=zeroMM,
    offsetHRight=zeroMM,
    offsetVLeft=zeroMM,
    offsetVRight=zeroMM,
    widthFirstSegmentDefault=False,
):
    """Construct outline of stairs landing or the like from Edges - Side effect is vertexes are 'ordered' in series of findIntersection() functions"""
    """ outlineP1P2Ordered seem no use at the moment """
    # NOTE(review): widthOtherSegments=[] is a mutable default argument; it is
    # only read here (never mutated), so it is harmless, but a tuple/None
    # default would be safer.
    # Per edge, four corner points are built (p1/p2 on one side, p3/p4 on the
    # other); consecutive edges are joined at the intersection of their side
    # lines/arcs.  pArc* hold arc midpoints (None for straight segments).
    # import DraftGeomUtils
    v, vLength, vWidth, vBase = [], [], [], []
    p1, p2, p3, p4, pArc, pArc1, pArc2 = (
        [],
        [],
        [],
        [],
        [],
        [],
        [],
    )  # p1o, p2o - Not used
    (
        outline,
        outlineP1P2,
        outlineP3P4,
        outlineP1P2Closed,
        outlineP3P4Closed,
        outlineP1P2Ordered,
    ) = ([], [], [], [], [], [])
    if not isinstance(edges, list):
        edges = [edges]
    enum_edges = enumerate(edges)
    for i, edge in enum_edges:
        isLine = isinstance(edge.Curve, (Part.Line, Part.LineSegment))
        isArc = isinstance(
            edge.Curve, Part.Circle
        )  # why it is Part.Circle for an Arc Edge? - why Part.ArcOfCircle Not Working?
        """ (1) append v (vec) """
        v.append(
            DraftGeomUtils.vec(edge)
        )  # TODO check all function below ok with curve?
        """ (2) get netWidthI """
        # net width of segment i = requested width minus both horizontal offsets;
        # the try/except falls back to widthFirstSegment when widthOtherSegments
        # has no entry for this segment
        netWidthI = 0
        if i > 0:
            try:
                if widthOtherSegments[i - 1] > 0 or (not widthFirstSegmentDefault):
                    netWidthI = (
                        widthOtherSegments[i - 1]
                        - offsetHLeft.Value
                        - offsetHRight.Value
                    )  # 2*offsetH
                else:  # i.e. elif widthFirstSegmentDefault:
                    netWidthI = (
                        widthFirstSegment.Value
                        - offsetHLeft.Value
                        - offsetHRight.Value
                    )  # 2*offsetH
            except Exception:
                if widthFirstSegmentDefault:
                    netWidthI = (
                        widthFirstSegment.Value
                        - offsetHLeft.Value
                        - offsetHRight.Value
                    )  # 2*offsetH
        else:
            netWidthI = (
                widthFirstSegment.Value - offsetHLeft.Value - offsetHRight.Value
            )  # 2*offsetH
        """ (3) append vBase """
        vBase.append(edges[i].Vertexes[0].Point)
        if isArc:
            # arcs also need the end vertex and the mid-parameter point
            vBase1 = edge.Vertexes[1].Point
            vBase2 = edge.valueAt((edge.LastParameter + edge.FirstParameter) / 2)
            # vBase2vec = (vBase2-vBase[i]) # - would not be correct if Align is not Left
        """ (1a) calc & append vLength - Need v (vec) """
        vLength.append(
            Vector(v[i].x, v[i].y, v[i].z)
        )  # TODO check all function below ok with curve? # TODO vLength in this f() is 3d
        """ (1b, 2a) calc & append vWidth - Need vLength, netWidthI """
        # vWidth.append(DraftVecUtils.scaleTo(vLength[i].cross(Vector(0,0,1)),netWidthI))
        if isLine:
            dvec = vLength[i].cross(Vector(0, 0, 1))
        elif isArc:
            # dvec = edge.Vertexes[0].Point.sub(edge.Curve.Center) # TODO - how to determine direction? - Reference from ArchWall; used tangentAt instead
            # dvec1 = edge.Vertexes[1].Point.sub(edge.Curve.Center)
            dvec = edge.tangentAt(edge.FirstParameter).cross(Vector(0, 0, 1))
            dvec1 = edge.tangentAt(edge.LastParameter).cross(Vector(0, 0, 1))
            dvec2 = edge.tangentAt(
                (edge.LastParameter + edge.FirstParameter) / 2
            ).cross(Vector(0, 0, 1))
        vWidth.append(DraftVecUtils.scaleTo(dvec, netWidthI))
        if isArc:
            vWidth1 = DraftVecUtils.scaleTo(dvec1, netWidthI)
            vWidth2 = DraftVecUtils.scaleTo(dvec2, netWidthI)
        """ (3a) alter vBase """
        if stairsObj:
            # chain onto the previous segment's top level, then apply vertical offsets
            vBase[i] = stairsObj.Proxy.vbaseFollowLastSegment(stairsObj, vBase[i])
            if isArc:
                vBase1 = stairsObj.Proxy.vbaseFollowLastSegment(stairsObj, vBase1)
                vBase2 = stairsObj.Proxy.vbaseFollowLastSegment(stairsObj, vBase2)
        vBase[i] = vBase[i].add(Vector(0, 0, offsetVLeft.Value))
        vBase[i] = vBase[i].add(Vector(0, 0, railStartRiser.Value))
        if isArc:
            vBase1 = vBase1.add(
                Vector(0, 0, offsetVLeft.Value)
            )  # TODO - if arc is flight (sloping then), arc would be ellipse, so the following become incorrect?
            vBase1 = vBase1.add(Vector(0, 0, railStartRiser.Value))
            vBase2 = vBase2.add(Vector(0, 0, offsetVLeft.Value))
            vBase2 = vBase2.add(Vector(0, 0, railStartRiser.Value))
        vOffsetH = DraftVecUtils.scaleTo(dvec, offsetHLeft.Value)
        if isArc:
            vOffsetH1 = DraftVecUtils.scaleTo(dvec1, offsetHLeft.Value)
            vOffsetH2 = DraftVecUtils.scaleTo(dvec2, offsetHLeft.Value)
        if align == "Left":
            vBase[i] = _Stairs.align(vBase[i], "Right", -vOffsetH)
            if isArc:
                vBase1 = _Stairs.align(vBase1, "Right", -vOffsetH1)
                vBase2 = _Stairs.align(vBase2, "Right", -vOffsetH2)
        elif align == "Right":
            vBase[i] = _Stairs.align(vBase[i], "Right", vOffsetH)
            if isArc:
                vBase1 = _Stairs.align(vBase1, "Right", vOffsetH1)
                vBase2 = _Stairs.align(vBase2, "Right", vOffsetH2)
        """ (3b, 2b/1c) get + alter [p1, p2, p3, p4] - Need vBase """
        p1.append(
            _Stairs.align(vBase[i], align, vWidth[i]).add(
                Vector(0, 0, -abs(treadThickness.Value))
            )
        )  # vWidth already calculated above against arc geometry
        if isLine:
            p2.append(
                p1[i].add(vLength[i]).add(Vector(0, 0, -railStartRiser.Value))
            )
            p3.append(
                p2[i]
                .add(vWidth[i])
                .add(Vector(0, 0, (offsetVRight - offsetVLeft).Value))
            )
            p4.append(
                p3[i]
                .add(DraftVecUtils.neg(vLength[i]))
                .add(Vector(0, 0, railStartRiser.Value))
            )
            pArc1.append(None)
            pArc2.append(None)
        elif isArc:
            p2.append(
                _Stairs.align(vBase1, align, vWidth1)
                .add(Vector(0, 0, -abs(treadThickness.Value)))
                .add(Vector(0, 0, -railStartRiser.Value))
            )
            p3.append(
                p2[i].add(
                    vWidth1.add(Vector(0, 0, (offsetVRight - offsetVLeft).Value))
                )
            )
            p4.append(
                p1[i].add(
                    vWidth[i].add(Vector(0, 0, (offsetVRight - offsetVLeft).Value))
                )
            )
            pArc1.append(
                _Stairs.align(vBase2, align, vWidth2)
                .add(Vector(0, 0, -abs(treadThickness.Value)))
                .add(Vector(0, 0, -railStartRiser.Value))
            )
            pArc2.append(
                pArc1[i].add(
                    vWidth2.add(Vector(0, 0, (offsetVRight - offsetVLeft).Value))
                )
            )
        """ (3c, 2c/2d) from [p1, p2, p3, p4] - calc outlineP1P2, outlineP3P4 """
        if i > 0:
            # join this segment to the previous one at the side intersections
            lastEdge = edges[i - 1]  # thisEdge = edge
            p1last = p1[i - 1]
            p2last = p2[i - 1]
            p3last = p3[i - 1]
            p4last = p4[i - 1]
            p1this = p1[i]
            p2this = p2[i]
            p3this = p3[i]
            p4this = p4[i]
            pArc1last = pArc1[i - 1]
            pArc2last = pArc2[i - 1]
            pArc1this = pArc1[i]
            pArc2this = pArc2[i]
            lastEdgeIsLineSegmentBool = isinstance(
                lastEdge.Curve, (Part.Line, Part.LineSegment)
            )
            thisEdgeIsLineSegmentBool = isinstance(
                edge.Curve, (Part.Line, Part.LineSegment)
            )
            lastEdgeIsCircleBool = isinstance(
                lastEdge.Curve, (Part.Circle)
            )  # why it is Part.Circle for an Arc Edge? - why Part.ArcOfCircle Not Working?
            thisEdgeIsCircleBool = isinstance(edge.Curve, (Part.Circle))
            intersectionP1P2, intersectionP3P4 = _Stairs.findLineArcIntersections(
                p1last,
                p2last,
                p3last,
                p4last,
                p1this,
                p2this,
                p3this,
                p4this,
                lastEdgeIsLineSegmentBool,
                thisEdgeIsLineSegmentBool,
                lastEdgeIsCircleBool,
                thisEdgeIsCircleBool,
                pArc1last,
                pArc2last,
                pArc1this,
                pArc2this,
            )
            outlineP1P2.append(intersectionP1P2)
            outlineP3P4.insert(0, intersectionP3P4)
        else:
            outlineP1P2.append(p1[i])
            outlineP3P4.insert(0, p4[i])
    # add back last/first 'missing' point(s)
    outlineP1P2.append(p2[i])
    outlineP3P4.insert(0, p3[i])
    outline = outlineP1P2 + outlineP3P4
    outline.append(p1[0])
    # mirror the arc-midpoint lists to match the combined outline order
    pArc1.append(None)
    pArc2 = pArc2[::-1]  # pArcReverse = pArc2[::-1]
    pArc2.append(None)
    pArc.extend(pArc1)
    pArc.extend(pArc2)  # pArc.extend(pArcReverse)
    firstEdgeIsLineSegmentBool = isinstance(
        edges[0].Curve, (Part.Line, Part.LineSegment)
    )
    firstEdgeIsCircleBool = isinstance(
        edges[0].Curve, (Part.Circle)
    )  # why it is Part.Circle for an Arc Edge? - why Part.ArcOfCircle Not Working?
    # NOTE(review): both mode-branches below reuse p1this/thisEdgeIs*Bool etc.
    # from the loop, which are only bound when there was more than one edge;
    # with a single edge these modes would raise NameError - TODO confirm
    # callers never combine them.
    if mode in [
        "OrderedClose",
        "OrderedCloseAndOrderedOpen",
    ]:  # seem only using 'OrderedClose'
        intersectionP1P2, intersectionP3P4 = _Stairs.findLineArcIntersections(
            p1this,
            p2this,
            p3this,
            p4this,
            p1[0],
            p2[0],
            p3[0],
            p4[0],
            thisEdgeIsLineSegmentBool,
            firstEdgeIsLineSegmentBool,
            thisEdgeIsCircleBool,
            firstEdgeIsCircleBool,
            pArc1this,
            pArc2this,
            pArc1[0],
            pArc2[0],
        )
        outlineP1P2Closed = list(outlineP1P2)
        outlineP1P2Closed[0] = intersectionP1P2  # intersection[0]
        outlineP1P2Closed[i + 1] = intersectionP1P2  # intersection[0]
        outlineP3P4Closed = list(outlineP3P4)
        outlineP3P4Closed[0] = intersectionP3P4  # intersection[0]
        outlineP3P4Closed[i + 1] = intersectionP3P4  # intersection[0]
    if mode in ["OrderedOpen", "OrderedCloseAndOrderedOpen"]:
        if i > 0:  # Multi-edge, otherwise no use
            outlineP1P2Ordered = list(outlineP1P2)
            """ Guessing the 1st Start Point based on Intersection """
            vx1 = Vector(outlineP1P2[1].x, outlineP1P2[1].y, 0)
            l0 = Part.LineSegment(
                edges[0].Vertexes[0].Point, edges[0].Vertexes[1].Point
            )
            try:
                distFrom1stParameter = l0.parameter(vx1)
                distFrom2ndParameter = l0.length() - distFrom1stParameter
                """ Further point of this line from intersection """
                if distFrom2ndParameter > distFrom1stParameter:
                    foundStart = edges[0].Vertexes[1].Point
                else:  # if distFrom2ndParameter = / < distFrom1stParameter (i.e. if equal, Vertexes[0].Point is taken ?)
                    foundStart = edges[0].Vertexes[0].Point
            except Exception:
                # NOTE(review): if this fires, foundStart stays unbound and
                # the assignment below raises NameError - TODO confirm intent
                print("Intersection point Not on this edge")
            """ Guessing the last End Point based on Intersection """
            vx99 = Vector(outlineP1P2[i].x, outlineP1P2[i].y, 0)
            l99 = Part.LineSegment(
                edges[i].Vertexes[0].Point, edges[i].Vertexes[1].Point
            )
            try:
                distFrom1stParameter = l99.parameter(vx99)
                distFrom2ndParameter = l99.length() - distFrom1stParameter
                if distFrom2ndParameter > distFrom1stParameter:
                    foundEnd = edges[i].Vertexes[1].Point
                else:
                    foundEnd = edges[i].Vertexes[0].Point
            except Exception:
                print("Intersection point Not on this edge")
            outlineP1P2Ordered[0] = foundStart
            outlineP1P2Ordered[i + 1] = foundEnd
    return (
        outline,
        outlineP1P2,
        outlineP3P4,
        vBase,
        outlineP1P2Closed,
        outlineP3P4Closed,
        outlineP1P2Ordered,
        pArc,
        pArc1,
        pArc2,
    )
@staticmethod
def findLineArcIntersections(
    p1last,
    p2last,
    p3last,
    p4last,
    p1this,
    p2this,
    p3this,
    p4this,
    lastEdgeIsLineSegmentBool,
    thisEdgeIsLineSegmentBool,
    lastEdgeIsCircleBool,
    thisEdgeIsCircleBool,
    pArc1last,
    pArc2last,
    pArc1this,
    pArc2this,
):
    """Intersect the previous and current outline segments on both sides.

    Returns one intersection point for the p1-p2 side and one for the
    p3-p4 side.  Straight/straight pairs are intersected as infinite
    lines from their endpoints; any pairing involving an arc is handled
    by building explicit Part edges and picking the candidate closest to
    the current segment's corner.
    """
    if lastEdgeIsLineSegmentBool and thisEdgeIsLineSegmentBool:
        hitsP1P2 = DraftGeomUtils.findIntersection(
            p1last, p2last, p1this, p2this, True, True
        )
        hitsP3P4 = DraftGeomUtils.findIntersection(
            p3last, p4last, p3this, p4this, True, True
        )
        return hitsP1P2[0], hitsP3P4[0]

    def _side_edges(isCircle, a, b, c, d, mid1, mid2):
        # build both side edges of one segment; arcs pass through their midpoint
        if isCircle:
            return (
                Part.Arc(a, mid1, b).toShape(),
                Part.Arc(c, mid2, d).toShape(),
            )
        return (
            Part.LineSegment(a, b).toShape(),
            Part.LineSegment(c, d).toShape(),
        )

    edgeLast, edgeLastFar = _side_edges(
        lastEdgeIsCircleBool, p1last, p2last, p3last, p4last, pArc1last, pArc2last
    )
    edgeThis, edgeThisFar = _side_edges(
        thisEdgeIsCircleBool, p1this, p2this, p3this, p4this, pArc1this, pArc2this
    )

    def _closest(points, ref):
        # first point at minimal distance from ref (same pick as index(min(...)))
        # TODO - To test and follow up if none intersection is found
        return min(points, key=lambda pt: (pt - ref).Length)

    nearestIntersectionP1P2 = _closest(
        DraftGeomUtils.findIntersection(edgeLast, edgeThis, True, True), p1this
    )
    nearestIntersectionP3P4 = _closest(
        DraftGeomUtils.findIntersection(edgeLastFar, edgeThisFar, True, True), p4this
    )
    return nearestIntersectionP1P2, nearestIntersectionP3P4
@staticmethod
def vbaseFollowLastSegment(obj, vBase):
if obj.LastSegment:
lastSegmentAbsTop = obj.LastSegment.AbsTop
vBase = Vector(
vBase.x, vBase.y, lastSegmentAbsTop.z
) # use Last Segment top's z-coordinate
return vBase
# Add flag (temporarily?) for indicating which method call this to determine whether the landing has been 're-based' before or not
def makeStraightLanding(
    self, obj, edge, numberofsteps=None, callByMakeStraightStairsWithLanding=False
):  # what is use of numberofsteps ?
    "builds a landing from a straight edge"
    # general data
    if not numberofsteps:
        numberofsteps = obj.NumberOfSteps
    v = DraftGeomUtils.vec(edge)
    vLength = Vector(v.x, v.y, 0)
    # NOTE(review): "vWidth = vWidth =" is a redundant double assignment
    vWidth = vWidth = DraftVecUtils.scaleTo(
        vLength.cross(Vector(0, 0, 1)), obj.Width.Value
    )
    vBase = edge.Vertexes[0].Point
    # if not call by makeStraightStairsWithLanding() - not 're-base' in function there, then 're-base' here
    if not callByMakeStraightStairsWithLanding:
        vBase = self.vbaseFollowLastSegment(obj, vBase)
        obj.AbsTop = vBase
    # nosing only applies to plain landings, not to half-turn flights
    if not obj.Flight in ["HalfTurnLeft", "HalfTurnRight"]:
        vNose = DraftVecUtils.scaleTo(vLength, -abs(obj.Nosing.Value))
    else:
        vNose = Vector(0, 0, 0)
    # determine overall height h: enforced riser height, Base bounding box, or obj.Height
    h = 0
    l = 0
    if obj.RiserHeightEnforce != 0:
        h = obj.RiserHeightEnforce * numberofsteps
    elif obj.Base:  # TODO - should this happen? - though in original code
        if hasattr(obj.Base, "Shape"):
            # l = obj.Base.Shape.Length
            # if obj.Base.Shape.BoundBox.ZLength:
            if (
                round(obj.Base.Shape.BoundBox.ZLength, Draft.precision()) != 0
            ):  # ? - need precision
                h = obj.Base.Shape.BoundBox.ZLength  # .Value?
            else:
                print("obj.Base has 0 z-value")
                print(h)
    if (h == 0) and obj.Height.Value != 0:
        h = obj.Height.Value
    else:
        print(h)
    # determine overall length l: enforced tread depth, Base shape, or obj.Length
    if obj.TreadDepthEnforce != 0:
        l = obj.TreadDepthEnforce.Value * (numberofsteps - 2)
        if obj.LandingDepth:
            l += obj.LandingDepth.Value
        else:
            l += obj.Width.Value
    elif obj.Base:
        if hasattr(obj.Base, "Shape"):
            l = obj.Base.Shape.Length  # .Value?
    elif obj.Length.Value != 0:
        l = obj.Length.Value
    # per-step run/rise and slope angle of the adjoining flight
    if obj.LandingDepth:
        fLength = float(l - obj.LandingDepth.Value) / (numberofsteps - 2)
    else:
        fLength = float(l - obj.Width.Value) / (numberofsteps - 2)
    fHeight = float(h) / numberofsteps
    a = math.atan(fHeight / fLength)
    print("landing data:", fLength, ":", fHeight)
    # step
    # landing quad p1..p4; the *o variants are the outline without nosing
    p1 = self.align(vBase, obj.Align, vWidth)
    p1o = p1.add(Vector(0, 0, -abs(obj.TreadThickness.Value)))
    p1 = p1.add(vNose).add(Vector(0, 0, -abs(obj.TreadThickness.Value)))
    p2 = p1.add(DraftVecUtils.neg(vNose)).add(vLength)
    p3 = p2.add(vWidth)
    p4 = p3.add(DraftVecUtils.neg(vLength)).add(vNose)
    p4o = p3.add(DraftVecUtils.neg(vLength))
    if not callByMakeStraightStairsWithLanding:
        p2o = p2
        p3o = p3
    if callByMakeStraightStairsWithLanding:
        # widen the landing sideways for half-turn flights
        if obj.Flight == "HalfTurnLeft":
            p1 = p1.add(-vWidth)
            p2 = p2.add(-vWidth)
        elif obj.Flight == "HalfTurnRight":
            p3 = p3.add(vWidth)
            p4 = p4.add(vWidth)
    step = Part.Face(Part.makePolygon([p1, p2, p3, p4, p1]))
    if obj.TreadThickness.Value:
        step = step.extrude(Vector(0, 0, abs(obj.TreadThickness.Value)))
        self.steps.append(step)
    else:
        self.pseudosteps.append(step)
    # structure
    # p1..p4 are reused below as the structure cross-section corners
    struct = None
    p1 = p1.add(DraftVecUtils.neg(vNose))
    p2 = p1.add(Vector(0, 0, -(abs(fHeight) - obj.TreadThickness.Value)))
    p3 = p2.add(vLength)
    p4 = p1.add(vLength)
    if obj.Structure == "Massive":
        if obj.StructureThickness.Value:
            struct = Part.Face(Part.makePolygon([p1, p2, p3, p4, p1]))
            evec = vWidth
            mvec = FreeCAD.Vector(0, 0, 0)
            if obj.StructureOffset.Value:
                mvec = DraftVecUtils.scaleTo(vWidth, obj.StructureOffset.Value)
                struct.translate(mvec)
            if obj.Flight in ["HalfTurnLeft", "HalfTurnRight"]:
                evec = DraftVecUtils.scaleTo(
                    evec, 2 * evec.Length - 2 * mvec.Length
                )
            else:
                evec = DraftVecUtils.scaleTo(evec, evec.Length - (2 * mvec.Length))
            struct = struct.extrude(evec)
    elif obj.Structure in ["One stringer", "Two stringers"]:
        if obj.StringerWidth.Value and obj.StructureThickness.Value:
            # stringer profile built from the slope angle a
            reslength = fHeight / math.tan(a)
            p1b = p1.add(DraftVecUtils.scaleTo(vLength, reslength))
            p1c = p1.add(Vector(0, 0, -fHeight))
            reslength = obj.StructureThickness.Value / math.cos(a)
            p1d = p1c.add(Vector(0, 0, -reslength))
            reslength = obj.StructureThickness.Value * math.tan(a / 2)
            p2 = p1b.add(DraftVecUtils.scaleTo(vLength, reslength)).add(
                Vector(0, 0, -obj.StructureThickness.Value)
            )
            p3 = p4.add(DraftVecUtils.scaleTo(vLength, reslength)).add(
                Vector(0, 0, -obj.StructureThickness.Value)
            )
            if obj.TreadThickness.Value:
                reslength = obj.TreadThickness.Value / math.tan(a)
                p3c = p4.add(DraftVecUtils.scaleTo(vLength, reslength)).add(
                    Vector(0, 0, obj.TreadThickness.Value)
                )
                reslength = obj.StructureThickness.Value / math.sin(a)
                p3b = p3c.add(DraftVecUtils.scaleTo(vLength, reslength))
                pol = Part.Face(
                    Part.makePolygon([p1b, p1c, p1d, p2, p3, p3b, p3c, p4, p1b])
                )
            else:
                reslength = obj.StructureThickness.Value / math.sin(a)
                p3b = p4.add(DraftVecUtils.scaleTo(vLength, reslength))
                pol = Part.Face(Part.makePolygon([p1b, p1c, p1d, p2, p3, p3b, p1b]))
            evec = DraftVecUtils.scaleTo(vWidth, obj.StringerWidth.Value)
            if obj.Structure == "One stringer":
                if obj.StructureOffset.Value:
                    mvec = DraftVecUtils.scaleTo(vWidth, obj.StructureOffset.Value)
                else:
                    # default: center the single stringer across the width
                    mvec = DraftVecUtils.scaleTo(
                        vWidth, (vWidth.Length / 2) - obj.StringerWidth.Value / 2
                    )
                pol.translate(mvec)
                struct = pol.extrude(evec)
            elif obj.Structure == "Two stringers":
                pol2 = pol.copy()
                if obj.StructureOffset.Value:
                    mvec = DraftVecUtils.scaleTo(vWidth, obj.StructureOffset.Value)
                    pol.translate(mvec)
                    mvec = vWidth.add(mvec.negative())
                    pol2.translate(mvec)
                else:
                    pol2.translate(vWidth)
                s1 = pol.extrude(evec)
                s2 = pol2.extrude(evec.negative())
                struct = Part.makeCompound([s1, s2])
    # Overwriting result of above functions if case fit - should better avoid running the above in first place (better rewrite later)
    if not callByMakeStraightStairsWithLanding:
        if obj.StructureThickness.Value:
            struct = None
            landingFace = Part.Face(Part.makePolygon([p1o, p2o, p3o, p4o, p1o]))
            struct = landingFace.extrude(
                Vector(0, 0, -abs(obj.StructureThickness.Value))
            )
    if struct:
        self.structures.append(struct)
    def makeStraightStairs(
        self,
        obj,
        edge,
        s1,
        s2,
        numberofsteps=None,
        downstartstairs=None,
        endstairsup=None,
    ):
        "builds a simple, straight staircase from a straight edge"
        # s1 / s2 are the connecting slab thicknesses at the start (down) and
        # end (up) of the flight; downstartstairs / endstairsup select how the
        # massive structure is trimmed at those connections.  numberofsteps is
        # only passed when called from makeStraightStairsWithLanding().
        # Upgrade obj if it is from an older version of FreeCAD
        if not hasattr(obj, "StringerOverlap"):
            obj.addProperty(
                "App::PropertyLength",
                "StringerOverlap",
                "Structure",
                QT_TRANSLATE_NOOP(
                    "App::Property",
                    "The overlap of the stringers above the bottom of the treads",
                ),
            )
        # general data
        if not numberofsteps:
            numberofsteps = obj.NumberOfSteps
            # if not numberofsteps - not call by makeStraightStairsWithLanding()
            # if not 're-base' there (StraightStair is part of StraightStairsWithLanding 'flight'), then 're-base' here (StraightStair is individual 'flight')
            callByMakeStraightStairsWithLanding = False
        else:
            callByMakeStraightStairsWithLanding = True
        if not downstartstairs:
            downstartstairs = obj.ConnectionDownStartStairs
        if not endstairsup:
            endstairsup = obj.ConnectionEndStairsUp
        # basic step vectors: run (vLength), rise (vHeight) and width (vWidth)
        v = DraftGeomUtils.vec(edge)
        vLength = DraftVecUtils.scaleTo(v, float(edge.Length) / (numberofsteps - 1))
        vLength = Vector(vLength.x, vLength.y, 0)
        if round(v.z, Draft.precision()) != 0:
            # a sloped base edge defines the total height itself
            h = v.z
        else:
            h = obj.Height.Value
        vHeight = Vector(0, 0, float(h) / numberofsteps)
        vWidth = DraftVecUtils.scaleTo(vLength.cross(Vector(0, 0, 1)), obj.Width.Value)
        vBase = edge.Vertexes[0].Point
        if not callByMakeStraightStairsWithLanding:
            # individual flight: re-base on top of the previous segment, if any
            if obj.LastSegment:
                print("obj.LastSegment is: ")
                print(obj.LastSegment.Name)
                lastSegmentAbsTop = obj.LastSegment.AbsTop
                print("lastSegmentAbsTop is: ")
                print(lastSegmentAbsTop)
                vBase = Vector(
                    vBase.x, vBase.y, lastSegmentAbsTop.z
                )  # use Last Segment top's z-coordinate
            obj.AbsTop = vBase.add(Vector(0, 0, h))
        vNose = DraftVecUtils.scaleTo(vLength, -abs(obj.Nosing.Value))
        # a = slope angle of the flight
        a = math.atan(vHeight.Length / vLength.Length)
        vBasedAligned = self.align(vBase, obj.Align, vWidth)
        vRiserThickness = DraftVecUtils.scaleTo(
            vLength, obj.RiserThickness.Value
        )  # 50)
        # steps and risers
        for i in range(numberofsteps - 1):
            # p1 = vBase.add((Vector(vLength).multiply(i)).add(Vector(vHeight).multiply(i+1)))
            p1 = vBasedAligned.add(
                (Vector(vLength).multiply(i)).add(Vector(vHeight).multiply(i + 1))
            )
            # p1 = self.align(p1,obj.Align,vWidth)
            # p1 = p1.add(vNose).add(Vector(0,0,-abs(obj.TreadThickness.Value)))
            p1 = p1.add(Vector(0, 0, -abs(obj.TreadThickness.Value)))
            r1 = p1
            p1 = p1.add(vNose)
            p2 = p1.add(DraftVecUtils.neg(vNose)).add(vLength)
            p3 = p2.add(vWidth)
            p4 = p3.add(DraftVecUtils.neg(vLength)).add(vNose)
            step = Part.Face(Part.makePolygon([p1, p2, p3, p4, p1]))
            if obj.TreadThickness.Value:
                step = step.extrude(Vector(0, 0, abs(obj.TreadThickness.Value)))
                self.steps.append(step)
            else:
                # zero thickness: keep the flat face for later processing
                self.pseudosteps.append(step)
            """ risers - add to steps or pseudosteps in the meantime before adding self.risers / self.pseudorisers """
            # vResHeight = vHeight.add(Vector(0,0,-abs(obj.TreadThickness.Value)))
            r2 = r1.add(DraftVecUtils.neg(vHeight))  # vResHeight
            if i == 0:
                r2 = r2.add(Vector(0, 0, abs(obj.TreadThickness.Value)))
            r3 = r2.add(vWidth)
            r4 = r3.add(vHeight)  # vResHeight
            if i == 0:
                r4 = r4.add(Vector(0, 0, -abs(obj.TreadThickness.Value)))
            riser = Part.Face(Part.makePolygon([r1, r2, r3, r4, r1]))
            if obj.RiserThickness.Value:
                riser = riser.extrude(vRiserThickness)  # Vector(0,100,0))
                self.steps.append(riser)
            else:
                self.pseudosteps.append(riser)
        ##
        # structure
        lProfile = []
        struct = None
        if obj.Structure == "Massive":
            if obj.StructureThickness.Value:
                # '# Massive Structure to respect 'align' attribute'
                vBase = vBasedAligned.add(vRiserThickness)
                # build the sawtooth profile following the steps
                for i in range(numberofsteps - 1):
                    if not lProfile:
                        lProfile.append(vBase)
                    last = lProfile[-1]
                    if len(lProfile) == 1:
                        last = last.add(Vector(0, 0, -abs(obj.TreadThickness.Value)))
                    lProfile.append(last.add(vHeight))
                    lProfile.append(lProfile[-1].add(vLength))
                lProfile[-1] = lProfile[-1].add(-vRiserThickness)
                # resHeight1: vertical thickness of the slab measured along z
                resHeight1 = obj.StructureThickness.Value / math.cos(a)
                dh = s2 - float(h) / numberofsteps
                resHeight2 = ((numberofsteps - 1) * vHeight.Length) - dh
                if endstairsup == "toFlightThickness":
                    lProfile.append(lProfile[-1].add(Vector(0, 0, -resHeight1)))
                    resHeight2 = ((numberofsteps - 1) * vHeight.Length) - (
                        resHeight1 + obj.TreadThickness.Value
                    )
                    resLength = (vLength.Length / vHeight.Length) * resHeight2
                    # NOTE: h is reused here as a Vector (it was the flight height)
                    h = DraftVecUtils.scaleTo(vLength, -resLength)
                elif endstairsup == "toSlabThickness":
                    resLength = (vLength.Length / vHeight.Length) * resHeight2
                    h = DraftVecUtils.scaleTo(vLength, -resLength)
                    th = (resHeight1 + obj.TreadThickness.Value) - dh
                    resLength2 = th / math.tan(a)
                    lProfile.append(
                        lProfile[-1].add(Vector(0, 0, obj.TreadThickness.Value - dh))
                    )
                    lProfile.append(
                        lProfile[-1].add(DraftVecUtils.scaleTo(vLength, resLength2))
                    )
                if s1 > resHeight1:
                    # lower slab thicker than the flight: force a vertical cut
                    downstartstairs = "VerticalCut"
                if downstartstairs == "VerticalCut":
                    dh = (
                        obj.DownSlabThickness.Value
                        - resHeight1
                        - obj.TreadThickness.Value
                    )
                    resHeight2 = resHeight2 + obj.DownSlabThickness.Value - dh
                    resLength = (vLength.Length / vHeight.Length) * resHeight2
                    lProfile.append(
                        lProfile[-1]
                        .add(DraftVecUtils.scaleTo(vLength, -resLength))
                        .add(Vector(0, 0, -resHeight2))
                    )
                elif downstartstairs == "HorizontalVerticalCut":
                    # NOTE(review): the temp_s1 round-trip below never changes
                    # s1 -- suspected leftover from an earlier edit; confirm.
                    temp_s1 = s1
                    if obj.UpSlabThickness.Value > resHeight1:
                        s1 = temp_s1
                    resHeight2 = resHeight2 + s1
                    resLength = (vLength.Length / vHeight.Length) * resHeight2
                    th = (resHeight1 - s1) + obj.TreadThickness.Value
                    resLength2 = th / math.tan(a)
                    lProfile.append(
                        lProfile[-1]
                        .add(DraftVecUtils.scaleTo(vLength, -resLength))
                        .add(Vector(0, 0, -resHeight2))
                    )
                    lProfile.append(
                        lProfile[-1].add(DraftVecUtils.scaleTo(vLength, -resLength2))
                    )
                else:
                    # h is the Vector computed in the endstairsup branches above
                    lProfile.append(lProfile[-1].add(Vector(h.x, h.y, -resHeight2)))
                lProfile.append(vBase)
                pol = Part.makePolygon(lProfile)
                struct = Part.Face(pol)
                evec = vWidth
                if obj.StructureOffset.Value:
                    # inset the structure symmetrically from both sides
                    mvec = DraftVecUtils.scaleTo(vWidth, obj.StructureOffset.Value)
                    struct.translate(mvec)
                    evec = DraftVecUtils.scaleTo(evec, evec.Length - (2 * mvec.Length))
                struct = struct.extrude(evec)
        elif obj.Structure in ["One stringer", "Two stringers"]:
            if obj.StringerWidth.Value and obj.StructureThickness.Value:
                # hyp = length of one step measured along the slope
                hyp = math.sqrt(vHeight.Length**2 + vLength.Length**2)
                l1 = Vector(vLength).multiply(numberofsteps - 1)
                h1 = (
                    Vector(vHeight)
                    .multiply(numberofsteps - 1)
                    .add(
                        Vector(
                            0,
                            0,
                            -abs(obj.TreadThickness.Value) + obj.StringerOverlap.Value,
                        )
                    )
                )
                p1 = vBase.add(l1).add(h1)
                p1 = self.align(p1, obj.Align, vWidth)
                if obj.StringerOverlap.Value <= float(h) / numberofsteps:
                    lProfile.append(p1)
                else:
                    # overlap taller than one riser: clip the profile at the top
                    p1b = vBase.add(l1).add(Vector(0, 0, float(h)))
                    p1a = p1b.add(
                        Vector(vLength).multiply((p1b.z - p1.z) / vHeight.Length)
                    )
                    lProfile.append(p1a)
                    lProfile.append(p1b)
                h2 = (obj.StructureThickness.Value / vLength.Length) * hyp
                lProfile.append(p1.add(Vector(0, 0, -abs(h2))))
                h3 = lProfile[-1].z - vBase.z
                l3 = (h3 / vHeight.Length) * vLength.Length
                v3 = DraftVecUtils.scaleTo(vLength, -l3)
                lProfile.append(lProfile[-1].add(Vector(0, 0, -abs(h3))).add(v3))
                l4 = (obj.StructureThickness.Value / vHeight.Length) * hyp
                v4 = DraftVecUtils.scaleTo(vLength, -l4)
                lProfile.append(lProfile[-1].add(v4))
                lProfile.append(lProfile[0])
                # print(lProfile)
                pol = Part.makePolygon(lProfile)
                pol = Part.Face(pol)
                evec = DraftVecUtils.scaleTo(vWidth, obj.StringerWidth.Value)
                if obj.Structure == "One stringer":
                    if obj.StructureOffset.Value:
                        mvec = DraftVecUtils.scaleTo(vWidth, obj.StructureOffset.Value)
                    else:
                        # default: center the single stringer
                        mvec = DraftVecUtils.scaleTo(
                            vWidth, (vWidth.Length / 2) - obj.StringerWidth.Value / 2
                        )
                    pol.translate(mvec)
                    struct = pol.extrude(evec)
                elif obj.Structure == "Two stringers":
                    pol2 = pol.copy()
                    if obj.StructureOffset.Value:
                        mvec = DraftVecUtils.scaleTo(vWidth, obj.StructureOffset.Value)
                        pol.translate(mvec)
                        mvec = vWidth.add(mvec.negative())
                        pol2.translate(mvec)
                    else:
                        pol2.translate(vWidth)
                    # NOTE(review): the s1/s2 parameters are shadowed by the two
                    # stringer solids from here on -- confirm this is intended.
                    s1 = pol.extrude(evec)
                    s2 = pol2.extrude(evec.negative())
                    struct = Part.makeCompound([s1, s2])
        if struct:
            self.structures.append(struct)
    def makeStraightStairsWithLanding(self, obj, edge):
        "builds a straight staircase with/without a landing in the middle"
        if obj.NumberOfSteps < 2:
            print("Fewer than 2 steps, unable to create/update stairs")
            return
        v = DraftGeomUtils.vec(edge)
        v_proj = Vector(v.x, v.y, 0)  # Projected on XY plane.
        landing = 0
        # tread depth: either enforced, or derived from the edge length
        if obj.TreadDepthEnforce == 0:
            if obj.Landings == "At center" and obj.NumberOfSteps > 3:
                # a central landing consumes one tread-depth worth of run
                if obj.LandingDepth:
                    reslength = v_proj.Length - obj.LandingDepth.Value
                else:
                    reslength = v_proj.Length - obj.Width.Value
                treadDepth = reslength / (obj.NumberOfSteps - 2)
            else:
                reslength = v_proj.Length
                treadDepth = reslength / (obj.NumberOfSteps - 1)
            obj.TreadDepth = treadDepth
            vLength = DraftVecUtils.scaleTo(v_proj, treadDepth)
        else:
            obj.TreadDepth = obj.TreadDepthEnforce
            vLength = DraftVecUtils.scaleTo(v_proj, obj.TreadDepthEnforce.Value)
        vWidth = DraftVecUtils.scaleTo(vLength.cross(Vector(0, 0, 1)), obj.Width.Value)
        p1 = edge.Vertexes[0].Point
        # riser height: either enforced, or derived from the total height
        if obj.RiserHeightEnforce == 0:
            if round(v.z, Draft.precision()) != 0:
                # a sloped base edge defines the total height itself
                h = v.z
            else:
                h = obj.Height.Value
            hstep = h / obj.NumberOfSteps
            obj.RiserHeight = hstep
        else:
            h = obj.RiserHeightEnforce.Value * (obj.NumberOfSteps)
            hstep = obj.RiserHeightEnforce.Value
            obj.RiserHeight = hstep
        # landing = index of the step where the landing starts (or step count)
        if obj.Landings == "At center" and obj.NumberOfSteps > 3:
            landing = int(obj.NumberOfSteps / 2)
        else:
            landing = obj.NumberOfSteps
        if obj.LastSegment:
            lastSegmentAbsTop = obj.LastSegment.AbsTop
            p1 = Vector(
                p1.x, p1.y, lastSegmentAbsTop.z
            )  # use Last Segment top's z-coordinate
        obj.AbsTop = p1.add(Vector(0, 0, h))
        # p1-p2: lower flight; p2-p3: landing; p3-p4 (or p3r-p4r): upper flight
        p2 = p1.add(
            DraftVecUtils.scale(vLength, landing - 1).add(Vector(0, 0, landing * hstep))
        )
        if obj.Landings == "At center" and obj.NumberOfSteps > 3:
            if obj.LandingDepth:
                p3 = p2.add(DraftVecUtils.scaleTo(vLength, obj.LandingDepth.Value))
            else:
                p3 = p2.add(DraftVecUtils.scaleTo(vLength, obj.Width.Value))
            if obj.Flight in ["HalfTurnLeft", "HalfTurnRight"]:
                # half-turn: the upper flight runs back parallel to the lower
                # one, shifted sideways depending on Align and turn direction
                if (obj.Align == "Left" and obj.Flight == "HalfTurnLeft") or (
                    obj.Align == "Right" and obj.Flight == "HalfTurnRight"
                ):
                    p3r = p2
                elif obj.Align == "Left" and obj.Flight == "HalfTurnRight":
                    p3r = self.align(
                        p2, "Right", -2 * vWidth
                    )  # -ve / opposite direction of "Right" - no "Left" in _Stairs.Align()
                elif obj.Align == "Right" and obj.Flight == "HalfTurnLeft":
                    p3r = self.align(p2, "Right", 2 * vWidth)
                elif obj.Align == "Center" and obj.Flight == "HalfTurnLeft":
                    p3r = self.align(p2, "Right", vWidth)
                elif obj.Align == "Center" and obj.Flight == "HalfTurnRight":
                    p3r = self.align(
                        p2, "Right", -vWidth
                    )  # -ve / opposite direction of "Right" - no "Left" in _Stairs.Align()
                else:
                    # NOTE(review): if this branch runs, p3r stays unbound and
                    # the "if p3r" below raises NameError -- confirm the
                    # Align/Flight combinations are exhaustive.
                    print("Should have a bug here, if see this")
                if p3r:
                    p4r = p3r.add(
                        DraftVecUtils.scale(
                            -vLength, obj.NumberOfSteps - (landing + 1)
                        ).add(Vector(0, 0, (obj.NumberOfSteps - landing) * hstep))
                    )
            else:
                p4 = p3.add(
                    DraftVecUtils.scale(vLength, obj.NumberOfSteps - (landing + 1)).add(
                        Vector(0, 0, (obj.NumberOfSteps - landing) * hstep)
                    )
                )
            self.makeStraightLanding(
                obj, Part.LineSegment(p2, p3).toShape(), None, True
            )
            # upper flight
            if obj.Flight in ["HalfTurnLeft", "HalfTurnRight"]:
                self.makeStraightStairs(
                    obj,
                    Part.LineSegment(p3r, p4r).toShape(),
                    obj.RiserHeight.Value,
                    obj.UpSlabThickness.Value,
                    obj.NumberOfSteps - landing,
                    "HorizontalVerticalCut",
                    None,
                )
            else:
                self.makeStraightStairs(
                    obj,
                    Part.LineSegment(p3, p4).toShape(),
                    obj.RiserHeight.Value,
                    obj.UpSlabThickness.Value,
                    obj.NumberOfSteps - landing,
                    "HorizontalVerticalCut",
                    None,
                )
            # lower flight
            self.makeStraightStairs(
                obj,
                Part.LineSegment(p1, p2).toShape(),
                obj.DownSlabThickness.Value,
                obj.RiserHeight.Value,
                landing,
                None,
                "toSlabThickness",
            )
        else:
            if obj.Landings == "At center":
                print("Fewer than 4 steps, unable to create landing")
            # single flight, no landing
            self.makeStraightStairs(
                obj,
                Part.LineSegment(p1, p2).toShape(),
                obj.DownSlabThickness.Value,
                obj.UpSlabThickness.Value,
                landing,
                None,
                None,
            )
        print(p1, p2)
        if obj.Landings == "At center" and obj.NumberOfSteps > 3:
            if obj.Flight not in ["HalfTurnLeft", "HalfTurnRight"]:
                print(p3, p4)
            elif obj.Flight in ["HalfTurnLeft", "HalfTurnRight"]:
                print(p3r, p4r)
        # compute the railing outlines over the lower flight edge
        edge = Part.LineSegment(p1, p2).toShape()
        (
            outlineNotUsed,
            outlineRailL,
            outlineRailR,
            vBase2,
            outlineP1P2ClosedNU,
            outlineP3P4ClosedNU,
            NU,
            pArc,
            pArc1,
            pArc2,
        ) = self.returnOutlines(
            obj,
            edge,
            obj.Align,
            None,
            obj.Width,
            obj.WidthOfLanding,
            obj.TreadThickness,
            obj.RiserHeight,
            obj.RailingOffsetLeft,
            obj.RailingOffsetRight,
            obj.RailingHeightLeft,
            obj.RailingHeightRight,
            True,
        )
        self.connectRailingVector(obj, outlineRailL, outlineRailR, pArc1, pArc2)
    def connectRailingVector(
        self, obj, outlineRailL, outlineRailR, pArcRailL, pArcRailR
    ):
        """Store the railing outlines on obj and chain them with the outlines
        of obj.LastSegment (if any) into the cumulative ...All lists.

        Left outlines are appended previous-segment-first; right outlines are
        appended current-segment-first (the right rail runs the other way).
        Overlapping junction points between segments are dropped.
        """
        obj.OutlineLeft = outlineRailL  # outlineL # outlineP1P2
        obj.OutlineRight = outlineRailR  # outlineR # outlineP3P4
        self.OutlineRailArcLeft = pArcRailL  # obj.OutlineRailArcLeft = pArcRailL
        self.OutlineRailArcRight = pArcRailR  # obj.OutlineRailArcRight = pArcRailR
        (
            outlineLeftAll,
            outlineRightAll,
            outlineRailArcLeftAll,
            outlineRailArcRightAll,
        ) = ([], [], [], [])
        outlineRightAll.extend(obj.OutlineRight)
        outlineRailArcRightAll = self.OutlineRailArcRight
        if obj.LastSegment:
            if obj.LastSegment.OutlineLeftAll:
                outlineLeftAll.extend(obj.LastSegment.OutlineLeftAll)
                if obj.LastSegment.Proxy.OutlineRailArcLeftAll:  # need if?
                    outlineRailArcLeftAll.extend(
                        obj.LastSegment.Proxy.OutlineRailArcLeftAll
                    )
                if (
                    (outlineLeftAll[-1] - obj.OutlineLeft[0]).Length < 0.01
                ):  # To avoid 2 points overlapping fail creating LineSegment # TODO to allow tolerance Part.LineSegment / edge.toShape() allow?
                    # no need abs() after .Length right?
                    del outlineLeftAll[-1]
                    del outlineRailArcLeftAll[-1]
            if (
                outlineRightAll[-1] - obj.LastSegment.OutlineRightAll[0]
            ).Length < 0.01:  # See above
                del outlineRightAll[-1]
                del outlineRailArcRightAll[-1]
            if obj.LastSegment.OutlineRightAll:  # need if?
                outlineRightAll.extend(obj.LastSegment.OutlineRightAll)
            if obj.LastSegment.Proxy.OutlineRailArcRightAll:  # need if?
                outlineRailArcRightAll.extend(
                    obj.LastSegment.Proxy.OutlineRailArcRightAll
                )
        outlineLeftAll.extend(obj.OutlineLeft)
        outlineRailArcLeftAll.extend(self.OutlineRailArcLeft)
        obj.OutlineLeftAll = outlineLeftAll
        obj.OutlineRightAll = outlineRightAll
        self.OutlineRailArcLeftAll = outlineRailArcLeftAll
        self.OutlineRailArcRightAll = outlineRailArcRightAll
def makeCurvedStairs(self, obj, edge):
print("Not yet implemented!")
def makeCurvedStairsWithLanding(self, obj, edge):
print("Not yet implemented!")
class _ViewProviderStairs(ArchComponent.ViewProviderComponent):
    "A View Provider for Stairs"

    def __init__(self, vobj):
        ArchComponent.ViewProviderComponent.__init__(self, vobj)

    def getIcon(self):
        """Return the tree-view icon for stairs objects."""
        import Arch_rc

        return ":/icons/Arch_Stairs_Tree.svg"

    def claimChildren(self):
        "Define which objects will appear as children in the tree view"
        if not hasattr(self, "Object"):
            return []
        obj = self.Object
        children = []
        # single-object attributes are appended, list attributes extended
        for attribute in ("Base", "RailingLeft", "RailingRight"):
            if hasattr(obj, attribute):
                children.append(getattr(obj, attribute))
        for attribute in ("Additions", "Subtractions"):
            if hasattr(obj, attribute):
                children.extend(getattr(obj, attribute))
        return children
# Register the Arch Stairs GUI command (only when running with a GUI)
if FreeCAD.GuiUp:
    FreeCADGui.addCommand("Arch_Stairs", _CommandStairs())
|
models | escalation_chain | import typing
from apps.alerts.models.escalation_policy import (
generate_public_primary_key_for_escalation_policy,
)
from common.public_primary_keys import (
generate_public_primary_key,
increase_public_primary_key_length,
)
from django.conf import settings
from django.core.validators import MinLengthValidator
from django.db import models, transaction
if typing.TYPE_CHECKING:
from apps.alerts.models import ChannelFilter, EscalationPolicy
from django.db.models.manager import RelatedManager
def generate_public_primary_key_for_escalation_chain():
    """Return a public primary key (prefix "F") unique among EscalationChains.

    On collision, retry with a progressively longer key.
    """
    prefix = "F"
    candidate = generate_public_primary_key(prefix)
    attempt = 0
    while EscalationChain.objects.filter(public_primary_key=candidate).exists():
        candidate = increase_public_primary_key_length(
            failure_counter=attempt, prefix=prefix, model_name="EscalationChain"
        )
        attempt += 1
    return candidate
class EscalationChain(models.Model):
    # Typing-only declarations for the reverse relations created by
    # ChannelFilter.escalation_chain and EscalationPolicy.escalation_chain.
    channel_filters: "RelatedManager['ChannelFilter']"
    escalation_policies: "RelatedManager['EscalationPolicy']"

    # Key exposed through the public API; generated (and lengthened on
    # collision) by generate_public_primary_key_for_escalation_chain().
    public_primary_key = models.CharField(
        max_length=20,
        validators=[MinLengthValidator(settings.PUBLIC_PRIMARY_KEY_MIN_LENGTH + 1)],
        unique=True,
        default=generate_public_primary_key_for_escalation_chain,
    )
    organization = models.ForeignKey(
        "user_management.Organization",
        on_delete=models.CASCADE,
        related_name="escalation_chains",
    )
    # Optional team; a NULL team is reported as the "General" team below.
    team = models.ForeignKey(
        "user_management.Team",
        on_delete=models.SET_NULL,
        related_name="escalation_chains",
        null=True,
        default=None,
    )
    name = models.CharField(max_length=100)

    class Meta:
        # Chain names are unique per organization, not globally.
        unique_together = ("organization", "name")

    def __str__(self):
        return f"{self.pk}: {self.name}"

    def make_copy(self, copy_name: str, team):
        # Create a copy of this chain (including all escalation policies)
        # named `copy_name` and assigned to `team`; returns the new chain.
        with transaction.atomic():
            copied_chain = EscalationChain.objects.create(
                organization=self.organization,
                team=team,
                name=copy_name,
            )
            for escalation_policy in self.escalation_policies.all():
                # https://docs.djangoproject.com/en/3.2/topics/db/queries/#copying-model-instances
                notify_to_users_queue = escalation_policy.notify_to_users_queue.all()
                # clearing pk makes save() INSERT a new row instead of updating
                escalation_policy.pk = None
                escalation_policy.public_primary_key = (
                    generate_public_primary_key_for_escalation_policy()
                )
                escalation_policy.last_notified_user = None
                escalation_policy.escalation_chain = copied_chain
                escalation_policy.save()
                # m2m links must be re-attached after the new row exists
                escalation_policy.notify_to_users_queue.set(notify_to_users_queue)
            return copied_chain

    # Insight logs
    @property
    def insight_logs_type_verbal(self):
        # object-type identifier used in insight log records
        return "escalation_chain"

    @property
    def insight_logs_verbal(self):
        # human-readable identifier used in insight log records
        return self.name

    @property
    def insight_logs_serialized(self):
        result = {
            "name": self.name,
        }
        if self.team:
            result["team"] = self.team.name
            result["team_id"] = self.team.public_primary_key
        else:
            # NULL team means the implicit "General" team
            result["team"] = "General"
        return result

    @property
    def insight_logs_metadata(self):
        result = {}
        if self.team:
            result["team"] = self.team.name
            result["team_id"] = self.team.public_primary_key
        else:
            result["team"] = "General"
        return result
|
downloaders | FileuploadNet | # -*- coding: utf-8 -*-
from ..anticaptchas.ReCaptcha import ReCaptcha
from ..base.simple_downloader import SimpleDownloader
class FileuploadNet(SimpleDownloader):
    __name__ = "FileuploadNet"
    __type__ = "downloader"
    __version__ = "0.08"
    __status__ = "testing"

    __pattern__ = r"https?://(?:www\.)?file-upload\.net/(?:en/)?download-\d+/.+"
    __config__ = [
        ("enabled", "bool", "Activated", True),
        ("use_premium", "bool", "Use premium account if available", True),
        ("fallback", "bool", "Fallback to free download if premium fails", True),
        ("chk_filesize", "bool", "Check file size", True),
        ("max_wait", "int", "Reconnect if waiting time is greater than minutes", 10),
    ]

    __description__ = """File-upload.net downloader plugin"""
    __license__ = "GPLv3"
    __authors__ = [
        ("zapp-brannigan", "fuerst.reinje@web.de"),
        ("GammaC0de", "nitzo2001[AT]yahoo[DOT]com"),
    ]

    NAME_PATTERN = r"<title>File-Upload.net - (?P<N>.+?)<"
    SIZE_PATTERN = r"</label><span>(?P<S>[\d.,]+) (?P<U>[\w^_]+)"
    OFFLINE_PATTERN = r"Datei existiert nicht"
    LINK_FREE_PATTERN = r"<a href='(.+?)' title='download' onclick"

    def setup(self):
        """Configure download behaviour: parallel downloads, single chunk."""
        self.chunk_limit = 1
        self.multi_dl = True

    def handle_free(self, pyfile):
        """Fetch the free-download form, solve the invisible ReCaptcha and
        submit the form to start the download.  self.fail() aborts by raising."""
        action, inputs = self.parse_html_form('id="downloadstart"')
        if not inputs:
            self.fail(self._("download form not found"))

        self.captcha = ReCaptcha(pyfile)
        captcha_key = self.captcha.detect_key()
        if not captcha_key:
            self.fail(self._("ReCaptcha key not found"))

        inputs["g-recaptcha-response"] = self.captcha.challenge(
            captcha_key, version="2invisible"
        )
        self.download(action, post=inputs)
|
update | update_languages | #!/usr/bin/env python
# This script generates languages.py from intersecting each engine's supported languages.
#
# Output files: searx/data/engines_languages.json and searx/languages.py
import json
from pathlib import Path
from pprint import pformat
from babel import Locale, UnknownLocaleError
from babel.languages import get_global
from searx import searx_dir, settings
from searx.engines import engines, initialize_engines
# Output files: the fetched per-engine language data (JSON) and the generated
# searx/languages.py module.
engines_languages_file = Path(searx_dir) / "data" / "engines_languages.json"
languages_file = Path(searx_dir) / "languages.py"
# Fetches supported languages for each engine and writes json file with those.
def fetch_supported_languages():
    """Fetch the supported languages of every engine and dump them to JSON.

    Returns a dict mapping engine name -> supported languages, and writes it
    to ``engines_languages_file``.
    """
    engines_languages = {}
    for engine_name in sorted(engines):
        if hasattr(engines[engine_name], "fetch_supported_languages"):
            engines_languages[engine_name] = engines[
                engine_name
            ].fetch_supported_languages()
            print(
                "fetched %s languages from engine %s"
                % (len(engines_languages[engine_name]), engine_name)
            )
            # fix: use isinstance() instead of the type() == list anti-pattern;
            # sort list results for a stable, diff-friendly JSON file
            if isinstance(engines_languages[engine_name], list):
                engines_languages[engine_name] = sorted(engines_languages[engine_name])
    # write json file
    with open(engines_languages_file, "w", encoding="utf-8") as f:
        json.dump(engines_languages, f, indent=2, sort_keys=True)
    return engines_languages
# Get babel Locale object from lang_code if possible.
def get_locale(lang_code):
    """Return a babel Locale for lang_code, or None if it cannot be parsed."""
    try:
        return Locale.parse(lang_code, sep="-")
    except (UnknownLocaleError, ValueError):
        return None
# Join all language lists.
def join_language_lists(engines_languages):
    """Merge the per-engine language lists into one dict keyed by short
    language code; each entry tracks the supporting engines ("counter")
    and its country-specific variants ("countries")."""
    language_list = dict()
    for engine_name in engines_languages:
        for lang_code in engines_languages[engine_name]:
            # apply custom fixes if necessary
            if (
                lang_code
                in getattr(engines[engine_name], "language_aliases", {}).values()
            ):
                lang_code = next(
                    lc
                    for lc, alias in engines[engine_name].language_aliases.items()
                    if lang_code == alias
                )
            locale = get_locale(lang_code)
            # ensure that lang_code uses standard language and country codes
            if locale and locale.territory:
                lang_code = "{lang}-{country}".format(
                    lang=locale.language, country=locale.territory
                )
            short_code = lang_code.split("-")[0]
            # add language without country if not in list
            if short_code not in language_list:
                if locale:
                    # get language's data from babel's Locale object
                    language_name = locale.get_language_name().title()
                    english_name = locale.english_name.split(" (")[0]
                elif short_code in engines_languages["wikipedia"]:
                    # get language's data from wikipedia if not known by babel
                    language_name = engines_languages["wikipedia"][short_code]["name"]
                    english_name = engines_languages["wikipedia"][short_code][
                        "english_name"
                    ]
                else:
                    language_name = None
                    english_name = None
                # add language to list
                language_list[short_code] = {
                    "name": language_name,
                    "english_name": english_name,
                    "counter": set(),
                    "countries": dict(),
                }
            # add language with country if not in list
            if (
                lang_code != short_code
                and lang_code not in language_list[short_code]["countries"]
            ):
                country_name = ""
                if locale:
                    # get country name from babel's Locale object
                    country_name = locale.get_territory_name()
                language_list[short_code]["countries"][lang_code] = {
                    "country_name": country_name,
                    "counter": set(),
                }
            # count engine for both language_country combination and language alone
            language_list[short_code]["counter"].add(engine_name)
            if lang_code != short_code:
                language_list[short_code]["countries"][lang_code]["counter"].add(
                    engine_name
                )
    return language_list
# Filter language list so it only includes the most supported languages and countries
def filter_language_list(all_languages):
    """Keep only the languages/countries supported by enough engines.

    A language is kept when at least ``min_engines_per_lang`` engines support
    it, or when every enabled general-purpose engine supports it.  Country
    variants need ``min_engines_per_country`` engines; when none qualify, a
    likely country is taken from babel's likely-subtags data.
    """
    min_engines_per_lang = 15
    min_engines_per_country = 10
    main_engines = [
        engine_name
        for engine_name in engines.keys()
        if "general" in engines[engine_name].categories
        and engines[engine_name].supported_languages
        and not engines[engine_name].disabled
    ]
    # filter list to include only languages supported by most engines or all default general engines
    filtered_languages = {
        code: lang
        for code, lang in all_languages.items()
        if (
            len(lang["counter"]) >= min_engines_per_lang
            or all(main_engine in lang["counter"] for main_engine in main_engines)
        )
    }

    def _copy_lang_data(lang, country_name=None):
        # shallow copy of a language entry, without the working "counter"
        # and "countries" bookkeeping fields
        new_dict = dict()
        new_dict["name"] = all_languages[lang]["name"]
        new_dict["english_name"] = all_languages[lang]["english_name"]
        if country_name:
            new_dict["country_name"] = country_name
        return new_dict

    # fix: removed the unused nested helper _country_count(), which referenced
    # an undefined name (sorted_countries) and would have raised NameError

    # for each language get country codes supported by most engines or at least one country code
    filtered_languages_with_countries = dict()
    for lang, lang_data in filtered_languages.items():
        countries = lang_data["countries"]
        filtered_countries = dict()
        # get language's country codes with enough supported engines
        for lang_country, country_data in countries.items():
            if len(country_data["counter"]) >= min_engines_per_country:
                filtered_countries[lang_country] = _copy_lang_data(
                    lang, country_data["country_name"]
                )
        # add language without countries too if there's more than one country to choose from
        if len(filtered_countries) > 1:
            filtered_countries[lang] = _copy_lang_data(lang)
        elif len(filtered_countries) == 1:
            # if there's only one country per language, it's not necessary to show country name
            lang_country = next(iter(filtered_countries))
            filtered_countries[lang_country]["country_name"] = None
        # if no country has enough engines try to get most likely country code from babel
        if not filtered_countries:
            lang_country = None
            subtags = get_global("likely_subtags").get(lang)
            if subtags:
                country_code = subtags.split("_")[-1]
                if len(country_code) == 2:
                    lang_country = "{lang}-{country}".format(
                        lang=lang, country=country_code
                    )
            if lang_country:
                filtered_countries[lang_country] = _copy_lang_data(lang)
            else:
                filtered_countries[lang] = _copy_lang_data(lang)
        filtered_languages_with_countries.update(filtered_countries)
    return filtered_languages_with_countries
# Write languages.py.
def write_languages_file(languages):
    """Generate searx/languages.py from the filtered language dict."""
    file_headers = (
        "# -*- coding: utf-8 -*-",
        "# list of language codes",
        "# this file is generated automatically by utils/fetch_languages.py",
        "language_codes =",
    )
    # (code, native name, country name, english name) per language, sorted
    language_codes = tuple(
        [
            (
                code,
                languages[code]["name"].split(" (")[0],
                languages[code].get("country_name") or "",
                languages[code].get("english_name") or "",
            )
            for code in sorted(languages)
        ]
    )
    with open(languages_file, "w") as new_file:
        file_content = "{file_headers} \\\n{language_codes}".format(
            file_headers="\n".join(file_headers),
            language_codes=pformat(language_codes, indent=4),
        )
        new_file.write(file_content)
        # fix: removed the redundant new_file.close() -- the with-statement
        # already closes the file on exit
if __name__ == "__main__":
    # Regenerate data/engines_languages.json and searx/languages.py
    initialize_engines(settings["engines"])
    engines_languages = fetch_supported_languages()
    all_languages = join_language_lists(engines_languages)
    filtered_languages = filter_language_list(all_languages)
    write_languages_file(filtered_languages)
|
tools | code_generator | #
# Copyright 2013-2014 Free Software Foundation, Inc.
#
# This file is part of GNU Radio
#
# SPDX-License-Identifier: GPL-3.0-or-later
#
#
""" A code generator (needed by ModToolAdd) """
from mako.template import Template
from ..templates import Templates
from .util_functions import (
str_to_fancyc_comment,
str_to_python_comment,
strip_arg_types,
strip_arg_types_grc,
strip_default_values,
)
# Maps a ModTool block type to the gr:: base class the generated block derives
# from; "noblock" maps to an empty string (no GNU Radio base class).
GRTYPELIST = {
    "sync": "sync_block",
    "sink": "sync_block",
    "source": "sync_block",
    "decimator": "sync_decimator",
    "interpolator": "sync_interpolator",
    "general": "block",
    "tagged_stream": "tagged_stream_block",
    "hier": "hier_block2",
    "noblock": "",
}
def render_template(tpl_id, **kwargs):
    """Return the parsed and rendered template given by tpl_id"""
    template = Template(Templates[tpl_id])
    # expose the string-manipulation helpers to the template namespace
    kwargs.update(
        {
            "str_to_fancyc_comment": str_to_fancyc_comment,
            "str_to_python_comment": str_to_python_comment,
            "strip_default_values": strip_default_values,
            "strip_arg_types": strip_arg_types,
            "strip_arg_types_grc": strip_arg_types_grc,
        }
    )
    kwargs["grblocktype"] = GRTYPELIST[kwargs["blocktype"]]
    # in-tree components and GNU Radio 3.10 use the gnuradio/<mod> include layout
    if kwargs["is_component"] or kwargs["version"] in ["310"]:
        prefix = "gnuradio/" + kwargs["modname"]
    else:
        prefix = kwargs["modname"]
    kwargs["include_dir_prefix"] = prefix
    return template.render(**kwargs)
|
network | cookie_jar | # -*- coding: utf-8 -*-
import time
from datetime import timedelta
class CookieJar:
    """Stores cookies for a plugin/account in Netscape cookie-file format:
    tab-separated fields (domain, flag, path, secure, expiry, name, value)."""

    # default cookie lifetime in seconds
    RETENTION_SECONDS = timedelta(days=31).total_seconds()  #: 31 days retention

    def __init__(self, pluginname, account=None):
        self.cookies = {}  # cookie name -> raw tab-separated cookie line
        self.plugin = pluginname
        self.account = account

    def add_cookies(self, clist):
        """Add raw cookie lines, keyed by the cookie name (field 5)."""
        for c in clist:
            name = c.split("\t")[5]
            self.cookies[name] = c

    def get_cookies(self):
        """Return all stored raw cookie lines."""
        return list(self.cookies.values())

    def parse_cookie(self, name):
        """Return the value (field 6) of cookie `name`, or None if unknown."""
        if name in self.cookies:
            return self.cookies[name].split("\t")[6]
        else:
            return None

    def get_cookie(self, name):
        """Alias of parse_cookie()."""
        return self.parse_cookie(name)

    def set_cookie(self, domain, name, value, path="/", exp=None):
        """Store a cookie.

        `exp` is the expiry as a unix timestamp.  Fix: the previous default
        (``time.time() + ...`` in the signature) was evaluated once at import
        time, freezing the expiry for the process lifetime; it is now computed
        per call when `exp` is omitted.
        """
        if exp is None:
            exp = time.time() + self.RETENTION_SECONDS
        self.cookies[name] = f".{domain}\tTRUE\t{path}\tFALSE\t{exp}\t{name}\t{value}"

    def clear(self):
        """Remove all cookies."""
        self.cookies = {}
|
draftmake | make_line | # ***************************************************************************
# * Copyright (c) 2009, 2010 Yorik van Havre <yorik@uncreated.net> *
# * Copyright (c) 2009, 2010 Ken Cline <cline@frii.com> *
# * Copyright (c) 2020 FreeCAD Developers *
# * *
# * This program is free software; you can redistribute it and/or modify *
# * it under the terms of the GNU Lesser General Public License (LGPL) *
# * as published by the Free Software Foundation; either version 2 of *
# * the License, or (at your option) any later version. *
# * for detail see the LICENCE text file. *
# * *
# * This program is distributed in the hope that it will be useful, *
# * but WITHOUT ANY WARRANTY; without even the implied warranty of *
# * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *
# * GNU Library General Public License for more details. *
# * *
# * You should have received a copy of the GNU Library General Public *
# * License along with this program; if not, write to the Free Software *
# * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 *
# * USA *
# * *
# ***************************************************************************
"""Provides functions to create two-point Wire objects."""
## @package make_line
# \ingroup draftmake
# \brief Provides functions to create two-point Wire objects.
import draftmake.make_wire as make_wire
## \addtogroup draftmake
# @{
import FreeCAD as App
def make_line(first_param, last_param=None):
    """makeLine(first_param, p2)

    Creates a line from 2 points or from a given object.

    Parameters
    ----------
    first_param :
        Base.Vector -> First point of the line (if p2 is None)
        Part.LineSegment -> Line is created from the given Linesegment
        Shape -> Line is created from the give Shape

    last_param : Base.Vector
        Second point of the line, if not set the function evaluates
        the first_param to look for a Part.LineSegment or a Shape
    """
    if last_param:
        # two points given explicitly
        p1 = first_param
        p2 = last_param
    else:
        if hasattr(first_param, "StartPoint") and hasattr(first_param, "EndPoint"):
            # a Part.LineSegment (or similar bounded curve) was given
            p2 = first_param.EndPoint
            p1 = first_param.StartPoint
        elif hasattr(first_param, "Vertexes"):
            # a Shape was given; use its first and last vertices.
            # Fix: this branch previously tested hasattr(p1, "Vertexes"),
            # but p1 is undefined at this point -- it raised a NameError.
            p2 = first_param.Vertexes[-1].Point
            p1 = first_param.Vertexes[0].Point
        else:
            _err = "Unable to create a line from the given parameters"
            App.Console.PrintError(_err + "\n")
            return
    obj = make_wire.make_wire([p1, p2])
    return obj
makeLine = make_line  # old camelCase name kept as a backwards-compatibility alias
## @}
|
file-list | gio_wrapper | from __future__ import absolute_import
from gi.repository import Gio, GLib, GObject
from sunflower.plugin_base.provider import Mode
# GFile.read_bytes() has upper limit for size of G_MAXSSIZE (9223372036854775807) which is unsensibly large
MAX_READ_FILE_SIZE = 4 * 1024 * 1024 * 1024
class File:
    """This is a wrapper class that provides file-like object but
    uses Gio.File for actual operations."""

    def __init__(self, path, mode):
        # Open the underlying Gio stream according to the requested mode.
        if mode == Mode.READ:
            self._resource = Gio.File.new_for_commandline_arg(path).read()
        elif mode == Mode.WRITE:
            # Gio has no truncating create; remove any existing file first.
            if Gio.File.new_for_commandline_arg(path).query_exists():
                Gio.File.new_for_commandline_arg(path).delete()
            self._resource = Gio.File.new_for_commandline_arg(path).create()
        elif mode == Mode.APPEND:
            self._resource = Gio.File.new_for_commandline_arg(path).append_to()

    def __enter__(self):
        """Set opened file as runtime context"""
        return self._resource

    def __exit__(self, exc_type, exc_val, exc_tb):
        """Close file on exit from context"""
        self.close()

    def close(self):
        """Close file"""
        self._resource.close()

    def closed(self):
        """Return whether the file is closed"""
        # Fix: the result was previously computed but never returned,
        # so closed() always yielded None.
        return self._resource.is_closed()

    def flush(self):
        """Flush internal buffer"""
        if hasattr(self._resource, "flush"):
            self._resource.flush()

    def read(self, size=MAX_READ_FILE_SIZE):
        """Read at most _size_ bytes from the file"""
        result = self._resource.read_bytes(size)
        # NOTE(review): Gio.InputStream.read_bytes returns a GLib.Bytes; the
        # `is True` guard looks like it targets a boolean return from an older
        # binding -- confirm against the PyGObject version in use.
        if result is True:
            result = ""
        return result.get_data()

    def seek(self, offset, whence=0):
        """Set the file's current position"""
        # map Python whence (0=SET, 1=CUR, 2=END) to GLib.SeekType values
        relative = (1, 0, 2)[whence]
        if self._resource.can_seek():
            self._resource.seek(offset, relative)

    def tell(self):
        """Return file's current position"""
        return self._resource.tell()

    def truncate(self, size=None):
        """Truncate the file's size"""
        # default: truncate at the current position
        if size is None:
            size = self.tell()
        if self._resource.can_truncate():
            self._resource.truncate(size)

    def write(self, buff):
        """Write string to the file"""
        self._resource.write(buff)
|
commands | generate_preview_images | """ Generate preview images """
from bookwyrm import models, preview_images
from django.core.management.base import BaseCommand
# pylint: disable=line-too-long
class Command(BaseCommand):
    """Creates previews for existing objects"""

    help = "Generate preview images"

    # pylint: disable=no-self-use
    def add_arguments(self, parser):
        """options for how the command is run"""
        parser.add_argument(
            "--all",
            "-a",
            action="store_true",
            help="Generates images for ALL types: site, users and books. Can use a lot of computing power.",
        )

    # pylint: disable=no-self-use,unused-argument
    def handle(self, *args, **options):
        """generate preview images"""
        everything = options["all"]
        self.stdout.write(
            " | Hello! I will be generating preview images for your instance."
        )
        if everything:
            self.stdout.write(
                "🧑🎨 ⎨ This might take quite long if your instance has a lot of books and users."
            )
            self.stdout.write(" | ✧ Thank you for your patience ✧")
        else:
            self.stdout.write("🧑🎨 ⎨ I will only generate the instance preview image.")
            self.stdout.write(" | ✧ Be right back! ✧")

        self._generate_site_preview()
        if everything:
            self._generate_user_previews()
            self._generate_book_previews()
        self.stdout.write("🧑🎨 ⎨ I’m all done! ✧ Enjoy ✧")

    def _generate_site_preview(self):
        """Queue the instance-wide preview image task."""
        self.stdout.write(" → Site preview image: ", ending="")
        preview_images.generate_site_preview_image_task.delay()
        self.stdout.write(" OK 🖼")

    def _generate_user_previews(self):
        """Queue a preview image task for every active local user."""
        users = models.User.objects.filter(
            local=True,
            is_active=True,
        )
        self.stdout.write(f" → User preview images ({len(users)}): ", ending="")
        for user in users:
            preview_images.generate_user_preview_image_task.delay(user.id)
            self.stdout.write(".", ending="")
        self.stdout.write(" OK 🖼")

    def _generate_book_previews(self):
        """Queue a preview image task for every edition."""
        book_ids = (
            models.Book.objects.select_subclasses()
            .filter()
            .values_list("id", flat=True)
        )
        self.stdout.write(f" → Book preview images ({len(book_ids)}): ", ending="")
        for book_id in book_ids:
            preview_images.generate_edition_preview_image_task.delay(book_id)
            self.stdout.write(".", ending="")
        self.stdout.write(" OK 🖼")
|
machine | keymap | import json
from collections import OrderedDict, defaultdict
from plover import log
class Keymap:
    """Bidirectional mapping between machine keys and steno actions.

    Maintains two synchronized views of the configuration:
    ``_mappings`` (action -> tuple of keys, in action declaration order)
    and ``_bindings`` (key -> action). Invalid configurations are
    reported through the log but never raise.
    """

    def __init__(self, keys, actions):
        """Create a keymap supporting the given *keys* and *actions*.

        The declaration order of both is remembered (as ordinals) so
        mappings and key tuples keep a stable, machine-defined order.
        """
        # List of supported actions.
        self._actions = OrderedDict((action, n) for n, action in enumerate(actions))
        # "no-op" is always supported, appended as the last action.
        self._actions["no-op"] = len(self._actions)
        # List of supported keys.
        self._keys = OrderedDict((key, n) for n, key in enumerate(keys))
        # action -> keys
        self._mappings = {}
        # key -> action
        self._bindings = {}

    def get_keys(self):
        """Return the supported keys, in declaration order."""
        return self._keys.keys()

    def get_actions(self):
        """Return the supported actions, in declaration order."""
        return self._actions.keys()

    def set_bindings(self, bindings):
        """Set the keymap from a ``{key: action}`` dictionary."""
        # Set from:
        # { key1: action1, key2: action1, ... keyn: actionn }
        mappings = defaultdict(list)
        for key, action in dict(bindings).items():
            mappings[action].append(key)
        self.set_mappings(mappings)

    def set_mappings(self, mappings):
        """Set the keymap from an action -> key list mapping.

        Accepts a JSON string of ``[[action, [key, ...]], ...]`` pairs, a
        dictionary, or any iterable of such pairs. Unknown actions,
        unknown keys and multiply-bound keys are reported via the log.
        """
        # When setting from a string, assume a list of mappings:
        # [[action1, [key1, key2]], [action2, [key3]], ...]
        if isinstance(mappings, str):
            mappings = json.loads(mappings)
        mappings = dict(mappings)
        # Set from:
        # { action1: [key1, key2], ... actionn: [keyn] }
        self._mappings = OrderedDict()
        self._bindings = {}
        bound_keys = defaultdict(list)
        errors = []
        for action in self._actions:
            key_list = mappings.get(action)
            if not key_list:
                # Not an issue if 'no-op' is not mapped...
                if action != "no-op":
                    errors.append("action %s is not bound" % action)
                # Add dummy mapping for each missing action
                # so it's shown in the configurator.
                self._mappings[action] = ()
                continue
            if isinstance(key_list, str):
                key_list = (key_list,)
            valid_key_list = []
            for key in key_list:
                if key not in self._keys:
                    errors.append("invalid key %s bound to action %s" % (key, action))
                    continue
                valid_key_list.append(key)
                bound_keys[key].append(action)
                self._bindings[key] = action
            # Keep each action's keys sorted in machine key order.
            self._mappings[action] = tuple(sorted(valid_key_list, key=self._keys.get))
        for action in set(mappings) - set(self._actions):
            key_list = mappings.get(action)
            if isinstance(key_list, str):
                key_list = (key_list,)
            errors.append(
                "invalid action %s mapped to key(s) %s" % (action, " ".join(key_list))
            )
        for key, action_list in bound_keys.items():
            if len(action_list) > 1:
                errors.append(
                    "key %s is bound multiple times: %s" % (key, str(action_list))
                )
        if len(errors) > 0:
            log.warning(
                "Keymap is invalid, behavior undefined:\n\n- " + "\n- ".join(errors)
            )

    def get_bindings(self):
        """Return the key -> action dictionary."""
        return self._bindings

    def get_mappings(self):
        """Return the action -> keys dictionary."""
        return self._mappings

    def get_action(self, key, default=None):
        """Return the action bound to *key*, or *default* if unbound."""
        return self._bindings.get(key, default)

    def keys_to_actions(self, key_list):
        """Translate keys to their bound actions, dropping no-ops.

        Raises AssertionError if a key is not supported by this keymap.
        """
        action_list = []
        for key in key_list:
            assert key in self._keys, "'%s' not in %s" % (key, self._keys)
            action = self._bindings[key]
            if "no-op" != action:
                action_list.append(action)
        return action_list

    def keys(self):
        return self._mappings.keys()

    def values(self):
        return self._mappings.values()

    def __len__(self):
        return len(self._mappings)

    def __getitem__(self, key):
        return self._mappings[key]

    def __setitem__(self, action, key_list):
        """Rebind *action* to *key_list*, detaching its previous keys."""
        assert action in self._actions
        if isinstance(key_list, str):
            key_list = (key_list,)
        # Delete previous bindings.
        if action in self._mappings:
            for old_key in self._mappings[action]:
                if old_key in self._bindings:
                    del self._bindings[old_key]
        errors = []
        valid_key_list = []
        for key in key_list:
            if key not in self._keys:
                errors.append("invalid key %s bound to action %s" % (key, action))
                continue
            if key in self._bindings:
                errors.append(
                    "key %s is already bound to: %s" % (key, self._bindings[key])
                )
                continue
            valid_key_list.append(key)
            self._bindings[key] = action
        self._mappings[action] = tuple(sorted(valid_key_list, key=self._keys.get))
        if len(errors) > 0:
            log.warning(
                "Keymap is invalid, behavior undefined:\n\n- " + "\n- ".join(errors)
            )

    def __iter__(self):
        return iter(self._mappings)

    def __eq__(self, other):
        # BUG FIX: comparing against a non-Keymap object used to raise
        # AttributeError (no get_mappings); return NotImplemented so
        # Python can fall back to its default comparison instead.
        if not isinstance(other, Keymap):
            return NotImplemented
        return self.get_mappings() == other.get_mappings()

    def __str__(self):
        # Use the more compact list of mappings format:
        # [[action1, [key1, key2]], [action2, [key3]], ...]
        return json.dumps(list(self._mappings.items()))
|
plugins | twitch | """
$description Global live-streaming and video hosting social platform owned by Amazon.
$url twitch.tv
$type live, vod
$metadata id
$metadata author
$metadata category
$metadata title
$notes See the :ref:`Authentication <cli/plugins/twitch:Authentication>` docs on how to prevent ads.
$notes Read more about :ref:`embedded ads <cli/plugins/twitch:Embedded ads>` here.
$notes :ref:`Low latency streaming <cli/plugins/twitch:Low latency streaming>` is supported.
$notes Acquires a :ref:`client-integrity token <cli/plugins/twitch:Client-integrity token>` on streaming access token failure.
"""
import argparse
import base64
import logging
import re
import sys
from datetime import datetime, timedelta
from json import dumps as json_dumps
from random import random
from typing import List, Mapping, NamedTuple, Optional, Tuple
from urllib.parse import urlparse
from streamlink.exceptions import NoStreamsError, PluginError
from streamlink.plugin import Plugin, pluginargument, pluginmatcher
from streamlink.plugin.api import validate
from streamlink.session import Streamlink
from streamlink.stream.hls import HLSStream, HLSStreamReader, HLSStreamWorker, HLSStreamWriter
from streamlink.stream.hls_playlist import M3U8, ByteRange, DateRange, ExtInf, Key, M3U8Parser, Map
from streamlink.stream.hls_playlist import load as load_hls_playlist
from streamlink.stream.hls_playlist import parse_tag
from streamlink.stream.http import HTTPStream
from streamlink.utils.args import keyvalue
from streamlink.utils.parse import parse_json, parse_qsd
from streamlink.utils.random import CHOICES_ALPHA_NUM, random_token
from streamlink.utils.times import fromtimestamp, hours_minutes_seconds_float
from streamlink.utils.url import update_qsd
log = logging.getLogger(__name__)
LOW_LATENCY_MAX_LIVE_EDGE = 2
class TwitchSegment(NamedTuple):
    """A single HLS media segment, extended with Twitch-specific flags."""

    uri: str
    duration: float
    title: Optional[str]
    key: Optional[Key]
    discontinuity: bool
    byterange: Optional[ByteRange]
    date: Optional[datetime]
    map: Optional[Map]
    # True if this segment was classified as an advertisement
    ad: bool
    # True if this segment came from an EXT-X-TWITCH-PREFETCH tag
    prefetch: bool
# generic namedtuples are unsupported, so just subclass
class TwitchSequence(NamedTuple):
    """A numbered playlist entry pairing a media sequence number with its segment."""

    num: int
    segment: TwitchSegment
class TwitchM3U8(M3U8):
    """M3U8 playlist that additionally tracks ad-related date ranges."""

    segments: List[TwitchSegment]  # type: ignore[assignment]

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Date ranges identified as stitched-in advertisements
        self.dateranges_ads = []
class TwitchM3U8Parser(M3U8Parser):
    """M3U8 parser extended with Twitch's prefetch and ad-daterange tags."""

    m3u8: TwitchM3U8

    @parse_tag("EXT-X-TWITCH-PREFETCH")
    def parse_tag_ext_x_twitch_prefetch(self, value):
        """Append a low-latency prefetch segment derived from the last segment."""
        segments = self.m3u8.segments
        if not segments:  # pragma: no cover
            return
        last = segments[-1]
        # Use the average duration of all regular segments for the duration of prefetch segments.
        # This is better than using the duration of the last segment when regular segment durations vary a lot.
        # In low latency mode, the playlist reload time is the duration of the last segment.
        duration = last.duration if last.prefetch else sum(segment.duration for segment in segments) / float(len(segments))
        # Use the last duration for extrapolating the start time of the prefetch segment, which is needed for checking
        # whether it is an ad segment and matches the parsed date ranges or not
        date = last.date + timedelta(seconds=last.duration)
        # Don't reset the discontinuity state in prefetch segments (at the bottom of the playlist)
        discontinuity = self._discontinuity
        # Always treat prefetch segments after a discontinuity as ad segments
        ad = discontinuity or self._is_segment_ad(date)
        segment = last._replace(
            uri=self.uri(value),
            duration=duration,
            title=None,
            discontinuity=discontinuity,
            date=date,
            ad=ad,
            prefetch=True,
        )
        segments.append(segment)

    @parse_tag("EXT-X-DATERANGE")
    def parse_tag_ext_x_daterange(self, value):
        """Collect date ranges that describe stitched-in ads."""
        super().parse_tag_ext_x_daterange(value)
        daterange = self.m3u8.dateranges[-1]
        if self._is_daterange_ad(daterange):
            self.m3u8.dateranges_ads.append(daterange)

    # TODO: fix this mess by switching to segment dataclasses with inheritance
    def get_segment(self, uri: str) -> TwitchSegment:  # type: ignore[override]
        """Build a TwitchSegment from the parser's accumulated per-segment state."""
        extinf: ExtInf = self._extinf or ExtInf(0, None)
        # Reset one-shot tag state so it doesn't leak into the next segment
        self._extinf = None
        discontinuity = self._discontinuity
        self._discontinuity = False
        byterange = self._byterange
        self._byterange = None
        date = self._date
        self._date = None
        ad = self._is_segment_ad(date, extinf.title)
        return TwitchSegment(
            uri=uri,
            duration=extinf.duration,
            title=extinf.title,
            key=self._key,
            discontinuity=discontinuity,
            byterange=byterange,
            date=date,
            map=self._map,
            ad=ad,
            prefetch=False,
        )

    def _is_segment_ad(self, date: Optional[datetime], title: Optional[str] = None) -> bool:
        """Return True if the segment title or an ad date range marks it as an ad."""
        return (
            title is not None
            and "Amazon" in title
            or any(self.m3u8.is_date_in_daterange(date, daterange) for daterange in self.m3u8.dateranges_ads)
        )

    @staticmethod
    def _is_daterange_ad(daterange: DateRange) -> bool:
        """Return True if a date range marks a stitched-in advertisement."""
        return (
            daterange.classname == "twitch-stitched-ad"
            or str(daterange.id or "").startswith("stitched-ad-")
            or any(attr_key.startswith("X-TV-TWITCH-AD-") for attr_key in daterange.x.keys())
        )
class TwitchHLSStreamWorker(HLSStreamWorker):
    """HLS worker adding low-latency prefetching and pre-roll ad detection."""

    reader: "TwitchHLSStreamReader"
    writer: "TwitchHLSStreamWriter"
    stream: "TwitchHLSStream"

    def __init__(self, reader, *args, **kwargs):
        # Set to True once any non-ad segment has been seen (used to tell
        # pre-roll-ad-only playlists apart from regular content).
        self.had_content = False
        super().__init__(reader, *args, **kwargs)

    def _reload_playlist(self, *args):
        """Reload playlists using the Twitch-aware parser and playlist classes."""
        return load_hls_playlist(*args, parser=TwitchM3U8Parser, m3u8=TwitchM3U8)

    def _playlist_reload_time(self, playlist: TwitchM3U8, sequences: List[TwitchSequence]):  # type: ignore[override]
        """In low-latency mode, reload as often as the last segment's duration."""
        if self.stream.low_latency and sequences:
            return sequences[-1].segment.duration
        return super()._playlist_reload_time(playlist, sequences)  # type: ignore[arg-type]

    def process_sequences(self, playlist: TwitchM3U8, sequences: List[TwitchSequence]):  # type: ignore[override]
        """Filter and inspect sequences before handing them to the base worker."""
        # ignore prefetch segments if not LL streaming
        if not self.stream.low_latency:
            sequences = [seq for seq in sequences if not seq.segment.prefetch]
        # check for sequences with real content
        if not self.had_content:
            self.had_content = next((True for seq in sequences if not seq.segment.ad), False)
        # When filtering ads, to check whether it's a LL stream, we need to wait for the real content to show up,
        # since playlists with only ad segments don't contain prefetch segments
        if (
            self.stream.low_latency
            and self.had_content
            and not next((True for seq in sequences if seq.segment.prefetch), False)
        ):
            log.info("This is not a low latency stream")
        # show pre-roll ads message only on the first playlist containing ads
        if self.stream.disable_ads and self.playlist_sequence == -1 and not self.had_content:
            log.info("Waiting for pre-roll ads to finish, be patient")
        return super().process_sequences(playlist, sequences)  # type: ignore[arg-type]
class TwitchHLSStreamWriter(HLSStreamWriter):
    """HLS writer that drops ad segments when ad filtering is enabled."""

    reader: "TwitchHLSStreamReader"
    stream: "TwitchHLSStream"

    def should_filter_sequence(self, sequence: TwitchSequence):  # type: ignore[override]
        # Skip writing a sequence if it was classified as an ad.
        return self.stream.disable_ads and sequence.segment.ad
class TwitchHLSStreamReader(HLSStreamReader):
    """HLS reader wiring up the Twitch-specific worker and writer."""

    __worker__ = TwitchHLSStreamWorker
    __writer__ = TwitchHLSStreamWriter

    worker: "TwitchHLSStreamWorker"
    writer: "TwitchHLSStreamWriter"
    stream: "TwitchHLSStream"

    def __init__(self, stream: "TwitchHLSStream"):
        if stream.disable_ads:
            log.info("Will skip ad segments")
        if stream.low_latency:
            # Cap the live edge (but keep it >= 1) so prefetch segments
            # at the end of the playlist are actually used.
            live_edge = max(1, min(LOW_LATENCY_MAX_LIVE_EDGE, stream.session.options.get("hls-live-edge")))
            stream.session.options.set("hls-live-edge", live_edge)
            stream.session.options.set("hls-segment-stream-data", True)
            log.info(f"Low latency streaming (HLS live edge: {live_edge})")
        super().__init__(stream)
class TwitchHLSStream(HLSStream):
    """HLS stream with optional ad filtering and low-latency mode."""

    __reader__ = TwitchHLSStreamReader

    def __init__(self, *args, disable_ads: bool = False, low_latency: bool = False, **kwargs):
        super().__init__(*args, **kwargs)
        # Whether ad segments should be filtered out of the output
        self.disable_ads = disable_ads
        # Whether prefetch segments should be used for low-latency playback
        self.low_latency = low_latency
class UsherService:
    """Builds playlist URLs on usher.ttvnw.net for channels and VODs."""

    def __init__(self, session):
        self.session = session

    def _create_url(self, endpoint, **extra_params):
        """Return a fully prepared usher URL for *endpoint* with default params."""
        query = {
            "player": "twitchweb",
            "p": int(random() * 999999),
            "type": "any",
            "allow_source": "true",
            "allow_audio_only": "true",
            "allow_spectre": "false",
            **extra_params,
        }
        prepared = self.session.http.prepare_new_request(
            url=f"https://usher.ttvnw.net{endpoint}",
            params=query,
        )
        return prepared.url

    def channel(self, channel, **extra_params):
        """Return the live-playlist URL for *channel*, logging token debug info."""
        try:
            token_debug = validate.Schema(
                validate.get("token"),
                validate.parse_json(),
                {
                    "adblock": bool,
                    "geoblock_reason": str,
                    "hide_ads": bool,
                    "server_ads": bool,
                    "show_ads": bool,
                },
            ).validate(extra_params)
            log.debug(f"{token_debug!r}")
        except PluginError:
            pass
        return self._create_url(f"/api/channel/hls/{channel}.m3u8", **extra_params)

    def video(self, video_id, **extra_params):
        """Return the VOD-playlist URL for *video_id*."""
        return self._create_url(f"/vod/{video_id}", **extra_params)
class TwitchAPI:
    """Thin client for Twitch's private GraphQL API (gql.twitch.tv)."""

    # Client-ID used by Twitch's own web player
    CLIENT_ID = "kimne78kx3ncx6brgo4mv6wki5h1ko"

    def __init__(self, session, api_header=None, access_token_param=None):
        self.session = session
        self.headers = {
            "Client-ID": self.CLIENT_ID,
        }
        # Extra headers from --twitch-api-header (e.g. user authentication)
        self.headers.update(**dict(api_header or []))
        # Extra variables from --twitch-access-token-param
        self.access_token_params = dict(access_token_param or [])
        self.access_token_params.setdefault("playerType", "embed")

    def call(self, data, schema=None, **kwargs):
        """POST a GQL query (or list of queries) and validate the JSON response."""
        res = self.session.http.post(
            "https://gql.twitch.tv/gql",
            json=data,
            headers={**self.headers, **kwargs.pop("headers", {})},
            **kwargs,
        )
        return self.session.http.json(res, schema=schema)

    @staticmethod
    def _gql_persisted_query(operationname, sha256hash, **variables):
        """Build a GQL persisted-query payload (operation name + query hash + variables)."""
        return {
            "operationName": operationname,
            "extensions": {
                "persistedQuery": {
                    "version": 1,
                    "sha256Hash": sha256hash,
                },
            },
            "variables": dict(**variables),
        }

    @staticmethod
    def parse_token(tokenstr):
        """Extract subscriber-only bitrate names from a playback access token."""
        return parse_json(
            tokenstr,
            schema=validate.Schema(
                {
                    "chansub": {
                        "restricted_bitrates": validate.all(
                            [str],
                            validate.filter(lambda n: not re.match(r"(.+_)?archives|live|chunked", n)),
                        )
                    }
                },
                validate.get(("chansub", "restricted_bitrates")),
            ),
        )

    # GraphQL API calls

    def metadata_video(self, video_id):
        """Fetch (id, author, category, title) metadata for a VOD."""
        query = self._gql_persisted_query(
            "VideoMetadata",
            "cb3b1eb2f2d2b2f65b8389ba446ec521d76c3aa44f5424a1b1d235fe21eb4806",
            channelLogin="",  # parameter can be empty
            videoID=video_id,
        )
        return self.call(
            query,
            schema=validate.Schema(
                {
                    "data": {
                        "video": {
                            "id": str,
                            "owner": {
                                "displayName": str,
                            },
                            "title": str,
                            "game": {
                                "displayName": str,
                            },
                        }
                    }
                },
                validate.get(("data", "video")),
                validate.union_get(
                    "id",
                    ("owner", "displayName"),
                    ("game", "displayName"),
                    "title",
                ),
            ),
        )

    def metadata_channel(self, channel):
        """Fetch (stream id, author, category, title) metadata for a live channel."""
        queries = [
            self._gql_persisted_query(
                "ChannelShell",
                "c3ea5a669ec074a58df5c11ce3c27093fa38534c94286dc14b68a25d5adcbf55",
                login=channel,
                lcpVideosEnabled=False,
            ),
            self._gql_persisted_query(
                "StreamMetadata",
                "059c4653b788f5bdb2f5a2d2a24b0ddc3831a15079001a3d927556a96fb0517f",
                channelLogin=channel,
            ),
        ]
        return self.call(
            queries,
            schema=validate.Schema(
                [
                    validate.all(
                        {
                            "data": {
                                "userOrError": {
                                    "displayName": str,
                                }
                            }
                        },
                    ),
                    validate.all(
                        {
                            "data": {
                                "user": {
                                    "lastBroadcast": {
                                        "title": str,
                                    },
                                    "stream": {
                                        "id": str,
                                        "game": {
                                            "name": str,
                                        },
                                    },
                                }
                            }
                        },
                    ),
                ],
                validate.union_get(
                    (1, "data", "user", "stream", "id"),
                    (0, "data", "userOrError", "displayName"),
                    (1, "data", "user", "stream", "game", "name"),
                    (1, "data", "user", "lastBroadcast", "title"),
                ),
            ),
        )

    def metadata_clips(self, clipname):
        """Fetch (id, author, category, title) metadata for a clip."""
        queries = [
            self._gql_persisted_query(
                "ClipsView",
                "4480c1dcc2494a17bb6ef64b94a5213a956afb8a45fe314c66b0d04079a93a8f",
                slug=clipname,
            ),
            self._gql_persisted_query(
                "ClipsTitle",
                "f6cca7f2fdfbfc2cecea0c88452500dae569191e58a265f97711f8f2a838f5b4",
                slug=clipname,
            ),
        ]
        return self.call(
            queries,
            schema=validate.Schema(
                [
                    validate.all(
                        {
                            "data": {
                                "clip": {
                                    "id": str,
                                    "broadcaster": {"displayName": str},
                                    "game": {"name": str},
                                }
                            }
                        },
                        validate.get(("data", "clip")),
                    ),
                    validate.all(
                        {"data": {"clip": {"title": str}}},
                        validate.get(("data", "clip")),
                    ),
                ],
                validate.union_get(
                    (0, "id"),
                    (0, "broadcaster", "displayName"),
                    (0, "game", "name"),
                    (1, "title"),
                ),
            ),
        )

    def access_token(self, is_live, channel_or_vod, client_integrity: Optional[Tuple[str, str]] = None):
        """Request a playback access token for a channel or VOD.

        Returns a tagged tuple: ("token", signature, value) on success or
        ("error", error, message) on an API error response.
        """
        query = self._gql_persisted_query(
            "PlaybackAccessToken",
            "0828119ded1c13477966434e15800ff57ddacf13ba1911c129dc2200705b0712",
            isLive=is_live,
            login=channel_or_vod if is_live else "",
            isVod=not is_live,
            vodID=channel_or_vod if not is_live else "",
            **self.access_token_params,
        )
        subschema = validate.none_or_all(
            {
                "value": str,
                "signature": str,
            },
            validate.union_get("signature", "value"),
        )
        headers = {}
        # Optional (device-id, client-integrity-token) pair for retries
        if client_integrity:
            headers["Device-Id"], headers["Client-Integrity"] = client_integrity
        return self.call(
            query,
            acceptable_status=(200, 400, 401, 403),
            headers=headers,
            schema=validate.Schema(
                validate.any(
                    validate.all(
                        {"errors": [{"message": str}]},
                        validate.get(("errors", 0, "message")),
                        validate.transform(lambda data: ("error", None, data)),
                    ),
                    validate.all(
                        {"error": str, "message": str},
                        validate.union_get("error", "message"),
                        validate.transform(lambda data: ("error", *data)),
                    ),
                    validate.all(
                        {
                            "data": validate.any(
                                validate.all(
                                    {"streamPlaybackAccessToken": subschema},
                                    validate.get("streamPlaybackAccessToken"),
                                ),
                                validate.all(
                                    {"videoPlaybackAccessToken": subschema},
                                    validate.get("videoPlaybackAccessToken"),
                                ),
                            ),
                        },
                        validate.get("data"),
                        validate.transform(lambda data: ("token", *data) if data is not None else ("token", None, None)),
                    ),
                ),
            ),
        )

    def clips(self, clipname):
        """Fetch a clip's access token and its (quality, URL) download variants."""
        query = self._gql_persisted_query(
            "VideoAccessToken_Clip",
            "36b89d2507fce29e5ca551df756d27c1cfe079e2609642b4390aa4c35796eb11",
            slug=clipname,
        )
        return self.call(
            query,
            schema=validate.Schema(
                {
                    "data": {
                        "clip": {
                            "playbackAccessToken": {
                                "signature": str,
                                "value": str,
                            },
                            "videoQualities": [
                                validate.all(
                                    {
                                        "frameRate": validate.transform(int),
                                        "quality": str,
                                        "sourceURL": validate.url(),
                                    },
                                    validate.transform(
                                        lambda q: (
                                            f"{q['quality']}p{q['frameRate']}",
                                            q["sourceURL"],
                                        )
                                    ),
                                )
                            ],
                        }
                    }
                },
                validate.get(("data", "clip")),
                validate.union_get(
                    ("playbackAccessToken", "signature"),
                    ("playbackAccessToken", "value"),
                    "videoQualities",
                ),
            ),
        )

    def stream_metadata(self, channel):
        """Fetch the stream object of *channel* (its "type" tells live vs rerun)."""
        query = self._gql_persisted_query(
            "StreamMetadata",
            "1c719a40e481453e5c48d9bb585d971b8b372f8ebb105b17076722264dfa5b3e",
            channelLogin=channel,
        )
        return self.call(
            query,
            schema=validate.Schema(
                {"data": {"user": {"stream": {"type": str}}}},
                validate.get(("data", "user", "stream")),
            ),
        )
class TwitchClientIntegrity:
    """Acquires Twitch's client-integrity token by driving a real browser.

    The token is issued by Twitch's anti-bot JavaScript (KPSDK), so a
    headless browser is controlled over CDP to load the script and call
    the integrity endpoint with patched fetch/headers.
    """

    # Twitch's anti-bot bootstrap script
    URL_P_SCRIPT = "https://k.twitchcdn.net/149e9513-01fa-4fb0-aad4-566afd725d1b/2d206a39-8ed7-437e-a3be-862e0f06eea3/p.js"

    # JS evaluated in the browser page; HEADERS / DEVICE_ID / SCRIPT_SOURCE
    # are textually substituted before evaluation.
    # language=javascript
    JS_INTEGRITY_TOKEN = """
    // noinspection JSIgnoredPromiseFromCall
    new Promise((resolve, reject) => {
        function configureKPSDK() {
            // noinspection JSUnresolvedVariable,JSUnresolvedFunction
            window.KPSDK.configure([{
                "protocol": "https:",
                "method": "POST",
                "domain": "gql.twitch.tv",
                "path": "/integrity"
            }]);
        }

        async function fetchIntegrity() {
            // noinspection JSUnresolvedReference
            const headers = Object.assign(HEADERS, {"x-device-id": "DEVICE_ID"});
            // window.fetch gets overridden and the patched function needs to be used
            const resp = await window.fetch("https://gql.twitch.tv/integrity", {
                "headers": headers,
                "body": null,
                "method": "POST",
                "mode": "cors",
                "credentials": "omit"
            });
            if (resp.status !== 200) {
                throw new Error(`Unexpected integrity response status code ${resp.status}`);
            }
            return JSON.stringify(await resp.json());
        }

        document.addEventListener("kpsdk-load", configureKPSDK, {once: true});
        document.addEventListener("kpsdk-ready", () => fetchIntegrity().then(resolve, reject), {once: true});

        const script = document.createElement("script");
        script.addEventListener("error", reject);
        script.src = "SCRIPT_SOURCE";
        document.body.appendChild(script);
    });
    """

    @classmethod
    def acquire(
        cls,
        session: Streamlink,
        channel: str,
        headers: Mapping[str, str],
        device_id: str,
    ) -> Optional[Tuple[str, int]]:
        """Launch a browser, run the KPSDK flow and return (token, expiration).

        Returns None on browser errors, an empty result, or when Twitch
        flags the client as a "bad bot".
        """
        from streamlink.webbrowser.cdp import CDPClient, CDPClientSession, devtools
        from streamlink.webbrowser.exceptions import WebbrowserError

        url = f"https://www.twitch.tv/{channel}"
        js_get_integrity_token = (
            cls.JS_INTEGRITY_TOKEN.replace("SCRIPT_SOURCE", cls.URL_P_SCRIPT)
            .replace("HEADERS", json_dumps(headers))
            .replace("DEVICE_ID", device_id)
        )
        eval_timeout = session.get_option("webbrowser-timeout")

        async def on_main(client_session: CDPClientSession, request: devtools.fetch.RequestPaused):
            # Replace the channel page's body: only the injected script matters
            async with client_session.alter_request(request) as cm:
                cm.body = "<!doctype html>"

        async def acquire_client_integrity_token(client: CDPClient):
            client_session: CDPClientSession
            async with client.session() as client_session:
                client_session.add_request_handler(on_main, url_pattern=url, on_request=True)
                async with client_session.navigate(url) as frame_id:
                    await client_session.loaded(frame_id)
                    return await client_session.evaluate(js_get_integrity_token, timeout=eval_timeout)

        try:
            client_integrity: Optional[str] = CDPClient.launch(session, acquire_client_integrity_token)
        except WebbrowserError as err:
            log.error(f"{type(err).__name__}: {err}")
            return None
        if not client_integrity:
            return None

        token, expiration = parse_json(
            client_integrity,
            schema=validate.Schema(
                {"token": str, "expiration": int},
                validate.union_get("token", "expiration"),
            ),
        )
        is_bad_bot = cls.decode_client_integrity_token(
            token,
            schema=validate.Schema(
                {"is_bad_bot": str},
                validate.get("is_bad_bot"),
                validate.transform(lambda val: val.lower() != "false"),
            ),
        )
        log.info(f"Is bad bot? {is_bad_bot}")
        if is_bad_bot:
            return None

        # NOTE(review): expiration is in milliseconds; the division yields a
        # float despite the Tuple[str, int] annotation -- confirm intent.
        return token, expiration / 1000

    @staticmethod
    def decode_client_integrity_token(data: str, schema: Optional[validate.Schema] = None):
        """Decode a PASETO v4.public client-integrity token's JSON payload."""
        if not data.startswith("v4.public."):
            raise PluginError("Invalid client-integrity token format")
        # Convert the URL-safe base64 payload back to standard base64 and re-pad
        token = data[len("v4.public.") :].replace("-", "+").replace("_", "/")
        token += "=" * ((4 - (len(token) % 4)) % 4)
        # The trailing 64 bytes are the Ed25519 signature, not JSON
        token = base64.b64decode(token.encode())[:-64].decode()
        log.debug(f"Client-Integrity token: {token}")
        return parse_json(token, exception=PluginError, schema=schema)
@pluginmatcher(
re.compile(
r"""
https?://(?:(?P<subdomain>[\w-]+)\.)?twitch\.tv/
(?:
videos/(?P<videos_id>\d+)
|
(?P<channel>[^/?]+)
(?:
/v(?:ideo)?/(?P<video_id>\d+)
|
/clip/(?P<clip_name>[^/?]+)
)?
)
""",
re.VERBOSE,
)
)
@pluginargument(
"disable-ads",
action="store_true",
help="""
Skip embedded advertisement segments at the beginning or during a stream.
Will cause these segments to be missing from the output.
""",
)
@pluginargument(
"disable-hosting",
action="store_true",
help=argparse.SUPPRESS,
)
@pluginargument(
"disable-reruns",
action="store_true",
help="Do not open the stream if the target channel is currently broadcasting a rerun.",
)
@pluginargument(
"low-latency",
action="store_true",
help=f"""
Enables low latency streaming by prefetching HLS segments.
Sets --hls-segment-stream-data to true and --hls-live-edge to `{LOW_LATENCY_MAX_LIVE_EDGE}`, if it is higher.
Reducing --hls-live-edge to `1` will result in the lowest latency possible, but will most likely cause buffering.
In order to achieve true low latency streaming during playback, the player's caching/buffering settings will
need to be adjusted and reduced to a value as low as possible, but still high enough to not cause any buffering.
This depends on the stream's bitrate and the quality of the connection to Twitch's servers. Please refer to the
player's own documentation for the required configuration. Player parameters can be set via --player-args.
Note: Low latency streams have to be enabled by the broadcasters on Twitch themselves.
Regular streams can cause buffering issues with this option enabled due to the reduced --hls-live-edge value.
""",
)
@pluginargument(
"api-header",
metavar="KEY=VALUE",
type=keyvalue,
action="append",
help="""
A header to add to each Twitch API HTTP request.
Can be repeated to add multiple headers.
Useful for adding authentication data that can prevent ads. See the plugin-specific documentation for more information.
""",
)
@pluginargument(
"access-token-param",
metavar="KEY=VALUE",
type=keyvalue,
action="append",
help="""
A parameter to add to the API request for acquiring the streaming access token.
Can be repeated to add multiple parameters.
""",
)
@pluginargument(
"purge-client-integrity",
action="store_true",
help="Purge cached Twitch client-integrity token and acquire a new one.",
)
class Twitch(Plugin):
_CACHE_KEY_CLIENT_INTEGRITY = "client-integrity"
@classmethod
def stream_weight(cls, stream):
if stream == "source":
return sys.maxsize, stream
return super().stream_weight(stream)
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
match = self.match.groupdict()
parsed = urlparse(self.url)
self.params = parse_qsd(parsed.query)
self.subdomain = match.get("subdomain")
self.video_id = None
self.channel = None
self.clip_name = None
self._checked_metadata = False
if self.subdomain == "player":
# pop-out player
if self.params.get("video"):
self.video_id = self.params["video"]
self.channel = self.params.get("channel")
elif self.subdomain == "clips":
# clip share URL
self.clip_name = match.get("channel")
else:
self.channel = match.get("channel") and match.get("channel").lower()
self.video_id = match.get("video_id") or match.get("videos_id")
self.clip_name = match.get("clip_name")
self.api = TwitchAPI(
session=self.session,
api_header=self.get_option("api-header"),
access_token_param=self.get_option("access-token-param"),
)
self.usher = UsherService(session=self.session)
def method_factory(parent_method):
def inner():
if not self._checked_metadata:
self._checked_metadata = True
self._get_metadata()
return parent_method()
return inner
parent = super()
for metadata in "id", "author", "category", "title":
method = f"get_{metadata}"
setattr(self, method, method_factory(getattr(parent, method)))
def _get_metadata(self):
try:
if self.video_id:
data = self.api.metadata_video(self.video_id)
elif self.clip_name:
data = self.api.metadata_clips(self.clip_name)
elif self.channel:
data = self.api.metadata_channel(self.channel)
else: # pragma: no cover
return
self.id, self.author, self.category, self.title = data
except (PluginError, TypeError):
pass
def _client_integrity_token(self, channel: str) -> Optional[Tuple[str, str]]:
if self.options.get("purge-client-integrity"):
log.info("Removing cached client-integrity token...")
self.cache.set(self._CACHE_KEY_CLIENT_INTEGRITY, None, 0)
client_integrity = self.cache.get(self._CACHE_KEY_CLIENT_INTEGRITY)
if client_integrity and isinstance(client_integrity, list) and len(client_integrity) == 2:
log.info("Using cached client-integrity token")
device_id, token = client_integrity
else:
log.info("Acquiring new client-integrity token...")
device_id = random_token(32, CHOICES_ALPHA_NUM)
client_integrity = TwitchClientIntegrity.acquire(
self.session,
channel,
self.api.headers,
device_id,
)
if not client_integrity:
log.warning("No client-integrity token acquired")
return None
token, expiration = client_integrity
self.cache.set(self._CACHE_KEY_CLIENT_INTEGRITY, [device_id, token], expires_at=fromtimestamp(expiration))
return device_id, token
def _access_token(self, is_live, channel_or_vod):
# try without a client-integrity token first (the web player did the same on 2023-05-31)
response, *data = self.api.access_token(is_live, channel_or_vod)
# try again with a client-integrity token if the API response was erroneous
if response != "token":
client_integrity = self._client_integrity_token(channel_or_vod) if is_live else None
response, *data = self.api.access_token(is_live, channel_or_vod, client_integrity)
# unknown API response error: abort
if response != "token":
error, message = data
raise PluginError(f"{error or 'Error'}: {message or 'Unknown error'}")
# access token response was empty: stream is offline or channel doesn't exist
if response == "token" and data[0] is None:
raise NoStreamsError
sig, token = data
try:
restricted_bitrates = self.api.parse_token(token)
except PluginError:
restricted_bitrates = []
return sig, token, restricted_bitrates
def _check_for_rerun(self):
if not self.options.get("disable_reruns"):
return False
try:
stream = self.api.stream_metadata(self.channel)
if stream["type"] != "live":
log.info("Reruns were disabled by command line option")
return True
except (PluginError, TypeError):
pass
return False
def _get_hls_streams_live(self):
if self._check_for_rerun():
return
# only get the token once the channel has been resolved
log.debug(f"Getting live HLS streams for {self.channel}")
self.session.http.headers.update(
{
"referer": "https://player.twitch.tv",
"origin": "https://player.twitch.tv",
}
)
sig, token, restricted_bitrates = self._access_token(True, self.channel)
url = self.usher.channel(self.channel, sig=sig, token=token, fast_bread=True)
return self._get_hls_streams(url, restricted_bitrates)
def _get_hls_streams_video(self):
log.debug(f"Getting HLS streams for video ID {self.video_id}")
sig, token, restricted_bitrates = self._access_token(False, self.video_id)
url = self.usher.video(self.video_id, nauthsig=sig, nauth=token)
# If the stream is a VOD that is still being recorded, the stream should start at the beginning of the recording
return self._get_hls_streams(url, restricted_bitrates, force_restart=True)
def _get_hls_streams(self, url, restricted_bitrates, **extra_params):
    """Parse the variant playlist at *url* into streams.

    Warns about qualities listed in *restricted_bitrates* that are missing
    from the playlist (they require a subscription). Returns None when the
    playlist indicates the stream is offline/gone.
    """
    time_offset = self.params.get("t", 0)
    if time_offset:
        try:
            time_offset = hours_minutes_seconds_float(time_offset)
        except ValueError:
            time_offset = 0
    try:
        streams = TwitchHLSStream.parse_variant_playlist(
            self.session,
            url,
            start_offset=time_offset,
            disable_ads=self.get_option("disable-ads"),
            low_latency=self.get_option("low-latency"),
            **extra_params,
        )
    except OSError as err:
        message = str(err)
        # offline channel / deleted VOD: not an error, just no streams
        if "404 Client Error" in message or "Failed to parse playlist" in message:
            return
        # FIX: the original rebound `err` to str(err) and then did
        # `raise PluginError(err) from err`, i.e. `raise ... from <str>`,
        # which itself raises TypeError ("exception causes must derive
        # from BaseException"). Chain from the original exception instead.
        raise PluginError(message) from err
    for name in restricted_bitrates:
        if name not in streams:
            log.warning(f"The quality '{name}' is not available since it requires a subscription.")
    return streams
def _get_clips(self):
    """Yield (quality, HTTPStream) pairs for the matched clip; yield nothing on failure."""
    try:
        sig, token, streams = self.api.clips(self.clip_name)
    except (PluginError, TypeError):
        return
    for quality, clip_url in streams:
        yield quality, HTTPStream(self.session, update_qsd(clip_url, {"sig": sig, "token": token}))
def _get_streams(self):
    """Dispatch to the VOD, clip or live-channel resolver depending on the matched URL."""
    if self.video_id:
        return self._get_hls_streams_video()
    if self.clip_name:
        return self._get_clips()
    if self.channel:
        return self._get_hls_streams_live()
__plugin__ = Twitch
|
PathTests | TestPathDepthParams | # -*- coding: utf-8 -*-
# ***************************************************************************
# * Copyright (c) 2016 sliptonic <shopinthewoods@gmail.com> *
# * *
# * This program is free software; you can redistribute it and/or modify *
# * it under the terms of the GNU Lesser General Public License (LGPL) *
# * as published by the Free Software Foundation; either version 2 of *
# * the License, or (at your option) any later version. *
# * for detail see the LICENCE text file. *
# * *
# * This program is distributed in the hope that it will be useful, *
# * but WITHOUT ANY WARRANTY; without even the implied warranty of *
# * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *
# * GNU Library General Public License for more details. *
# * *
# * You should have received a copy of the GNU Library General Public *
# * License along with this program; if not, write to the Free Software *
# * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 *
# * USA *
# * *
# ***************************************************************************
import unittest
import PathScripts.PathUtils as PathUtils
class depthTestCases(unittest.TestCase):
    """Exercise PathUtils.depth_params, the generator of machining step-down depths."""

    def _check(self, args, expected):
        """Assert that depth_params(**args) yields exactly *expected*.

        Replaces the repeated `d = ...; r = [i for i in d]` boilerplate and
        gives every test the same informative failure message.
        """
        result = list(PathUtils.depth_params(**args))
        self.assertListEqual(
            result, expected, "Expected {}, but result of {}".format(expected, result)
        )

    def test00(self):
        """Stepping down to zero"""
        args = {
            "clearance_height": 15,
            "safe_height": 12,
            "start_depth": 10,
            "step_down": 2,
            "z_finish_step": 1,
            "final_depth": 0,
            "user_depths": None,
        }
        self._check(args, [8, 6, 4, 2, 1, 0])

    def test001(self):
        """Stepping from zero to a negative depth"""
        args = {
            "clearance_height": 10,
            "safe_height": 5,
            "start_depth": 0,
            "step_down": 2,
            "z_finish_step": 0,
            "final_depth": -10,
            "user_depths": None,
        }
        self._check(args, [-2, -4, -6, -8, -10])

    def test002(self):
        """Start and end are equal or start lower than finish"""
        args = {
            "clearance_height": 15,
            "safe_height": 12,
            "start_depth": 10,
            "step_down": 2,
            "z_finish_step": 0,
            "final_depth": 10,
            "user_depths": None,
        }
        self._check(args, [10])
        # start below finish: no depths at all
        args["start_depth"] = 10
        args["final_depth"] = 15
        self._check(args, [])

    def test003(self):
        """User Parameters passed in"""
        args = {
            "clearance_height": 10,
            "safe_height": 5,
            "start_depth": 0,
            "step_down": 2,
            "z_finish_step": 0,
            "final_depth": -10,
            "user_depths": [2, 4, 8, 10, 11, 12],
        }
        # explicit user_depths are passed through verbatim
        self._check(args, [2, 4, 8, 10, 11, 12])

    def test004(self):
        """z_finish_step passed in."""
        args = {
            "clearance_height": 10,
            "safe_height": 5,
            "start_depth": 0,
            "step_down": 2,
            "z_finish_step": 1,
            "final_depth": -10,
            "user_depths": None,
        }
        self._check(args, [-2, -4, -6, -8, -9, -10])

    def test005(self):
        """stepping down with equalstep=True"""
        args = {
            "clearance_height": 10,
            "safe_height": 5,
            "start_depth": 10,
            "step_down": 3,
            "z_finish_step": 0,
            "final_depth": 0,
            "user_depths": None,
            "equalstep": True,
        }
        self._check(args, [7.5, 5.0, 2.5, 0])

    def test006(self):
        """stepping down with equalstep=True and a finish depth"""
        args = {
            "clearance_height": 10,
            "safe_height": 5,
            "start_depth": 10,
            "step_down": 3,
            "z_finish_step": 1,
            "final_depth": 0,
            "user_depths": None,
        }
        self._check(args, [7.0, 4.0, 1.0, 0])

    def test007(self):
        """stepping down with stepdown greater than total depth"""
        args = {
            "clearance_height": 10,
            "safe_height": 5,
            "start_depth": 10,
            "step_down": 20,
            "z_finish_step": 1,
            "final_depth": 0,
            "user_depths": None,
        }
        self._check(args, [1.0, 0])

    def test008(self):
        """Test handling of negative step-down, negative finish step, and relative size of step/finish"""
        # negative steps should be converted to positive values
        args = {
            "clearance_height": 3,
            "safe_height": 3,
            "start_depth": 2,
            "step_down": -1,
            "z_finish_step": -1,
            "final_depth": 0,
            "user_depths": None,
        }
        self._check(args, [1.0, 0])
        # a step_down less than the finish step is an error
        args = {
            "clearance_height": 3,
            "safe_height": 3,
            "start_depth": 2,
            "step_down": 0.1,
            "z_finish_step": 1,
            "final_depth": 0,
            "user_depths": None,
        }
        self.assertRaises(ValueError, PathUtils.depth_params, **args)

    def test009(self):
        """stepping down with single stepdown exactly equal to total depth"""
        args = {
            "clearance_height": 20.0,
            "safe_height": 15.0,
            "start_depth": 10.0,
            "step_down": 10.0,
            "z_finish_step": 0.0,
            "final_depth": 0.0,
            "user_depths": None,
        }
        self._check(args, [0])

    def test010(self):
        """stepping down with single stepdown roughly equal to total depth"""
        args = {
            "clearance_height": 20.0,
            "safe_height": 15.0,
            "start_depth": 10.000000001,
            "step_down": 10.0,
            "z_finish_step": 0.0,
            "final_depth": 0.0,
            "user_depths": None,
        }
        self._check(args, [0])
        # near-miss in the other direction (step_down slightly short)
        args = {
            "clearance_height": 20.0,
            "safe_height": 15.0,
            "start_depth": 10.0,
            "step_down": 9.9999999,
            "z_finish_step": 0.0,
            "final_depth": 0.0,
            "user_depths": None,
        }
        self._check(args, [0])
|
interface | gladewindow | #!/usr/bin/python3
# -*- coding: utf-8 -*-
#
# SoundConverter - GNOME application for converting between audio formats.
# Copyright 2004 Lars Wirzenius
# Copyright 2005-2020 Gautier Portet
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; version 3 of the License.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307
# USA
class GladeWindow:
    # Shared registry of signal-handler callbacks, keyed by handler name.
    callbacks = {}
    # The Gtk.Builder shared by every GladeWindow in the application.
    builder = None

    def __init__(self, builder):
        """Init GladeWindow, store the object's potential callbacks for later.

        You have to call connect_signals() when all descendants are ready.
        """
        GladeWindow.builder = builder
        # dict comprehension instead of the old dict([[k, v] ...]) construction
        GladeWindow.callbacks.update(
            {name: getattr(self, name) for name in dir(self) if name.startswith("on_")}
        )

    def __getattr__(self, attribute):
        """Allow direct use of window widgets as attributes."""
        widget = GladeWindow.builder.get_object(attribute)
        if widget is None:
            raise AttributeError("Widget '{}' not found".format(attribute))
        self.__dict__[attribute] = widget  # cache so __getattr__ is not hit again
        return widget

    @staticmethod
    def connect_signals():
        """Connect all GladeWindow objects to their respective signals."""
        GladeWindow.builder.connect_signals(GladeWindow.callbacks)
|
Code | Util | import atexit
import base64
import codecs
import collections
import datetime
import gc
import glob
import hashlib
import os
import random
import shutil
import sqlite3
import threading
import time
import zlib
from itertools import cycle, izip
import chardet.universaldetector
import cPickle
import scandir
def xor_crypt(data, key):
    """Symmetric XOR cipher: applying it twice with the same key restores *data*.

    An empty key returns *data* unchanged.

    http://bytes.com/topic/python/answers/881561-xor-encryption
    Author = http://bytes.com/profile/247871/darktemp/
    """
    if not key:
        return data
    # builtin zip is equivalent to the Python-2-only itertools.izip here
    return "".join(chr(ord(x) ^ ord(y)) for (x, y) in zip(data, cycle(key)))
def nuevoID():
    """Generate a quasi-unique integer id mixing the current time with randomness."""
    ahora = datetime.datetime.now()
    rnd = random.randint
    mezcla = rnd(1, ahora.year) * 12 + rnd(1, ahora.month)
    mezcla = mezcla * 31 + ahora.day
    mezcla = mezcla * 24 + ahora.hour
    mezcla = mezcla * 60 + ahora.minute
    mezcla = mezcla * 60 + ahora.second
    # floor division keeps the historic Python 2 integer `/` result
    return mezcla * 1000 + rnd(1, ahora.microsecond + 737) // 1000
def guardaDIC(dic, fich):
    """Pickle *dic* and save it base64-encoded to the file *fich*."""
    with open(fich, "w") as q:
        q.write(base64.encodestring(cPickle.dumps(dic)))


def recuperaDIC(fich):
    """Load a dict saved by guardaDIC; return None if the file is missing/corrupt."""
    try:
        with open(fich) as f:
            dic = cPickle.loads(base64.decodestring(f.read()))
    # FIX: was a bare `except:` which also swallowed SystemExit/KeyboardInterrupt
    except Exception:
        dic = None
    return dic


def guardaVar(fich, v):
    """Pickle *v* directly (no base64) into the file *fich*."""
    with open(fich, "w") as q:
        q.write(cPickle.dumps(v))


def recuperaVar(fich, default=None):
    """Load a value saved by guardaVar; return *default* on any failure."""
    try:
        with open(fich) as f:
            v = cPickle.loads(f.read())
    # FIX: was a bare `except:`; keep the deliberate best-effort behaviour
    except Exception:
        v = default
    return v
def var2blob(var):
    # Pickle + zlib-compress *var* into an sqlite3 BLOB value.
    varp = cPickle.dumps(var)
    varz = zlib.compress(varp, 7)
    return sqlite3.Binary(varz)


def blob2var(blob):
    # Inverse of var2blob; a None blob maps to None.
    if blob is None:
        return None
    varp = zlib.decompress(blob)
    return cPickle.loads(varp)


def dic2blob(dic):
    # Store a dict as its compressed repr() text (separator spaces stripped
    # to save a few bytes); read back with blob2dic.
    varp = str(dic).replace(", ", ",").replace(": ", ":")
    varz = zlib.compress(varp, 7)
    return sqlite3.Binary(varz)


def blob2dic(blob):
    # Inverse of dic2blob. NOTE(review): uses eval() on the stored text —
    # only safe while blobs are produced locally by dic2blob; never feed
    # this untrusted data.
    if blob is None:
        return {}
    varp = zlib.decompress(blob)
    return eval(varp)


def str2blob(varp):
    # zlib-compress a plain string into an sqlite3 BLOB.
    varz = zlib.compress(varp, 7)
    return sqlite3.Binary(varz)


def blob2str(blob):
    # Decompress back to a (byte) string; None maps to "".
    if blob is None:
        return ""
    return str(zlib.decompress(blob))
def dic2txt(dic):
    # Dict -> single-line text: base64-encoded pickle with '\n' mapped to '|'.
    return base64.encodestring(cPickle.dumps(dic)).replace("\n", "|")


def txt2dic(txt):
    # Inverse of dic2txt.
    txt = txt.replace("|", "\n")
    dic = cPickle.loads(base64.decodestring(txt))
    return dic


def var2txt(var):
    # Arbitrary value -> pickle text (Python 2 cPickle).
    return cPickle.dumps(var)


def txt2var(txt):
    # Inverse of var2txt.
    return cPickle.loads(txt)
def renombraNum(origen):
    """Rename *origen* to the first unused 'origen.N' (N = 1, 2, ...)."""
    numero = 1
    destino = "%s.%d" % (origen, numero)
    while os.path.isfile(destino):
        numero += 1
        destino = "%s.%d" % (origen, numero)
    os.rename(origen, destino)
class Almacen:
    # Empty namespace class: instances serve as ad-hoc attribute bags.
    pass


class Record:
    # Empty namespace class: instances serve as ad-hoc attribute bags.
    pass
def hoy():
    """Current local date-time."""
    return datetime.datetime.now()


def dtos(f):
    """Date -> 'YYYYMMDD' string."""
    return "%04d%02d%02d" % (f.year, f.month, f.day)


def stod(txt):
    """'YYYYMMDD' string -> datetime.date, or None when malformed."""
    if not (txt and len(txt) == 8 and txt.isdigit()):
        return None
    return datetime.date(int(txt[:4]), int(txt[4:6]), int(txt[6:]))
def dtosext(f):
    """Datetime -> 'YYYYMMDDhhmmss' string."""
    campos = (f.year, f.month, f.day, f.hour, f.minute, f.second)
    return "%04d%02d%02d%02d%02d%02d" % campos


def stodext(txt):
    """'YYYYMMDDhhmmss' string -> datetime.datetime, or None when malformed."""
    if not (txt and len(txt) == 14 and txt.isdigit()):
        return None
    partes = (txt[0:4], txt[4:6], txt[6:8], txt[8:10], txt[10:12], txt[12:14])
    return datetime.datetime(*[int(p) for p in partes])
def primeraMayuscula(txt):
    """Return *txt* with its first character upper-cased.

    Fixed: the original raised IndexError on an empty string; '' now stays ''.
    """
    if not txt:
        return txt
    return txt[0].upper() + txt[1:]
def huella():
    """Return a random 32-char hex fingerprint (md5 of randomness + timestamp)."""
    m = hashlib.md5()
    # Encode explicitly: hashlib.update() requires bytes on Python 3; on
    # Python 2 encoding this ASCII string yields the same bytes as before.
    m.update((str(random.random()) + str(hoy())).encode("utf-8"))
    return m.hexdigest()
def microsegundosRnd():
    """Time-derived microsecond counter with a little random jitter added."""
    ahora = datetime.datetime.now()
    minutos = ahora.minute + 60 * (ahora.hour + 24 * ahora.toordinal())
    segundos = ahora.second + 60 * minutos
    micros = ahora.microsecond + 1000000 * segundos
    return random.randint(0, 1000) + 1000 * micros
def fileNext(folder, base, ext):
    """Return the first unused path of the form folder/base<N>.ext."""
    plantilla = os.path.join(folder, "%s%s.%s" % (base, "%d", ext))
    numero = 1
    while existeFichero(plantilla % numero):
        numero += 1
    return plantilla % numero
def ficheroTemporal(pathTemp, extension):
    """Return a fresh random file name inside *pathTemp* (folder is created)."""
    creaCarpeta(pathTemp)
    while True:
        candidato = os.path.join(
            pathTemp, "%d.%s" % (random.randint(1, 999999999), extension)
        )
        if not existeFichero(candidato):
            return candidato
def tamFichero(fichero):
    """Size of *fichero* in bytes, or -1 when it is not a regular file."""
    if not os.path.isfile(fichero):
        return -1
    return os.path.getsize(fichero)


def existeFichero(fichero):
    """True when *fichero* exists as a regular file."""
    return tamFichero(fichero) >= 0
def copiaFichero(origen, destino):
    """Copy *origen* over *destino*; return True only when the copy happened."""
    if not existeFichero(origen):
        return False
    if not borraFichero(destino):
        # could not clear the target: give up rather than overwrite-fail
        return False
    shutil.copy2(origen, destino)
    return True
def renombraFichero(origen, destino):
    # Move/rename *origen* to *destino*; return True on success.
    if not existeFichero(origen):
        return False
    origen = os.path.abspath(origen)
    destino = os.path.abspath(destino)
    if origen == destino:
        # already the same path: nothing to do
        return True
    if origen.lower() == destino.lower():
        # same path except for letter case (case-insensitive filesystems):
        # os.rename handles the pure case change directly
        os.rename(origen, destino)
        return True
    if borraFichero(destino):
        # target cleared: perform the real move
        shutil.move(origen, destino)
        return True
    return False
def borraFichero(fichero):
    """Best-effort delete; return True when *fichero* no longer exists."""
    try:
        os.remove(fichero)
    # FIX: was a bare `except:`; only OS-level failures (missing file,
    # permissions) should be swallowed — the isfile() check below decides
    except OSError:
        pass
    return not os.path.isfile(fichero)
def ini2lista(fichero, etiClave="CLAVE"):
    """Parse an INI-style file into a list of OrderedDicts, one per [section].

    The section name is stored under the *etiClave* key of each dict.
    Returns [] when the file does not exist.
    """
    li = []
    if not os.path.isfile(fichero):
        return li
    dic = None
    # text mode instead of "rb": the parser works line-by-line on text and
    # strip() already removed any '\r', so behaviour is unchanged
    with open(fichero, "r") as f:
        for linea in f:
            linea = linea.strip()
            if not linea:
                continue
            if linea.startswith("["):
                dic = collections.OrderedDict()
                dic[etiClave] = linea[1:-1]
                li.append(dic)
            elif dic is not None:
                # FIX: was `if n:` — str.find() returns -1 for "no '='",
                # which is truthy, so '='-less lines were mis-parsed.
                # Also guard against key lines before any [section]
                # (previously an unbound `dic` NameError).
                n = linea.find("=")
                if n > 0:
                    dic[linea[:n].strip()] = linea[n + 1 :].strip()
    return li
def lista2ini(fichero, lista, etiClave="CLAVE"):
    """Inverse of ini2lista: write each dict as a [section] with key=value lines.

    Fixed: the file handle leaked if a write raised (no try/finally); a
    `with` block now guarantees the close. Text mode ("w" instead of "wb")
    matches the reader — only text lines are ever written.
    """
    with open(fichero, "w") as f:
        for dic in lista:
            f.write("[%s]\n" % dic[etiClave])
            for k in dic:
                if k != etiClave:
                    f.write("%s=%s\n" % (k, dic[k]))
def ini2dic(fichero):
    # Parse an INI file into {section: {key: value}} (OrderedDicts).
    # '#' lines are comments. NOTE(review): a key=value line appearing
    # before any [section] would hit an unbound `dic` (NameError) —
    # presumably inputs always start with a section header; confirm.
    dicBase = collections.OrderedDict()
    if os.path.isfile(fichero):
        f = open(fichero, "rb")
        for linea in f:
            linea = linea.strip()
            if linea and not linea.startswith("#"):
                if linea.startswith("["):
                    clave = linea[1:-1]
                    dic = collections.OrderedDict()
                    dicBase[clave] = dic
                else:
                    # n > 0 correctly rejects lines without '=' (find() == -1)
                    n = linea.find("=")
                    if n > 0:
                        clave1 = linea[:n].strip()
                        valor = linea[n + 1 :].strip()
                        dic[clave1] = valor
        f.close()
    return dicBase


def ini8dic(fichero):
    # Same as ini2dic but reading the file as utf-8 text (errors ignored).
    dicBase = collections.OrderedDict()
    if os.path.isfile(fichero):
        f = codecs.open(fichero, "r", "utf-8", "ignore")
        for linea in f:
            linea = linea.strip()
            if linea and not linea.startswith("#"):
                if linea.startswith("["):
                    clave = linea[1:-1]
                    dic = collections.OrderedDict()
                    dicBase[clave] = dic
                else:
                    n = linea.find("=")
                    if n > 0:
                        clave1 = linea[:n].strip()
                        valor = linea[n + 1 :].strip()
                        dic[clave1] = valor
        f.close()
    return dicBase


def dic8ini(fichero, dic):
    # Inverse of ini8dic: write {section: {key: value}} as utf-8 INI text.
    f = codecs.open(fichero, "w", "utf-8", "ignore")
    for k in dic:
        f.write("[%s]\n" % k)
        for clave in dic[k]:
            f.write("%s=%s\n" % (clave, dic[k][clave]))
    f.close()
def iniBase8dic(fichero):
    """Parse a flat (section-less) utf-8 'key=value' file into a plain dict.

    '#' lines are comments; returns {} when the file does not exist.
    """
    dic = {}
    if os.path.isfile(fichero):
        f = codecs.open(fichero, "r", "utf-8", "ignore")
        for linea in f:
            linea = linea.strip()
            if linea.startswith("#"):
                continue
            if linea:
                n = linea.find("=")
                # FIX: was `if n:` — str.find() returns -1 (truthy) for lines
                # without '=', which were then mis-parsed; the sibling
                # ini2dic already uses the correct `n > 0` test.
                if n > 0:
                    clave = linea[:n].strip()
                    valor = linea[n + 1 :].strip()
                    dic[clave] = valor
        f.close()
    return dic
def dic8iniBase(fichero, dic):
    """Write *dic* as utf-8 'key=value' lines (inverse of iniBase8dic)."""
    salida = codecs.open(fichero, "w", "utf-8", "ignore")
    for clave in dic:
        salida.write("%s=%s\n" % (clave, dic[clave]))
    salida.close()
def creaCarpeta(carpeta):
    """Create directory *carpeta* if absent; failures are deliberately ignored.

    Note: uses os.mkdir, so a missing parent directory silently results in
    no folder being created — callers that care must check afterwards.
    """
    if not os.path.isdir(carpeta):
        try:
            os.mkdir(carpeta)
        # FIX: was a bare `except:`; only OS-level failures are expected here
        except OSError:
            pass
def secs2str(s):
    """Format a duration in seconds as 'HH:MM:SS'.

    Uses divmod/floor division: identical to the original Python 2 integer
    `/`, but no longer silently wrong under true division (Python 3).
    """
    m, s = divmod(s, 60)
    h, m = divmod(m, 60)
    return "%02d:%02d:%02d" % (h, m, s)
class ListaNumerosImpresion:
    """Page-selection expression parser, e.g. '1,3-5,8-,-2'.

    Internal pattern tuples (modo, datos...):
      1 <num>        exact page
      2 <num>-       from page onwards
      3 <num>-<num>  inclusive range
      4 -<num>       up to page
    An empty or fully-invalid expression selects every page.
    """

    def __init__(self, txt):
        self.lista = []
        if not txt:
            return
        txt = txt.replace("--", "-").replace(",,", ",").replace(" ", "")
        for bloque in txt.split(","):
            self._parse(bloque)

    def _parse(self, bloque):
        """Append the pattern tuple for one comma-separated token, if valid."""
        if bloque.startswith("-"):
            resto = bloque[1:]
            if resto.isdigit():
                self.lista.append((4, int(resto)))
        elif bloque.endswith("-"):
            resto = bloque[:-1]
            if resto.isdigit():
                self.lista.append((2, int(resto)))
        elif "-" in bloque:
            partes = bloque.split("-")
            if len(partes) == 2 and partes[0].isdigit() and partes[1].isdigit():
                desde, hasta = int(partes[0]), int(partes[1])
                if desde <= hasta:
                    self.lista.append((3, desde, hasta))
        elif bloque.isdigit():
            self.lista.append((1, int(bloque)))

    def siEsta(self, pos):
        """True when page *pos* matches any pattern (or no patterns exist)."""
        if not self.lista:
            return True
        for patron in self.lista:
            modo, limite = patron[0], patron[1]
            if modo == 1 and pos == limite:
                return True
            if modo == 2 and pos >= limite:
                return True
            if modo == 3 and limite <= pos <= patron[2]:
                return True
            if modo == 4 and pos <= limite:
                return True
        return False

    def selected(self, lista):
        """Filter *lista*, keeping only the selected page numbers."""
        return [x for x in lista if self.siEsta(x)]
def speed():
    # Crude CPU benchmark: time 100k x 10 oct() calls (Python 2: xrange).
    t = time.time()
    for x in xrange(100000):
        for i in xrange(10):
            oct(i)
    # NOTE(review): enables the garbage collector as a side effect —
    # presumably a caller disabled it beforehand; confirm before relying on it.
    gc.enable()
    return time.time() - t
class SymbolDict:
    """Insertion-ordered dict with case-insensitive string keys.

    Indexing with an int returns the n-th key in its original capitalisation.
    """

    def __init__(self, dic=None):
        self._dic = {}
        self._keys = []
        if dic:
            # Python 2 dict API (iteritems) on the seed dict
            for k, v in dic.iteritems():
                self.__setitem__(k, v)

    def __contains__(self, clave):
        return clave.upper() in self._dic

    def __len__(self):
        return len(self._keys)

    def __getitem__(self, clave):
        # an int index means "give me the n-th key", not a value lookup
        if type(clave) == int:
            return self._keys[clave]
        return self._dic[clave.upper()]

    def __setitem__(self, clave, valor):
        mayus = clave.upper()
        if mayus not in self._dic:
            # first sighting: remember the original capitalisation
            self._keys.append(clave)
        self._dic[mayus] = valor

    def get(self, clave, default=None):
        if clave.upper() not in self._dic:
            return default
        return self.__getitem__(clave)

    def iteritems(self):
        for clave in self._keys:
            yield clave, self.__getitem__(clave)

    def keys(self):
        return list(self._keys)

    def __str__(self):
        lineas = ["[%s]=[%s]" % (clave, str(self.__getitem__(clave))) for clave in self._keys]
        return "\n".join(lineas)
class IPC(object):
    # Single-producer / single-consumer message queue persisted in an SQLite
    # file. The pushing side recreates the file and its table; the popping
    # side walks the rows in ROWID order, remembering the last one consumed.
    def __init__(self, nomFichero, siPush):
        if siPush and os.path.isfile(nomFichero):
            # producer starts from a clean file; removal is best effort
            try:
                os.remove(nomFichero)
            except:
                pass
        self._conexion = sqlite3.connect(nomFichero)
        # guarantee the connection is released at interpreter exit
        atexit.register(self.close)
        if siPush:
            sql = "CREATE TABLE DATOS( DATO BLOB );"
            self._conexion.cursor().execute(sql)
            self._conexion.commit()
        # ROWID of the last message read; the next pop() reads self.key + 1
        self.key = 0

    def pop(self):
        # Return the next pending message, or None when the queue is drained.
        cursor = self._conexion.cursor()
        nk = self.key + 1
        sql = "SELECT dato FROM DATOS WHERE ROWID = %d" % nk
        cursor.execute(sql)
        reg = cursor.fetchone()
        if reg:
            # Python 2 cPickle; str() converts the sqlite buffer to a byte string
            valor = cPickle.loads(str(reg[0]))
            self.key = nk
        else:
            valor = None
        cursor.close()
        return valor

    def push(self, valor):
        # Append one pickled message to the queue.
        cursor = self._conexion.cursor()
        dato = sqlite3.Binary(cPickle.dumps(valor))
        sql = "INSERT INTO DATOS (dato) values(?)"
        cursor.execute(
            sql,
            [
                dato,
            ],
        )
        cursor.close()
        self._conexion.commit()

    def close(self):
        # Idempotent: safe when atexit calls it a second time.
        if self._conexion:
            self._conexion.close()
            self._conexion = None
class Rondo:
    """Round-robin cycler over the fixed set of values given at construction."""

    def __init__(self, *lista):
        self.pos = -1
        self.lista = lista
        self.tope = len(self.lista)

    def shuffle(self):
        """Randomise the iteration order in place."""
        barajada = list(self.lista)
        random.shuffle(barajada)
        self.lista = barajada

    def otro(self):
        """Return the next value, wrapping around after the last one."""
        self.pos += 1
        if self.pos == self.tope:
            self.pos = 0
        return self.lista[self.pos]

    def reset(self):
        """Restart the cycle at the first value."""
        self.pos = -1
def validNomFichero(nombre):
    """Strip *nombre* and replace characters unsafe in file names with '_'."""
    prohibidos = '\\:/|?*^%><(),;"'
    limpio = nombre.strip()
    return "".join("_" if c in prohibidos else c for c in limpio)


def asciiNomFichero(nombre):
    """validNomFichero plus: non-printable-ASCII chars become '_', runs collapsed."""
    nombre = validNomFichero(nombre)
    nombre = "".join(c if 31 < ord(c) < 127 else "_" for c in nombre)
    # collapse consecutive underscores produced by the substitutions
    while "__" in nombre:
        nombre = nombre.replace("__", "_")
    return nombre
def datefile(pathfile):
    """Modification time of *pathfile* as a datetime, or None when unavailable."""
    try:
        mtime = os.path.getmtime(pathfile)
    # FIX: was a bare `except:`; only missing-file/permission errors belong here
    except OSError:
        return None
    return datetime.datetime.fromtimestamp(mtime)
class Timer:
    # Chess-clock style countdown. The remaining time lives in
    # self.tiempoPendiente (seconds); while the clock runs, self.marcaTiempo
    # holds the time.time() of the last start so the remainder is computed
    # lazily on demand.
    def __init__(self, tiempoPendiente):
        self.tiempoPendiente = tiempoPendiente
        self.marcaTiempo = None  # None -> clock stopped
        self.txt = ""  # last label produced (for change detection)
        self.marcaZeitnot = 0  # threshold (seconds) arming the zeitnot warning

    def texto(self, segs):
        # Format seconds as 'MM:SS', clamping negatives to zero.
        if segs <= 0.0:
            segs = 0.0
        tp = int(segs)
        txt = "%02d:%02d" % (int(tp / 60), tp % 60)
        return txt

    def ponSegExtra(self, segs):
        # Grant extra seconds (e.g. a per-move increment).
        self.tiempoPendiente += segs

    def dameSegundos(self):
        # Whole seconds still pending, accounting for a running clock.
        if self.marcaTiempo:
            tp = self.tiempoPendiente - (time.time() - self.marcaTiempo)
        else:
            tp = self.tiempoPendiente
        if tp <= 0.0:
            tp = 0
        return int(tp)

    def dameSegundos2(self):
        # (pending seconds, seconds elapsed since the clock last started).
        if self.marcaTiempo:
            tp2 = int(time.time() - self.marcaTiempo)
            tp = int(self.tiempoPendiente) - tp2
        else:
            tp = self.tiempoPendiente
            tp2 = 0
        if tp <= 0.0:
            tp = 0
        return int(tp), tp2

    def etiqueta(self):
        # 'MM:SS' label of the pending time.
        return self.texto(self.dameSegundos())

    def etiqueta2(self):
        # ('MM:SS' pending, 'MM:SS' elapsed) label pair.
        tp, tp2 = self.dameSegundos2()
        return self.texto(tp), self.texto(tp2)

    def etiquetaDif(self):
        # Pending label only when it changed since the last call, else None.
        nvEti = self.etiqueta()
        if nvEti != self.txt:
            self.txt = nvEti
            return nvEti
        return None

    def etiquetaDif2(self):
        # Like etiquetaDif, but returning the (pending, elapsed) pair.
        nvEti, nvEti2 = self.etiqueta2()
        if nvEti != self.txt:
            self.txt = nvEti
            return nvEti, nvEti2
        return None, None

    def etiquetaDGT(self):
        # 'H:MM:SS' label (relies on Python 2 integer division).
        segs = self.dameSegundos()
        mins = segs / 60
        segs -= mins * 60
        hors = mins / 60
        mins -= hors * 60
        return "%d:%02d:%02d" % (hors, mins, segs)

    def siAgotado(self):
        # True when no time is left (the flag has fallen).
        if self.marcaTiempo:
            if (self.tiempoPendiente - (time.time() - self.marcaTiempo)) <= 0.0:
                return True
        else:
            return self.tiempoPendiente <= 0.0
        return False

    def isZeitnot(self):
        # Fires at most once: True the first time the pending time drops
        # below the armed threshold, which is then cleared.
        if self.marcaZeitnot:
            if self.marcaTiempo:
                t = self.tiempoPendiente - (time.time() - self.marcaTiempo)
            else:
                t = self.tiempoPendiente
            if t > 0:
                resp = t < self.marcaZeitnot
                if resp:
                    # NOTE(review): reset to None here but 0 in __init__;
                    # both are falsy, so the guard above still works
                    self.marcaZeitnot = None
                return resp
        return False

    def setZeitnot(self, segs):
        # Arm the zeitnot warning at *segs* seconds remaining.
        self.marcaZeitnot = segs

    def iniciaMarcador(self):
        # Start (or restart) the running clock.
        self.marcaTiempo = time.time()

    def paraMarcador(self, tiempoJugada):
        # Stop the clock, discounting the elapsed time minus *tiempoJugada*
        # (time credited back to the player).
        if self.marcaTiempo:
            self.tiempoPendiente -= (time.time() - self.marcaTiempo) - tiempoJugada
            self.marcaTiempo = None

    def tiempoAplazamiento(self):
        # Stop the clock and return the pending time (adjournment).
        self.paraMarcador(0.00)
        return self.tiempoPendiente

    def save(self):
        # Serializable state: (pending time, zeitnot threshold).
        return (self.tiempoPendiente, self.marcaZeitnot)

    def restore(self, tvar):
        # Restore state produced by save(); the clock is left stopped.
        self.tiempoPendiente, self.marcaZeitnot = tvar
        self.marcaTiempo = None
        self.txt = ""
def fideELO(eloJugador, eloRival, resultado):
    """Elo rating delta for one game: resultado +1 win, 0 draw, anything else loss."""
    if resultado == +1:
        puntos = 1.0
    elif resultado == 0:
        puntos = 0.5
    else:
        puntos = 0.0
    # K-factor by rating band
    if eloJugador <= 1200:
        k = 40.0
    elif eloJugador <= 2100:
        k = 32.0
    elif eloRival < 2400:
        # NOTE(review): this band tests eloRival while the two above test
        # eloJugador — kept as in the original; confirm it is intentional.
        k = 24.0
    else:
        k = 16.0
    esperado = 1.0 / (1.0 + (10.0 ** ((eloRival - eloJugador) / 400.0)))
    return int(k * (puntos - esperado))
# Supported date output formats; index 0 is the one in active use.
date_format = [
    "%Y.%m.%d",
]


def localDate(date):
    """Format a date as 'YYYY.MM.DD'."""
    return date.strftime(date_format[0])


def localDateT(date):
    """Format a datetime as 'YYYY.MM.DD HH:MM'."""
    fecha = date.strftime(date_format[0])
    return "%s %02d:%02d" % (fecha, date.hour, date.minute)
def listfiles(*lista):
    """Return glob() matches for the path built from the given components."""
    patron = os.path.join(lista[0], *lista[1:])
    return glob.glob(patron)
def listdir(txt, siUnicode=False):
    # Python 2: scandir backport; siUnicode forces a unicode path so directory
    # entries come back as unicode strings.
    return scandir.scandir(unicode(txt) if siUnicode else txt)
def dirRelativo(dr):
    """Return *dr* relative to the cwd when that does not escape it; '' for falsy input."""
    if not dr:
        return ""
    try:
        relativo = os.path.relpath(dr)
    # FIX: was a bare `except:`; relpath can raise ValueError (different
    # drive on Windows) or OSError — keep the absolute path in that case
    except (ValueError, OSError):
        return dr
    # keep the original form when the relative one climbs out of the cwd
    return dr if relativo.startswith("..") else relativo
def cX():
    """Deterministically derive an obfuscated key string from a fixed phrase."""
    b3 = "Sibuscasresultadosdistintosnohagassiemprelomismo"
    n7 = 669558
    t1 = len(b3)
    p3 = 0
    c9 = ""
    while n7 > 0:
        nr = n7 % 10
        # FIX: was `n7 /= 10`, which only floors on Python 2; `//=` makes the
        # integer division explicit and keeps the result identical
        n7 //= 10
        p3 += nr
        if p3 >= t1:
            p3 -= t1
        c9 += b3[p3] + str(p3)
    return "".join(reversed(c9))
def enc(cad):
    """Obfuscate *cad*: each char becomes a 3-digit number shifted by key + position."""
    cl = cX()
    lcl = len(cl)
    piezas = []
    pos = 0
    for i, caracter in enumerate(cad):
        piezas.append("%03d" % (ord(caracter) + i + ord(cl[pos])))
        pos += 1
        if pos >= lcl:
            pos = 0
    return "".join(piezas)
# def dec( cad ):
# pos = 0
# resp = ""
# cl = cX()
# lcl = len(cl)
# lcad = len(cad)/3
# for i in range(lcad):
# nc = i*3
# c0 = cad[nc:nc+3]
# h = int(c0)
# h = h - i - ord(cl[pos])
# resp += chr( h )
# pos += 1
# if pos >= lcl:
# pos = 0
# return resp
def creaID():
    """Create an obfuscated unique id from a random number plus the timestamp."""
    aleatorio = random.randint(1, 9999)
    marca = datetime.datetime.now().isoformat()[2:].strip("0")
    for separador in "-T:.":
        marca = marca.replace(separador, "")
    return enc("%d,%s" % (aleatorio, marca))
# ~ import subprocess
# ~ class Proceso():
# ~ def __init__( self, exe ):
# ~ self.setWorkingDirectory ( os.path.abspath(os.path.dirname(exe)) )
# ~ if "critter" in exe.lower():
# ~ startupinfo = subprocess.STARTUPINFO()
# ~ startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW
# ~ startupinfo.wShowWindow = subprocess.SW_HIDE
# ~ self.popen = p = subprocess.Popen("", executable=exe, stdin=subprocess.PIPE, stdout=subprocess.PIPE, shell=False, startupinfo=startupinfo)
# ~ else:
# ~ self.popen = p = subprocess.Popen( "", executable=exe, shell=True, \
# ~ stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.STDOUT )
# ~ self.stdin = p.stdin
# ~ self.stdout = p.stdout
# ~ def escribeLinea( self, linea ):
# ~ self.stdin.write( linea + "\n" )
# ~ def esperaRespuesta( self, x=None ):
# ~ return self.stdout.readline()
# ~ def terminar( self ):
# ~ try:
# ~ self.popen.terminate()
# ~ except:
# ~ pass
# ~ from ctypes import *
# ~ from ctypes.wintypes import *
# ~ class MEMORYSTATUS(Structure):
# ~ _fields_ = [
# ~ ('dwLength', DWORD),
# ~ ('dwMemoryLoad', DWORD),
# ~ ('dwTotalPhys', DWORD),
# ~ ('dwAvailPhys', DWORD),
# ~ ('dwTotalPageFile', DWORD),
# ~ ('dwAvailPageFile', DWORD),
# ~ ('dwTotalVirtual', DWORD),
# ~ ('dwAvailVirtual', DWORD),
# ~ ]
# ~ def winmem():
# ~ x = MEMORYSTATUS()
# ~ windll.kernel32.GlobalMemoryStatus(byref(x))
# ~ return x
# def detectCPUs():
# """
# Detects the number of CPUs on a system. Cribbed from pp.
# http://codeliberates.blogspot.com/2008/05/detecting-cpuscores-in-python.html
# Literal : I found this interesting function in a post on Twister and distributed programming by Bruce Eckel. It uses the Python os package to detect the number of CPUs/cores on a machine. Archiving it here for future reference
# """
# # Linux, Unix and MacOS:
# if hasattr(os, "sysconf"):
# if os.sysconf_names.has_key("SC_NPROCESSORS_ONLN"):
# # Linux & Unix:
# ncpus = os.sysconf("SC_NPROCESSORS_ONLN")
# if isinstance(ncpus, int) and ncpus > 0:
# return ncpus
# else: # OSX:
# return int(os.popen2("sysctl -n hw.ncpu")[1].read())
# # Windows:
# if os.environ.has_key("NUMBER_OF_PROCESSORS"):
# ncpus = int(os.environ["NUMBER_OF_PROCESSORS"]);
# if ncpus > 0:
# return ncpus
# return 1 # Default
class DicSQL(object):
    # Persistent dict on top of SQLite: values are pickled + base64-encoded
    # TEXT, keys are TEXT. Keeps an in-memory key set (stKeys) plus a bounded
    # cache of decoded values (Python 2: cPickle / base64.encodestring).
    def __init__(self, nomDB, tabla="Data", maxCache=2048):
        self.table = tabla
        self.maxCache = maxCache
        self.cache = collections.OrderedDict()
        self._conexion = sqlite3.connect(nomDB)
        self._conexion.text_factory = lambda x: unicode(x, "utf-8", "ignore")
        atexit.register(self.close)
        cursor = self._conexion.cursor()
        cursor.execute("pragma table_info(%s)" % tabla)
        if not cursor.fetchall():
            # first use of this file: create the key/value table
            sql = "CREATE TABLE %s( KEY TEXT PRIMARY KEY, VALUE TEXT );" % tabla
            cursor.execute(sql)
            self._conexion.commit()
        cursor.close()
        # preload every key so membership tests never touch the database
        self.stKeys = set()
        cursor = self._conexion.cursor()
        sql = "SELECT KEY FROM %s" % self.table
        cursor.execute(sql)
        li = cursor.fetchall()
        for reg in li:
            self.stKeys.add(reg[0])
        cursor.close()

    def __contains__(self, key):
        return key in self.stKeys

    def addCache(self, key, obj):
        # Insert into the cache, evicting the oldest entry when full
        # (Python 2: OrderedDict.keys() returns a list, so [0] is the oldest).
        if len(self.cache) > self.maxCache:
            del self.cache[self.cache.keys()[0]]
        self.cache[key] = obj

    def __setitem__(self, key, obj):
        cursor = self._conexion.cursor()
        dato = base64.encodestring(cPickle.dumps(obj))
        key = str(key)
        siYaEsta = key in self.stKeys
        if siYaEsta:
            sql = "UPDATE %s SET VALUE=? WHERE KEY = ?" % self.table
        else:
            sql = "INSERT INTO %s (VALUE,KEY) values(?,?)" % self.table
            self.stKeys.add(key)
        cursor.execute(sql, (dato, key))
        cursor.close()
        self._conexion.commit()
        self.addCache(key, obj)

    def __getitem__(self, key):
        # Returns None for missing keys (no KeyError).
        key = str(key)
        if key in self.stKeys:
            if key in self.cache:
                return self.cache[key]
            cursor = self._conexion.cursor()
            sql = "SELECT VALUE FROM %s WHERE KEY= ?" % self.table
            cursor.execute(sql, (key,))
            li = cursor.fetchone()
            cursor.close()
            dato = base64.decodestring(li[0])
            obj = cPickle.loads(dato)
            self.addCache(key, obj)
            return obj
        else:
            return None

    def __delitem__(self, key):
        key = str(key)
        if key in self.stKeys:
            self.stKeys.remove(key)
            if key in self.cache:
                del self.cache[key]
            cursor = self._conexion.cursor()
            sql = "DELETE FROM %s WHERE KEY= ?" % self.table
            cursor.execute(sql, (key,))
            cursor.close()
            self._conexion.commit()

    def __len__(self):
        return len(self.stKeys)

    def close(self):
        # Idempotent; also registered with atexit.
        if self._conexion:
            self._conexion.close()
            self._conexion = None

    def keys(self, siOrdenados=False, siReverse=False):
        # Copy of the key set; *siReverse* is honoured only when sorting.
        li = list(self.stKeys)
        return sorted(li, reverse=siReverse) if siOrdenados else li

    def get(self, key, default):
        # Like dict.get, but the default argument is mandatory.
        key = str(key)
        if key in self.stKeys:
            return self.__getitem__(key)
        else:
            return default

    def asDictionary(self):
        # Decode the whole table into an OrderedDict (bypasses the cache).
        dic = collections.OrderedDict()
        cursor = self._conexion.cursor()
        sql = "SELECT KEY,VALUE FROM %s" % self.table
        cursor.execute(sql)
        li = cursor.fetchall()
        for key, dato in li:
            dato = base64.decodestring(dato)
            dic[key] = cPickle.loads(dato)
        cursor.close()
        return dic

    def pack(self):
        # Reclaim free pages in the database file.
        cursor = self._conexion.cursor()
        cursor.execute("VACUUM")
        cursor.close()
        self._conexion.commit()

    def deleteall(self):
        # Remove every row and compact the file.
        cursor = self._conexion.cursor()
        cursor.execute("DELETE FROM %s" % self.table)
        cursor.execute("VACUUM")
        cursor.close()
        self._conexion.commit()
        # FIX: the in-memory mirrors must be emptied too — previously the
        # stale stKeys made a later __getitem__ fetch a missing row and
        # crash on li[0] (fetchone() returns None).
        self.stKeys = set()
        self.cache = collections.OrderedDict()

    def __enter__(self):
        return self

    def __exit__(self, xtype, value, traceback):
        self.close()
class LIdisk:
    # Append-only list persisted in an SQLite file; items are pickled TEXT
    # rows addressed by ROWID (caller uses 0-based indexes).
    def __init__(self, nomFichero):
        self.nomFichero = nomFichero
        self._conexion = sqlite3.connect(nomFichero)
        self._conexion.text_factory = lambda x: unicode(x, "utf-8", "ignore")
        atexit.register(self.close)
        try:
            # fails harmlessly when the table already exists
            sql = "CREATE TABLE datos( DATO TEXT );"
            self._conexion.cursor().execute(sql)
        except:
            pass

    def append(self, valor):
        # Pickle *valor* and store it as a new row.
        sql = "INSERT INTO datos( DATO ) VALUES( ? )"
        liValores = [cPickle.dumps(valor)]
        cursor = self._conexion.cursor()
        cursor.execute(sql, liValores)
        cursor.close()
        self._conexion.commit()

    def __getitem__(self, xid):
        # 0-based index -> ROWID (1-based). NOTE(review): the cursor is not
        # closed here, and a missing row raises TypeError on dato[0].
        sql = "select DATO from datos where ROWID=%d" % (xid + 1,)
        cursor = self._conexion.cursor()
        cursor.execute(sql)
        dato = cursor.fetchone()
        return cPickle.loads(str(dato[0]))

    def __len__(self):
        sql = "select COUNT(DATO) from datos"
        cursor = self._conexion.cursor()
        cursor.execute(sql)
        resp = cursor.fetchone()
        cursor.close()
        return resp[0]

    def close(self):
        # Idempotent; also registered with atexit.
        if self._conexion:
            self._conexion.close()
            self._conexion = None
class Cursor:
    """Context manager yielding an sqlite3 cursor, opening the DB on demand.

    When this object created the connection itself, leaving the context
    closes it (optionally committing first); an externally supplied
    connection is left open for its owner.
    """

    def __init__(self, nomDB, conexion=None, with_commit=False):
        self.nomDB = nomDB
        self.conexion = conexion
        self.cursor = None
        # only close what we opened ourselves
        self.close_conexion = conexion is None
        self.with_commit = with_commit

    def __enter__(self):
        if self.conexion is None:
            self.conexion = sqlite3.connect(self.nomDB)
        self.cursor = self.conexion.cursor()
        return self.cursor

    def __exit__(self, type, value, traceback):
        if self.cursor:
            self.cursor.close()
        if not (self.conexion and self.close_conexion):
            return
        if self.with_commit:
            self.conexion.commit()
        self.conexion.close()
class ListSQL(object):
def __init__(self, nomDB, tabla="Datos", auto_closed=True):
self.nomDB = nomDB
self.tabla = tabla
if not auto_closed:
self.conexion = sqlite3.connect(nomDB)
atexit.register(self.close)
else:
self.conexion = None
self.test_exist()
self.liRowIDs = self.leeIDs()
self.auto_closed = auto_closed
def close(self):
if self.conexion:
self.conexion.close()
self.conexion = None
def get_cursor(self):
return Cursor(self.nomDB, conexion=self.conexion)
def get_cursor_commit(self):
return Cursor(self.nomDB, with_commit=True, conexion=self.conexion)
def test_exist(self):
with self.get_cursor() as cursor:
cursor.execute("pragma table_info(%s)" % self.tabla)
liCampos = cursor.fetchall()
if not liCampos:
sql = "CREATE TABLE %s( DATO BLOB );" % self.tabla
cursor.execute(sql)
def leeIDs(self):
with self.get_cursor() as cursor:
sql = "SELECT ROWID FROM %s" % self.tabla
cursor.execute(sql)
li = [rowid for (rowid,) in cursor.fetchall()]
return li
def append(self, valor):
with self.get_cursor_commit() as cursor:
sql = "INSERT INTO %s( DATO ) VALUES( ? )" % self.tabla
cursor.execute(sql, [var2blob(valor)])
self.liRowIDs.append(cursor.lastrowid)
def __setitem__(self, num, valor):
if num < len(self.liRowIDs):
with self.get_cursor_commit() as cursor:
sql = "UPDATE %s SET dato=? WHERE ROWID = ?" % self.tabla
rowid = self.liRowIDs[num]
cursor.execute(sql, [var2blob(valor), rowid])
def __delitem__(self, num):
if num < len(self.liRowIDs):
with self.get_cursor_commit() as cursor:
sql = "DELETE FROM %s WHERE ROWID= ?" % self.tabla
rowid = self.liRowIDs[num]
cursor.execute(
sql,
[
rowid,
],
)
del self.liRowIDs[num]
def __getitem__(self, num):
if num < len(self.liRowIDs):
with self.get_cursor() as cursor:
sql = "SELECT dato FROM %s WHERE ROWID= ?" % self.tabla
rowid = self.liRowIDs[num]
cursor.execute(
sql,
[
rowid,
],
)
li = cursor.fetchone()
return blob2var(li[0])
def __len__(self):
return len(self.liRowIDs)
def __iter__(self):
self.num = 0
self.max = len(self.liRowIDs)
return self
def next(self):
if self.num < self.max:
result = self.__getitem__(self.num)
self.num += 1
return result
else:
raise StopIteration
def pack(self):
with self.get_cursor_commit() as cursor:
cursor.execute("VACUUM")
def zap(self):
    """Delete every row, compact the file and reset the in-memory index."""
    with self.get_cursor_commit() as cursor:
        cursor.execute("DELETE FROM %s" % self.tabla)
        cursor.execute("VACUUM")
    self.liRowIDs = []
class DicRaw:
    """Persistent str -> object mapping backed by a two-column SQLite table.

    Values are pickled and base64-encoded into a TEXT column.

    NOTE(review): base64.encodestring/decodestring and the cPickle module are
    Python 2 APIs (removed from modern Python 3); this class predates a py3
    port.  Unpickling stored values executes arbitrary code, so the database
    file must be trusted.
    """

    def __init__(self, nomDB, tabla="Data"):
        self.table = tabla
        self._conexion = sqlite3.connect(nomDB)
        # Ensure the connection is released even if close() is never called.
        atexit.register(self.close)
        cursor = self._conexion.cursor()
        # Create the table on first use.
        cursor.execute("pragma table_info(%s)" % tabla)
        if not cursor.fetchall():
            sql = "CREATE TABLE %s( KEY TEXT PRIMARY KEY, VALUE TEXT );" % tabla
            cursor.execute(sql)
            self._conexion.commit()
        cursor.close()

    def __setitem__(self, key, obj):
        """Insert or update the pickled value stored under str(key)."""
        key = str(key)
        if self.__contains__(key):
            sql = "UPDATE %s SET VALUE=? WHERE KEY = ?" % self.table
        else:
            sql = "INSERT INTO %s (VALUE,KEY) values(?,?)" % self.table
        cursor = self._conexion.cursor()
        dato = base64.encodestring(cPickle.dumps(obj))
        cursor.execute(sql, (dato, key))
        cursor.close()
        self._conexion.commit()

    def __getitem__(self, key):
        """Return the unpickled value, or None when the key is absent."""
        cursor = self._conexion.cursor()
        sql = "SELECT VALUE FROM %s WHERE KEY= ?" % self.table
        cursor.execute(sql, (key,))
        li = cursor.fetchone()
        cursor.close()
        if not li:
            return None
        dato = base64.decodestring(li[0])
        return cPickle.loads(dato)

    def __delitem__(self, key):
        """Delete *key*; a missing key is a silent no-op."""
        cursor = self._conexion.cursor()
        sql = "DELETE FROM %s WHERE KEY= ?" % self.table
        cursor.execute(sql, (key,))
        cursor.close()
        self._conexion.commit()

    def __contains__(self, key):
        cursor = self._conexion.cursor()
        sql = "SELECT KEY FROM %s WHERE KEY= ?" % self.table
        cursor.execute(sql, (key,))
        li = cursor.fetchone()
        cursor.close()
        return True if li else False

    def close(self):
        """Close the connection; safe to call more than once."""
        if self._conexion:
            self._conexion.close()
            self._conexion = None

    def get(self, key, default):
        """Like dict.get().

        NOTE(review): uses truthiness, so a stored falsy value (0, "", [])
        is indistinguishable from a missing key and returns *default*.
        """
        v = self.__getitem__(key)
        return v if v else default

    def pack(self):
        """Reclaim unused database space (SQLite VACUUM)."""
        cursor = self._conexion.cursor()
        cursor.execute("VACUUM")
        cursor.close()
        self._conexion.commit()

    def __len__(self):
        return len(self.keys())

    def keys(self):
        """Return every key currently stored (queries the database)."""
        cursor = self._conexion.cursor()
        sql = "SELECT KEY FROM %s" % self.table
        cursor.execute(sql)
        liKeys = [reg[0] for reg in cursor.fetchall()]
        cursor.close()
        return liKeys

    def __enter__(self):
        return self

    def __exit__(self, xtype, value, traceback):
        self.close()
class DicBLOB(object):
    """Persistent mapping of string keys to raw binary blobs in SQLite.

    Keys are mirrored in an in-memory set, so membership tests, len() and
    keys() never touch the database.
    """

    def __init__(self, nomDB, tabla="Datos"):
        self._conexion = sqlite3.connect(nomDB)
        # Release the connection at interpreter exit even without close().
        atexit.register(self.close)
        cur = self._conexion.cursor()
        cur.execute("pragma table_info(%s)" % tabla)
        exists = bool(cur.fetchall())
        if not exists:
            cur.execute("CREATE TABLE %s( CLAVE TEXT PRIMARY KEY, DATO BLOB );" % tabla)
            self._conexion.commit()
        cur.close()
        self.stdic = set()
        self.tabla = tabla
        self.leeClaves()

    def leeClaves(self):
        """Populate the in-memory key cache from the table."""
        cur = self._conexion.cursor()
        cur.execute("SELECT clave FROM %s" % self.tabla)
        for (clave,) in cur.fetchall():
            self.stdic.add(clave)
        cur.close()

    def __contains__(self, clave):
        return clave in self.stdic

    def __setitem__(self, clave, wav):
        """Insert or update the blob stored under *clave*."""
        cur = self._conexion.cursor()
        params = [sqlite3.Binary(wav), clave]
        if clave in self.stdic:
            sql = "UPDATE %s SET dato=? WHERE clave = ?" % self.tabla
        else:
            sql = "INSERT INTO %s (dato,clave) values(?,?)" % self.tabla
            self.stdic.add(clave)
        cur.execute(sql, params)
        cur.close()
        self._conexion.commit()

    def __delitem__(self, clave):
        """Delete *clave* from the table and the key cache."""
        cur = self._conexion.cursor()
        cur.execute("DELETE FROM %s WHERE clave= ?" % self.tabla, [clave])
        cur.close()
        self._conexion.commit()
        self.stdic.remove(clave)

    def __getitem__(self, clave):
        """Return the stored blob, or None when the key is unknown."""
        if clave not in self.stdic:
            return None
        cur = self._conexion.cursor()
        cur.execute("SELECT dato FROM %s WHERE clave= ?" % self.tabla, [clave])
        row = cur.fetchone()
        cur.close()
        return row[0]

    def __len__(self):
        return len(self.stdic)

    def close(self):
        """Close the connection; safe to call more than once."""
        if self._conexion:
            self._conexion.close()
            self._conexion = None

    def keys(self):
        return list(self.stdic)

    def get(self, clave, default):
        """Return the blob under *clave*, or *default* when absent."""
        return self.__getitem__(clave) if clave in self.stdic else default
class Timekeeper:
    """Tiny one-shot stopwatch: start() marks a moment, stop() reports it."""

    def __init__(self):
        self._begin = None

    def start(self):
        """Begin (or restart) timing from now."""
        self._begin = time.time()

    def stop(self):
        """Return seconds elapsed since start(), or None if not started.

        Stopping resets the stopwatch, so a second stop() without an
        intervening start() also yields None.
        """
        started = self._begin
        if not started:
            return None
        self._begin = None
        return time.time() - started
class OpenCodec:
    """Context manager that opens a text file with an auto-detected encoding.

    The first ~500 lines are fed to chardet to guess the charset; the file is
    then reopened for reading with that encoding, skipping undecodable bytes
    (the "ignore" error handler).  Falls back to latin-1 when detection fails.
    """

    def __init__(self, path):
        u = chardet.universaldetector.UniversalDetector()
        # Bug fix: chardet's feed() requires bytes, so the probe pass must
        # read the file in binary mode (the old code opened it in text mode
        # and also never closed the probe handle).
        with open(path, "rb") as f:
            for n, x in enumerate(f):
                u.feed(x)
                if u.done or n == 500:
                    break
        u.close()
        # chardet stores an explicit {"encoding": None} when undecided, so
        # .get()'s default alone never triggers — fall back explicitly.
        encoding = u.result.get("encoding") or "latin-1"
        self.f = codecs.open(path, "r", encoding, "ignore")

    def __enter__(self):
        return self.f

    def __exit__(self, xtype, value, traceback):
        self.f.close()
def txt_encoding(txt):
    """Best-effort charset detection for an in-memory buffer.

    *txt* must be bytes-like (chardet's feed() rejects str).  Returns
    "latin-1" when detection fails.
    """
    u = chardet.universaldetector.UniversalDetector()
    u.feed(txt)
    u.close()
    # Bug fix: chardet reports an explicit None when undecided, which .get()'s
    # default does not cover — use `or` to guarantee a usable codec name.
    return u.result.get("encoding") or "latin-1"
def file_encoding(fich, chunk=3000):
    """Detect the charset of file *fich* from its first *chunk* bytes.

    Returns "ascii" when detection fails.
    """
    # Bug fix: read raw bytes — chardet's feed() cannot consume the str that
    # a text-mode open would produce.
    with open(fich, "rb") as f:
        u = chardet.universaldetector.UniversalDetector()
        u.feed(f.read(chunk))
        u.close()
        # chardet stores an explicit None when undecided; `or` covers it.
        return u.result.get("encoding") or "ascii"
class RowidReader:
    """Stream the ROWIDs of a table into a caller-supplied list from a
    background thread, so very large tables can be browsed while loading.
    """

    def __init__(self, nomFichero, tabla):
        self.nomFichero = nomFichero  # path of the SQLite database file
        self.tabla = tabla            # table whose ROWIDs are fetched
        self.where = None             # optional raw SQL WHERE clause
        self.order = None             # optional raw SQL ORDER BY clause
        self.running = False
        self.liRowids = []
        self.chunk = 2000             # fetchmany() size after the first chunk

    def setOrder(self, order):
        self.order = order

    def setWhere(self, where):
        self.where = where

    def run(self, liRowids, filter, order):
        """Start (or restart) the background fetch, appending into *liRowids*."""
        self.stopnow()
        self.where = filter
        self.order = order
        self.running = True
        # NOTE(review): self.stop (the flag) is first created here, so
        # stopnow() before the first run relies on self.running being False.
        self.stop = False
        self.liRowids = liRowids
        self.lock = threading.Lock()
        self.thread = threading.Thread(target=self._run_thread)
        self.thread.daemon = True
        self.thread.start()

    def _run_thread(self):
        """Worker: fetch ROWIDs in chunks, extending the list under the lock."""
        conexion = sqlite3.connect(self.nomFichero)
        sql = "SELECT ROWID FROM %s" % self.tabla
        # NOTE(review): where/order are interpolated raw into the SQL; they
        # are assumed to come from trusted code, not end-user input.
        if self.where:
            sql += " WHERE %s" % self.where
        if self.order:
            sql += " ORDER BY %s" % self.order
        cursor = conexion.cursor()
        cursor.execute(sql)
        # First chunk is small (randomized 100-300) so callers see rows fast.
        ch = random.randint(100, 300)
        while not self.stop:
            li = cursor.fetchmany(ch)
            if li:
                self.lock.acquire()
                self.liRowids.extend([x[0] for x in li])
                self.lock.release()
            if len(li) < ch:
                break  # short read: the query is exhausted
            ch = self.chunk
        cursor.close()
        conexion.close()
        self.running = False

    def terminado(self):
        """True once the background fetch finished (or was never started)."""
        return not self.running

    def stopnow(self):
        """Ask the worker thread to stop and wait for it to exit."""
        if self.running:
            self.stop = True
            self.thread.join()

    def reccount(self):
        # NOTE(review): reads the shared list length without taking the lock;
        # len() on a list is atomic in CPython, so this is a deliberate
        # shortcut rather than a data race on the count itself.
        return len(self.liRowids)
def is64Windows():
    """Return True when running on 64-bit Windows (WOW64 environment marker)."""
    marker = "PROGRAMFILES(X86)"
    return marker in os.environ
class Log:
    """Minimal append-only text logger; each write() appends to the log file."""

    def __init__(self, logname):
        self.logname = logname  # path of the log file (created on first write)

    def write(self, buf):
        """Append *buf* to the log file.

        Uses a context manager so the handle is closed even when the write
        raises (the previous open/write/close triplet leaked on error).
        """
        with open(self.logname, "at") as ferr:
            ferr.write(buf)
|
protos | image_resizer_pb2 | # Generated by the protocol buffer compiler. DO NOT EDIT!
# source: object_detection/protos/image_resizer.proto
# Machine-generated protobuf bindings (see the "DO NOT EDIT" header above):
# hand edits would be lost on the next protoc run, so only explanatory
# comments are added here.
import sys

# On Python 2 the serialized descriptor below is already bytes; on Python 3
# it must be re-encoded from the latin1 str literal.
_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1"))
from google.protobuf import descriptor as _descriptor
from google.protobuf import descriptor_pb2
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
from google.protobuf.internal import enum_type_wrapper

# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
DESCRIPTOR = _descriptor.FileDescriptor(
    name="object_detection/protos/image_resizer.proto",
    package="object_detection.protos",
    syntax="proto2",
    serialized_pb=_b(
        '\n+object_detection/protos/image_resizer.proto\x12\x17object_detection.protos"\xc6\x01\n\x0cImageResizer\x12T\n\x19keep_aspect_ratio_resizer\x18\x01 \x01(\x0b\x32/.object_detection.protos.KeepAspectRatioResizerH\x00\x12I\n\x13\x66ixed_shape_resizer\x18\x02 \x01(\x0b\x32*.object_detection.protos.FixedShapeResizerH\x00\x42\x15\n\x13image_resizer_oneof"\x97\x01\n\x16KeepAspectRatioResizer\x12\x1a\n\rmin_dimension\x18\x01 \x01(\x05:\x03\x36\x30\x30\x12\x1b\n\rmax_dimension\x18\x02 \x01(\x05:\x04\x31\x30\x32\x34\x12\x44\n\rresize_method\x18\x03 \x01(\x0e\x32#.object_detection.protos.ResizeType:\x08\x42ILINEAR"\x82\x01\n\x11\x46ixedShapeResizer\x12\x13\n\x06height\x18\x01 \x01(\x05:\x03\x33\x30\x30\x12\x12\n\x05width\x18\x02 \x01(\x05:\x03\x33\x30\x30\x12\x44\n\rresize_method\x18\x03 \x01(\x0e\x32#.object_detection.protos.ResizeType:\x08\x42ILINEAR*G\n\nResizeType\x12\x0c\n\x08\x42ILINEAR\x10\x00\x12\x14\n\x10NEAREST_NEIGHBOR\x10\x01\x12\x0b\n\x07\x42ICUBIC\x10\x02\x12\x08\n\x04\x41REA\x10\x03'
    ),
)
_RESIZETYPE = _descriptor.EnumDescriptor(
    name="ResizeType",
    full_name="object_detection.protos.ResizeType",
    filename=None,
    file=DESCRIPTOR,
    values=[
        _descriptor.EnumValueDescriptor(
            name="BILINEAR", index=0, number=0, options=None, type=None
        ),
        _descriptor.EnumValueDescriptor(
            name="NEAREST_NEIGHBOR", index=1, number=1, options=None, type=None
        ),
        _descriptor.EnumValueDescriptor(
            name="BICUBIC", index=2, number=2, options=None, type=None
        ),
        _descriptor.EnumValueDescriptor(
            name="AREA", index=3, number=3, options=None, type=None
        ),
    ],
    containing_type=None,
    options=None,
    serialized_start=560,
    serialized_end=631,
)
_sym_db.RegisterEnumDescriptor(_RESIZETYPE)
ResizeType = enum_type_wrapper.EnumTypeWrapper(_RESIZETYPE)
# Module-level aliases for the ResizeType enum values.
BILINEAR = 0
NEAREST_NEIGHBOR = 1
BICUBIC = 2
AREA = 3
_IMAGERESIZER = _descriptor.Descriptor(
    name="ImageResizer",
    full_name="object_detection.protos.ImageResizer",
    filename=None,
    file=DESCRIPTOR,
    containing_type=None,
    fields=[
        _descriptor.FieldDescriptor(
            name="keep_aspect_ratio_resizer",
            full_name="object_detection.protos.ImageResizer.keep_aspect_ratio_resizer",
            index=0,
            number=1,
            type=11,
            cpp_type=10,
            label=1,
            has_default_value=False,
            default_value=None,
            message_type=None,
            enum_type=None,
            containing_type=None,
            is_extension=False,
            extension_scope=None,
            options=None,
            file=DESCRIPTOR,
        ),
        _descriptor.FieldDescriptor(
            name="fixed_shape_resizer",
            full_name="object_detection.protos.ImageResizer.fixed_shape_resizer",
            index=1,
            number=2,
            type=11,
            cpp_type=10,
            label=1,
            has_default_value=False,
            default_value=None,
            message_type=None,
            enum_type=None,
            containing_type=None,
            is_extension=False,
            extension_scope=None,
            options=None,
            file=DESCRIPTOR,
        ),
    ],
    extensions=[],
    nested_types=[],
    enum_types=[],
    options=None,
    is_extendable=False,
    syntax="proto2",
    extension_ranges=[],
    oneofs=[
        _descriptor.OneofDescriptor(
            name="image_resizer_oneof",
            full_name="object_detection.protos.ImageResizer.image_resizer_oneof",
            index=0,
            containing_type=None,
            fields=[],
        ),
    ],
    serialized_start=73,
    serialized_end=271,
)
_KEEPASPECTRATIORESIZER = _descriptor.Descriptor(
    name="KeepAspectRatioResizer",
    full_name="object_detection.protos.KeepAspectRatioResizer",
    filename=None,
    file=DESCRIPTOR,
    containing_type=None,
    fields=[
        _descriptor.FieldDescriptor(
            name="min_dimension",
            full_name="object_detection.protos.KeepAspectRatioResizer.min_dimension",
            index=0,
            number=1,
            type=5,
            cpp_type=1,
            label=1,
            has_default_value=True,
            default_value=600,
            message_type=None,
            enum_type=None,
            containing_type=None,
            is_extension=False,
            extension_scope=None,
            options=None,
            file=DESCRIPTOR,
        ),
        _descriptor.FieldDescriptor(
            name="max_dimension",
            full_name="object_detection.protos.KeepAspectRatioResizer.max_dimension",
            index=1,
            number=2,
            type=5,
            cpp_type=1,
            label=1,
            has_default_value=True,
            default_value=1024,
            message_type=None,
            enum_type=None,
            containing_type=None,
            is_extension=False,
            extension_scope=None,
            options=None,
            file=DESCRIPTOR,
        ),
        _descriptor.FieldDescriptor(
            name="resize_method",
            full_name="object_detection.protos.KeepAspectRatioResizer.resize_method",
            index=2,
            number=3,
            type=14,
            cpp_type=8,
            label=1,
            has_default_value=True,
            default_value=0,
            message_type=None,
            enum_type=None,
            containing_type=None,
            is_extension=False,
            extension_scope=None,
            options=None,
            file=DESCRIPTOR,
        ),
    ],
    extensions=[],
    nested_types=[],
    enum_types=[],
    options=None,
    is_extendable=False,
    syntax="proto2",
    extension_ranges=[],
    oneofs=[],
    serialized_start=274,
    serialized_end=425,
)
_FIXEDSHAPERESIZER = _descriptor.Descriptor(
    name="FixedShapeResizer",
    full_name="object_detection.protos.FixedShapeResizer",
    filename=None,
    file=DESCRIPTOR,
    containing_type=None,
    fields=[
        _descriptor.FieldDescriptor(
            name="height",
            full_name="object_detection.protos.FixedShapeResizer.height",
            index=0,
            number=1,
            type=5,
            cpp_type=1,
            label=1,
            has_default_value=True,
            default_value=300,
            message_type=None,
            enum_type=None,
            containing_type=None,
            is_extension=False,
            extension_scope=None,
            options=None,
            file=DESCRIPTOR,
        ),
        _descriptor.FieldDescriptor(
            name="width",
            full_name="object_detection.protos.FixedShapeResizer.width",
            index=1,
            number=2,
            type=5,
            cpp_type=1,
            label=1,
            has_default_value=True,
            default_value=300,
            message_type=None,
            enum_type=None,
            containing_type=None,
            is_extension=False,
            extension_scope=None,
            options=None,
            file=DESCRIPTOR,
        ),
        _descriptor.FieldDescriptor(
            name="resize_method",
            full_name="object_detection.protos.FixedShapeResizer.resize_method",
            index=2,
            number=3,
            type=14,
            cpp_type=8,
            label=1,
            has_default_value=True,
            default_value=0,
            message_type=None,
            enum_type=None,
            containing_type=None,
            is_extension=False,
            extension_scope=None,
            options=None,
            file=DESCRIPTOR,
        ),
    ],
    extensions=[],
    nested_types=[],
    enum_types=[],
    options=None,
    is_extendable=False,
    syntax="proto2",
    extension_ranges=[],
    oneofs=[],
    serialized_start=428,
    serialized_end=558,
)
# Wire up cross-references that protoc emits after the descriptor literals.
_IMAGERESIZER.fields_by_name[
    "keep_aspect_ratio_resizer"
].message_type = _KEEPASPECTRATIORESIZER
_IMAGERESIZER.fields_by_name["fixed_shape_resizer"].message_type = _FIXEDSHAPERESIZER
_IMAGERESIZER.oneofs_by_name["image_resizer_oneof"].fields.append(
    _IMAGERESIZER.fields_by_name["keep_aspect_ratio_resizer"]
)
_IMAGERESIZER.fields_by_name[
    "keep_aspect_ratio_resizer"
].containing_oneof = _IMAGERESIZER.oneofs_by_name["image_resizer_oneof"]
_IMAGERESIZER.oneofs_by_name["image_resizer_oneof"].fields.append(
    _IMAGERESIZER.fields_by_name["fixed_shape_resizer"]
)
_IMAGERESIZER.fields_by_name[
    "fixed_shape_resizer"
].containing_oneof = _IMAGERESIZER.oneofs_by_name["image_resizer_oneof"]
_KEEPASPECTRATIORESIZER.fields_by_name["resize_method"].enum_type = _RESIZETYPE
_FIXEDSHAPERESIZER.fields_by_name["resize_method"].enum_type = _RESIZETYPE
DESCRIPTOR.message_types_by_name["ImageResizer"] = _IMAGERESIZER
DESCRIPTOR.message_types_by_name["KeepAspectRatioResizer"] = _KEEPASPECTRATIORESIZER
DESCRIPTOR.message_types_by_name["FixedShapeResizer"] = _FIXEDSHAPERESIZER
DESCRIPTOR.enum_types_by_name["ResizeType"] = _RESIZETYPE
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
# Concrete message classes generated via the reflection metaclass.
ImageResizer = _reflection.GeneratedProtocolMessageType(
    "ImageResizer",
    (_message.Message,),
    dict(
        DESCRIPTOR=_IMAGERESIZER,
        __module__="object_detection.protos.image_resizer_pb2",
        # @@protoc_insertion_point(class_scope:object_detection.protos.ImageResizer)
    ),
)
_sym_db.RegisterMessage(ImageResizer)
KeepAspectRatioResizer = _reflection.GeneratedProtocolMessageType(
    "KeepAspectRatioResizer",
    (_message.Message,),
    dict(
        DESCRIPTOR=_KEEPASPECTRATIORESIZER,
        __module__="object_detection.protos.image_resizer_pb2",
        # @@protoc_insertion_point(class_scope:object_detection.protos.KeepAspectRatioResizer)
    ),
)
_sym_db.RegisterMessage(KeepAspectRatioResizer)
FixedShapeResizer = _reflection.GeneratedProtocolMessageType(
    "FixedShapeResizer",
    (_message.Message,),
    dict(
        DESCRIPTOR=_FIXEDSHAPERESIZER,
        __module__="object_detection.protos.image_resizer_pb2",
        # @@protoc_insertion_point(class_scope:object_detection.protos.FixedShapeResizer)
    ),
)
_sym_db.RegisterMessage(FixedShapeResizer)
# @@protoc_insertion_point(module_scope)
|
Notifications | setup | #
# Copyright (C) 2009-2010 Pedro Algarvio <pedro@algarvio.me>
#
# Basic plugin template created by:
# Copyright (C) 2008 Martijn Voncken <mvoncken@gmail.com>
# Copyright (C) 2007-2009 Andrew Resch <andrewresch@gmail.com>
# Copyright (C) 2009 Damien Churchill <damoxc@gmail.com>
#
# This file is part of Deluge and is licensed under GNU General Public License 3.0, or later, with
# the additional special exception to link portions of this program with the OpenSSL library.
# See LICENSE for more details.
#
from setuptools import find_packages, setup
__plugin_name__ = "Notifications"
__author__ = "Pedro Algarvio"
__author_email__ = "pedro@algarvio.me"
__version__ = "0.4"
__url__ = "http://dev.deluge-torrent.org/"
__license__ = "GPLv3"
__description__ = "Plugin which provides notifications to Deluge."
__long_description__ = """
Plugin which provides notifications to Deluge
Email, Popup, Blink and Sound notifications
The plugin also allows other plugins to make
use of itself for their own custom notifications
"""
__pkg_data__ = {"deluge_" + __plugin_name__.lower(): ["data/*"]}
setup(
name=__plugin_name__,
version=__version__,
description=__description__,
author=__author__,
author_email=__author_email__,
url=__url__,
license=__license__,
long_description=__long_description__ if __long_description__ else __description__,
packages=find_packages(),
package_data=__pkg_data__,
entry_points="""
[deluge.plugin.core]
%s = deluge_%s:CorePlugin
[deluge.plugin.gtk3ui]
%s = deluge_%s:GtkUIPlugin
[deluge.plugin.web]
%s = deluge_%s:WebUIPlugin
"""
% ((__plugin_name__, __plugin_name__.lower()) * 3),
)
|
deluge-autoadd | gtkui | #
# Copyright (C) 2009 GazpachoKing <chase.sterling@gmail.com>
#
# Basic plugin template created by:
# Copyright (C) 2008 Martijn Voncken <mvoncken@gmail.com>
# Copyright (C) 2007-2009 Andrew Resch <andrewresch@gmail.com>
# Copyright (C) 2009 Damien Churchill <damoxc@gmail.com>
#
# This file is part of Deluge and is licensed under GNU General Public License 3.0, or later, with
# the additional special exception to link portions of this program with the OpenSSL library.
# See LICENSE for more details.
#
import logging
import os
import gi # isort:skip (Required before Gtk import).
gi.require_version("Gtk", "3.0")
# isort:imports-thirdparty
import deluge.common
import deluge.component as component
from deluge.plugins.pluginbase import Gtk3PluginBase
from deluge.ui.client import client
from deluge.ui.gtk3 import dialogs
from gi.repository import Gtk
# isort:imports-firstparty
# isort:imports-localfolder
from .common import get_resource
log = logging.getLogger(__name__)
class IncompatibleOption(Exception):
    """Raised by OptionsDialog when two watch-folder options cannot be combined."""
class OptionsDialog:
    """Modal dialog for adding or editing a single AutoAdd watch-folder entry."""

    # Spin buttons holding float values (each has a matching "<id>_toggle").
    spin_ids = ["max_download_speed", "max_upload_speed", "stop_ratio"]
    # Spin buttons holding integer values.
    spin_int_ids = ["max_upload_slots", "max_connections"]
    # Checkbox option ids (each has a matching "<id>_toggle").
    chk_ids = [
        "stop_at_ratio",
        "remove_at_ratio",
        "move_completed",
        "add_paused",
        "auto_managed",
        "queue_to_top",
    ]

    def __init__(self):
        self.accounts = Gtk.ListStore(str)
        self.labels = Gtk.ListStore(str)
        self.core_config = {}

    def show(self, options=None, watchdir_id=None):
        """Build and run the dialog.

        With *watchdir_id* an existing watchdir is edited (Apply shown);
        without it a new one is added (Add shown).
        """
        if options is None:
            options = {}
        self.builder = Gtk.Builder()
        self.builder.add_from_file(get_resource("autoadd_options.ui"))
        self.builder.connect_signals(
            {
                "on_opts_add": self.on_add,
                "on_opts_apply": self.on_apply,
                "on_opts_cancel": self.on_cancel,
                "on_options_dialog_close": self.on_cancel,
                "on_toggle_toggled": self.on_toggle_toggled,
            }
        )
        self.dialog = self.builder.get_object("options_dialog")
        self.dialog.set_transient_for(component.get("Preferences").pref_dialog)
        if watchdir_id:
            # We have an existing watchdir_id, we are editing
            self.builder.get_object("opts_add_button").hide()
            self.builder.get_object("opts_apply_button").show()
            self.watchdir_id = watchdir_id
        else:
            # We don't have an id, adding
            self.builder.get_object("opts_add_button").show()
            self.builder.get_object("opts_apply_button").hide()
            self.watchdir_id = None
        self.load_options(options)
        self.dialog.run()

    def load_options(self, options):
        """Populate every widget from *options*, falling back to the daemon's
        core config (fetched asynchronously) for values not present."""
        self.builder.get_object("enabled").set_active(options.get("enabled", True))
        self.builder.get_object("append_extension_toggle").set_active(
            options.get("append_extension_toggle", False)
        )
        self.builder.get_object("append_extension").set_text(
            options.get("append_extension", ".added")
        )
        self.builder.get_object("download_location_toggle").set_active(
            options.get("download_location_toggle", False)
        )
        self.builder.get_object("copy_torrent_toggle").set_active(
            options.get("copy_torrent_toggle", False)
        )
        self.builder.get_object("delete_copy_torrent_toggle").set_active(
            options.get("delete_copy_torrent_toggle", False)
        )
        self.builder.get_object("seed_mode").set_active(options.get("seed_mode", False))
        self.accounts.clear()
        self.labels.clear()
        combobox = self.builder.get_object("OwnerCombobox")
        combobox_render = Gtk.CellRendererText()
        combobox.pack_start(combobox_render, True)
        combobox.add_attribute(combobox_render, "text", 0)
        combobox.set_model(self.accounts)
        label_widget = self.builder.get_object("label")
        label_widget.get_child().set_text(options.get("label", ""))
        label_widget.set_model(self.labels)
        label_widget.set_entry_text_column(0)
        self.builder.get_object("label_toggle").set_active(
            options.get("label_toggle", False)
        )
        for spin_id in self.spin_ids + self.spin_int_ids:
            self.builder.get_object(spin_id).set_value(options.get(spin_id, 0))
            self.builder.get_object(spin_id + "_toggle").set_active(
                options.get(spin_id + "_toggle", False)
            )
        for chk_id in self.chk_ids:
            self.builder.get_object(chk_id).set_active(bool(options.get(chk_id, True)))
            self.builder.get_object(chk_id + "_toggle").set_active(
                options.get(chk_id + "_toggle", False)
            )
        # Radio-style "isnt_" widgets represent the negated option state.
        if not options.get("add_paused", True):
            self.builder.get_object("isnt_add_paused").set_active(True)
        if not options.get("queue_to_top", True):
            self.builder.get_object("isnt_queue_to_top").set_active(True)
        if not options.get("auto_managed", True):
            self.builder.get_object("isnt_auto_managed").set_active(True)
        # Path fields use a folder chooser locally and a text entry for
        # remote daemons (no shared filesystem).
        for field in [
            "move_completed_path",
            "path",
            "download_location",
            "copy_torrent",
        ]:
            if client.is_localhost():
                self.builder.get_object(field + "_chooser").set_current_folder(
                    options.get(field, os.path.expanduser("~"))
                )
                self.builder.get_object(field + "_chooser").show()
                self.builder.get_object(field + "_entry").hide()
            else:
                self.builder.get_object(field + "_entry").set_text(
                    options.get(field, "")
                )
                self.builder.get_object(field + "_entry").show()
                self.builder.get_object(field + "_chooser").hide()
        self.set_sensitive()

        def on_core_config(config):
            # Fill path widgets from the daemon's core config when the dialog
            # was opened without explicit options.
            if client.is_localhost():
                self.builder.get_object("download_location_chooser").set_current_folder(
                    options.get("download_location", config["download_location"])
                )
                if options.get("move_completed_toggle", config["move_completed"]):
                    self.builder.get_object("move_completed_toggle").set_active(True)
                    self.builder.get_object(
                        "move_completed_path_chooser"
                    ).set_current_folder(
                        options.get(
                            "move_completed_path", config["move_completed_path"]
                        )
                    )
                if options.get("copy_torrent_toggle", config["copy_torrent_file"]):
                    self.builder.get_object("copy_torrent_toggle").set_active(True)
                    self.builder.get_object("copy_torrent_chooser").set_current_folder(
                        options.get("copy_torrent", config["torrentfiles_location"])
                    )
            else:
                self.builder.get_object("download_location_entry").set_text(
                    options.get("download_location", config["download_location"])
                )
                if options.get("move_completed_toggle", config["move_completed"]):
                    self.builder.get_object("move_completed_toggle").set_active(
                        options.get("move_completed_toggle", False)
                    )
                    self.builder.get_object("move_completed_path_entry").set_text(
                        options.get(
                            "move_completed_path", config["move_completed_path"]
                        )
                    )
                if options.get("copy_torrent_toggle", config["copy_torrent_file"]):
                    self.builder.get_object("copy_torrent_toggle").set_active(True)
                    self.builder.get_object("copy_torrent_entry").set_text(
                        options.get("copy_torrent", config["torrentfiles_location"])
                    )
            if options.get(
                "delete_copy_torrent_toggle", config["del_copy_torrent_file"]
            ):
                self.builder.get_object("delete_copy_torrent_toggle").set_active(True)

        if not options:
            client.core.get_config().addCallback(on_core_config)

        def on_accounts(accounts, owner):
            # Populate the owner combobox, pre-selecting *owner*.
            log.debug("Got Accounts")
            selected_iter = None
            for account in accounts:
                acc_iter = self.accounts.append()
                self.accounts.set_value(acc_iter, 0, account["username"])
                if account["username"] == owner:
                    selected_iter = acc_iter
            self.builder.get_object("OwnerCombobox").set_active_iter(selected_iter)

        def on_accounts_failure(failure):
            # Non-admin daemons refuse the account list: fall back to the
            # authenticated user and lock the combobox.
            log.debug("Failed to get accounts!!! %s", failure)
            acc_iter = self.accounts.append()
            self.accounts.set_value(acc_iter, 0, client.get_auth_user())
            self.builder.get_object("OwnerCombobox").set_active(0)
            self.builder.get_object("OwnerCombobox").set_sensitive(False)

        def on_labels(labels):
            # Fill the label combobox from the Label plugin.
            log.debug("Got Labels: %s", labels)
            for label in labels:
                self.labels.set_value(self.labels.append(), 0, label)
            label_widget = self.builder.get_object("label")
            label_widget.set_model(self.labels)
            label_widget.set_entry_text_column(0)

        def on_failure(failure):
            log.exception(failure)

        def on_get_enabled_plugins(result):
            # Only show the label frame when the Label plugin is enabled.
            if "Label" in result:
                self.builder.get_object("label_frame").show()
                client.label.get_labels().addCallback(on_labels).addErrback(on_failure)
            else:
                self.builder.get_object("label_frame").hide()
                self.builder.get_object("label_toggle").set_active(False)

        client.core.get_enabled_plugins().addCallback(on_get_enabled_plugins)
        if client.get_auth_level() == deluge.common.AUTH_LEVEL_ADMIN:
            client.core.get_known_accounts().addCallback(
                on_accounts, options.get("owner", client.get_auth_user())
            ).addErrback(on_accounts_failure)
        else:
            acc_iter = self.accounts.append()
            self.accounts.set_value(acc_iter, 0, client.get_auth_user())
            self.builder.get_object("OwnerCombobox").set_active(0)
            self.builder.get_object("OwnerCombobox").set_sensitive(False)

    def set_sensitive(self):
        """Refresh widget sensitivity from the current state of every toggle."""
        maintoggles = [
            "download_location",
            "append_extension",
            "move_completed",
            "label",
            "max_download_speed",
            "max_upload_speed",
            "max_connections",
            "max_upload_slots",
            "add_paused",
            "auto_managed",
            "stop_at_ratio",
            "queue_to_top",
            "copy_torrent",
        ]
        for maintoggle in maintoggles:
            self.on_toggle_toggled(self.builder.get_object(maintoggle + "_toggle"))

    def on_toggle_toggled(self, tb):
        """Signal handler: enable/disable the widgets governed by toggle *tb*."""
        toggle = tb.get_name().replace("_toggle", "")
        isactive = tb.get_active()
        if toggle == "download_location":
            self.builder.get_object("download_location_chooser").set_sensitive(isactive)
            self.builder.get_object("download_location_entry").set_sensitive(isactive)
        elif toggle == "append_extension":
            self.builder.get_object("append_extension").set_sensitive(isactive)
        elif toggle == "copy_torrent":
            self.builder.get_object("copy_torrent_entry").set_sensitive(isactive)
            self.builder.get_object("copy_torrent_chooser").set_sensitive(isactive)
            self.builder.get_object("delete_copy_torrent_toggle").set_sensitive(
                isactive
            )
        elif toggle == "move_completed":
            self.builder.get_object("move_completed_path_chooser").set_sensitive(
                isactive
            )
            self.builder.get_object("move_completed_path_entry").set_sensitive(isactive)
            self.builder.get_object("move_completed").set_active(isactive)
        elif toggle == "label":
            self.builder.get_object("label").set_sensitive(isactive)
        elif toggle == "max_download_speed":
            self.builder.get_object("max_download_speed").set_sensitive(isactive)
        elif toggle == "max_upload_speed":
            self.builder.get_object("max_upload_speed").set_sensitive(isactive)
        elif toggle == "max_connections":
            self.builder.get_object("max_connections").set_sensitive(isactive)
        elif toggle == "max_upload_slots":
            self.builder.get_object("max_upload_slots").set_sensitive(isactive)
        elif toggle == "add_paused":
            self.builder.get_object("add_paused").set_sensitive(isactive)
            self.builder.get_object("isnt_add_paused").set_sensitive(isactive)
        elif toggle == "queue_to_top":
            self.builder.get_object("queue_to_top").set_sensitive(isactive)
            self.builder.get_object("isnt_queue_to_top").set_sensitive(isactive)
        elif toggle == "auto_managed":
            self.builder.get_object("auto_managed").set_sensitive(isactive)
            self.builder.get_object("isnt_auto_managed").set_sensitive(isactive)
        elif toggle == "stop_at_ratio":
            self.builder.get_object("remove_at_ratio_toggle").set_active(isactive)
            self.builder.get_object("stop_ratio_toggle").set_active(isactive)
            self.builder.get_object("stop_at_ratio").set_active(isactive)
            self.builder.get_object("stop_ratio").set_sensitive(isactive)
            self.builder.get_object("remove_at_ratio").set_sensitive(isactive)

    def on_apply(self, event=None):
        """Apply button: push the edited options for the existing watchdir."""
        try:
            options = self.generate_opts()
            client.autoadd.set_options(str(self.watchdir_id), options).addCallbacks(
                self.on_added, self.on_error_show
            )
        except IncompatibleOption as ex:
            dialogs.ErrorDialog(_("Incompatible Option"), str(ex), self.dialog).run()

    def on_error_show(self, result):
        """Errback: surface a daemon-side failure in an error dialog."""
        d = dialogs.ErrorDialog(_("Error"), result.value.message, self.dialog)
        result.cleanFailure()
        d.run()

    def on_added(self, result):
        """Callback: the watchdir was saved — close the dialog."""
        self.dialog.destroy()

    def on_add(self, event=None):
        """Add button: register a brand-new watchdir with the daemon."""
        try:
            options = self.generate_opts()
            client.autoadd.add(options).addCallbacks(self.on_added, self.on_error_show)
        except IncompatibleOption as ex:
            dialogs.ErrorDialog(_("Incompatible Option"), str(ex), self.dialog).run()

    def on_cancel(self, event=None):
        self.dialog.destroy()

    def generate_opts(self):
        """Collect the dialog state into an options dict.

        Raises:
            IncompatibleOption: when the watch folder and the ".torrent copy"
                folder are the same directory.
        """
        # generate options dict based on gtk objects
        options = {}
        options["enabled"] = self.builder.get_object("enabled").get_active()
        if client.is_localhost():
            options["path"] = self.builder.get_object("path_chooser").get_filename()
            options["download_location"] = self.builder.get_object(
                "download_location_chooser"
            ).get_filename()
            options["move_completed_path"] = self.builder.get_object(
                "move_completed_path_chooser"
            ).get_filename()
            options["copy_torrent"] = self.builder.get_object(
                "copy_torrent_chooser"
            ).get_filename()
        else:
            options["path"] = self.builder.get_object("path_entry").get_text()
            options["download_location"] = self.builder.get_object(
                "download_location_entry"
            ).get_text()
            options["move_completed_path"] = self.builder.get_object(
                "move_completed_path_entry"
            ).get_text()
            options["copy_torrent"] = self.builder.get_object(
                "copy_torrent_entry"
            ).get_text()
        options["label"] = (
            self.builder.get_object("label").get_child().get_text().lower()
        )
        options["append_extension"] = self.builder.get_object(
            "append_extension"
        ).get_text()
        options["owner"] = self.accounts[
            self.builder.get_object("OwnerCombobox").get_active()
        ][0]
        for key in [
            "append_extension_toggle",
            "download_location_toggle",
            "label_toggle",
            "copy_torrent_toggle",
            "delete_copy_torrent_toggle",
            "seed_mode",
        ]:
            options[key] = self.builder.get_object(key).get_active()
        for spin_id in self.spin_ids:
            options[spin_id] = self.builder.get_object(spin_id).get_value()
            options[spin_id + "_toggle"] = self.builder.get_object(
                spin_id + "_toggle"
            ).get_active()
        for spin_int_id in self.spin_int_ids:
            options[spin_int_id] = self.builder.get_object(
                spin_int_id
            ).get_value_as_int()
            options[spin_int_id + "_toggle"] = self.builder.get_object(
                spin_int_id + "_toggle"
            ).get_active()
        for chk_id in self.chk_ids:
            options[chk_id] = self.builder.get_object(chk_id).get_active()
            options[chk_id + "_toggle"] = self.builder.get_object(
                chk_id + "_toggle"
            ).get_active()
        if (
            options["copy_torrent_toggle"]
            and options["path"] == options["copy_torrent"]
        ):
            raise IncompatibleOption(
                _(
                    '"Watch Folder" directory and "Copy of .torrent'
                    ' files to" directory cannot be the same!'
                )
            )
        return options
class GtkUI(Gtk3PluginBase):
def enable(self):
    """Plugin start-up: register prefs hooks, subscribe to watchdir option
    change events and build the AutoAdd preferences page."""
    self.builder = Gtk.Builder()
    self.builder.add_from_file(get_resource("config.ui"))
    self.builder.connect_signals(self)
    self.opts_dialog = OptionsDialog()
    component.get("PluginManager").register_hook(
        "on_apply_prefs", self.on_apply_prefs
    )
    component.get("PluginManager").register_hook(
        "on_show_prefs", self.on_show_prefs
    )
    client.register_event_handler(
        "AutoaddOptionsChangedEvent", self.on_options_changed_event
    )
    self.watchdirs = {}
    # Scrollable tree view listing the configured watch folders.
    vbox = self.builder.get_object("watchdirs_vbox")
    sw = Gtk.ScrolledWindow()
    sw.set_shadow_type(Gtk.ShadowType.ETCHED_IN)
    sw.set_policy(Gtk.PolicyType.AUTOMATIC, Gtk.PolicyType.AUTOMATIC)
    vbox.pack_start(sw, True, True, 0)
    self.store = self.create_model()
    self.treeView = Gtk.TreeView(self.store)
    self.treeView.connect("cursor-changed", self.on_listitem_activated)
    self.treeView.connect("row-activated", self.on_edit_button_clicked)
    self.create_columns(self.treeView)
    sw.add(self.treeView)
    sw.show_all()
    component.get("Preferences").add_page(
        _("AutoAdd"), self.builder.get_object("prefs_box")
    )
def disable(self):
    """Plugin shutdown: remove the prefs page and unhook prefs callbacks."""
    component.get("Preferences").remove_page(_("AutoAdd"))
    plugin_mgr = component.get("PluginManager")
    plugin_mgr.deregister_hook("on_apply_prefs", self.on_apply_prefs)
    plugin_mgr.deregister_hook("on_show_prefs", self.on_show_prefs)
def create_model(self):
    """Build the ListStore backing the watchdir view: (id, enabled, owner, path)."""
    store = Gtk.ListStore(str, bool, str, str)
    for wid, wdata in self.watchdirs.items():
        row = [
            wid,
            wdata["enabled"],
            wdata.get("owner", "localclient"),
            wdata["path"],
        ]
        store.append(row)
    return store
def create_columns(self, treeview):
renderer_toggle = Gtk.CellRendererToggle()
column = Gtk.TreeViewColumn(
_("Active"), renderer_toggle, activatable=1, active=1
)
column.set_sort_column_id(1)
treeview.append_column(column)
tt = Gtk.Tooltip()
tt.set_text(_("Double-click to toggle"))
treeview.set_tooltip_cell(tt, None, None, renderer_toggle)
renderertext = Gtk.CellRendererText()
column = Gtk.TreeViewColumn(_("Owner"), renderertext, text=2)
column.set_sort_column_id(2)
treeview.append_column(column)
tt2 = Gtk.Tooltip()
tt2.set_text(_("Double-click to edit"))
treeview.set_has_tooltip(True)
renderertext = Gtk.CellRendererText()
column = Gtk.TreeViewColumn(_("Path"), renderertext, text=3)
column.set_sort_column_id(3)
treeview.append_column(column)
tt2 = Gtk.Tooltip()
tt2.set_text(_("Double-click to edit"))
treeview.set_has_tooltip(True)
def load_watchdir_list(self):
pass
def add_watchdir_entry(self):
pass
def on_add_button_clicked(self, event=None):
# display options_window
self.opts_dialog.show()
def on_remove_button_clicked(self, event=None):
tree, tree_id = self.treeView.get_selection().get_selected()
watchdir_id = str(self.store.get_value(tree_id, 0))
if watchdir_id:
client.autoadd.remove(watchdir_id)
def on_edit_button_clicked(self, event=None, a=None, col=None):
tree, tree_id = self.treeView.get_selection().get_selected()
watchdir_id = str(self.store.get_value(tree_id, 0))
if watchdir_id:
if col and col.get_title() == _("Active"):
if self.watchdirs[watchdir_id]["enabled"]:
client.autoadd.disable_watchdir(watchdir_id)
else:
client.autoadd.enable_watchdir(watchdir_id)
else:
self.opts_dialog.show(self.watchdirs[watchdir_id], watchdir_id)
def on_listitem_activated(self, treeview):
tree, tree_id = self.treeView.get_selection().get_selected()
if tree_id:
self.builder.get_object("edit_button").set_sensitive(True)
self.builder.get_object("remove_button").set_sensitive(True)
else:
self.builder.get_object("edit_button").set_sensitive(False)
self.builder.get_object("remove_button").set_sensitive(False)
def on_apply_prefs(self):
log.debug("applying prefs for AutoAdd")
for watchdir_id, watchdir in self.watchdirs.items():
client.autoadd.set_options(watchdir_id, watchdir)
def on_show_prefs(self):
client.autoadd.get_watchdirs().addCallback(self.cb_get_config)
def on_options_changed_event(self):
client.autoadd.get_watchdirs().addCallback(self.cb_get_config)
def cb_get_config(self, watchdirs):
"""callback for on show_prefs"""
log.trace("Got whatchdirs from core: %s", watchdirs)
self.watchdirs = watchdirs or {}
self.store.clear()
for watchdir_id, watchdir in self.watchdirs.items():
self.store.append(
[
watchdir_id,
watchdir["enabled"],
watchdir.get("owner", "localclient"),
watchdir["path"],
]
)
# Workaround for cached glade signal appearing when re-enabling plugin in same session
if self.builder.get_object("edit_button"):
# Disable the remove and edit buttons, because nothing in the store is selected
self.builder.get_object("remove_button").set_sensitive(False)
self.builder.get_object("edit_button").set_sensitive(False)
|
draftfunctions | scale | # ***************************************************************************
# * Copyright (c) 2009, 2010 Yorik van Havre <yorik@uncreated.net> *
# * Copyright (c) 2009, 2010 Ken Cline <cline@frii.com> *
# * Copyright (c) 2020 FreeCAD Developers *
# * *
# * This program is free software; you can redistribute it and/or modify *
# * it under the terms of the GNU Lesser General Public License (LGPL) *
# * as published by the Free Software Foundation; either version 2 of *
# * the License, or (at your option) any later version. *
# * for detail see the LICENCE text file. *
# * *
# * This program is distributed in the hope that it will be useful, *
# * but WITHOUT ANY WARRANTY; without even the implied warranty of *
# * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *
# * GNU Library General Public License for more details. *
# * *
# * You should have received a copy of the GNU Library General Public *
# * License along with this program; if not, write to the Free Software *
# * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 *
# * USA *
# * *
# ***************************************************************************
"""Provides functions to scale shapes."""
## @package scale
# \ingroup draftfunctions
# \brief Provides functions to scale shapes.
import draftfunctions.join as join
import draftmake.make_copy as make_copy
import draftmake.make_line as make_line
import draftutils.gui_utils as gui_utils
import draftutils.utils as utils
import DraftVecUtils
## \addtogroup draftfunctions
# @{
import FreeCAD as App
def scale(
    objectslist, scale=App.Vector(1, 1, 1), center=App.Vector(0, 0, 0), copy=False
):
    """scale(objects, scale, [center], copy)

    Scale the objects contained in objectslist (a single object or a list
    of objects) by the given factors around the given center.

    Parameters
    ----------
    objectslist : object or list
        Object or list of objects to scale.
    scale : Base.Vector
        Scale factors defined by a given vector (in X, Y, Z directions).
    center : Base.Vector
        Center of the scale operation.
    copy : bool
        If copy is True, the actual objects are not scaled, but copies
        are created instead.

    Return
    ----------
    The objects (or their copies) are returned; a single object when only
    one was processed, otherwise a list.
    """
    if not isinstance(objectslist, list):
        objectslist = [objectslist]
    newobjlist = []
    for obj in objectslist:
        if copy:
            newobj = make_copy.make_copy(obj)
        else:
            newobj = obj
        if hasattr(obj, "Shape"):
            # Build the scaled geometry once; some branches below reuse it.
            scaled_shape = obj.Shape.copy()
            m = App.Matrix()
            m.move(center.negative())
            m.scale(scale.x, scale.y, scale.z)
            m.move(center)
            scaled_shape = scaled_shape.transformGeometry(m)
        if utils.get_type(obj) == "Rectangle":
            # Keep the parametric Rectangle: derive new Length/Height from
            # the scaled corner points instead of replacing the shape.
            p = [v.Point for v in scaled_shape.Vertexes]
            pl = obj.Placement.copy()
            pl.Base = p[0]
            diag = p[2].sub(p[0])
            bb = p[1].sub(p[0])
            bh = p[3].sub(p[0])
            nb = DraftVecUtils.project(diag, bb)
            nh = DraftVecUtils.project(diag, bh)
            # Preserve the sign convention of the original dimensions.
            if obj.Length < 0:
                new_length = -nb.Length
            else:
                new_length = nb.Length
            if obj.Height < 0:
                new_height = -nh.Length
            else:
                new_height = nh.Length
            newobj.Length = new_length
            newobj.Height = new_height
            # Fixed: removed dead local `tr` (computed but never used).
            newobj.Placement = pl
        elif utils.get_type(obj) == "Wire" or utils.get_type(obj) == "BSpline":
            # Scale each point in place; scale_vertex handles the global
            # placement transform.
            for index in range(len(newobj.Points)):
                scale_vertex(newobj, index, scale, center)
        elif hasattr(obj, "Shape"):
            newobj.Shape = scaled_shape
        elif hasattr(obj, "Position"):
            d = obj.Position.sub(center)
            newobj.Position = center.add(
                App.Vector(d.x * scale.x, d.y * scale.y, d.z * scale.z)
            )
        elif hasattr(obj, "Placement"):
            d = obj.Placement.Base.sub(center)
            newobj.Placement.Base = center.add(
                App.Vector(d.x * scale.x, d.y * scale.y, d.z * scale.z)
            )
            # Dimension-like properties scale with the matching axis factor;
            # clear any expression bindings first so assignment sticks.
            if hasattr(obj, "Height"):
                obj.setExpression("Height", None)
                obj.Height = obj.Height * scale.y
            if hasattr(obj, "Width"):
                obj.setExpression("Width", None)
                obj.Width = obj.Width * scale.x
            if hasattr(obj, "XSize"):
                obj.setExpression("XSize", None)
                obj.XSize = obj.XSize * scale.x
            if hasattr(obj, "YSize"):
                obj.setExpression("YSize", None)
                obj.YSize = obj.YSize * scale.y
            if obj.ViewObject and hasattr(obj.ViewObject, "FontSize"):
                obj.ViewObject.FontSize = obj.ViewObject.FontSize * scale.y
        if copy:
            gui_utils.format_object(newobj, obj)
        newobjlist.append(newobj)
    if copy and utils.get_param("selectBaseObjects", False):
        gui_utils.select(objectslist)
    else:
        gui_utils.select(newobjlist)
    if len(newobjlist) == 1:
        return newobjlist[0]
    return newobjlist
# Following functions are needed for SubObjects modifiers
# implemented by Dion Moult during 0.19 dev cycle (works only with Draft Wire)
def scale_vertex(obj, vertex_index, scale, center):
    """Scale one point of a Draft Wire in place.

    Needed for SubObjects modifiers.
    Implemented by Dion Moult during 0.19 dev cycle (works only with Draft Wire).
    """
    pts = obj.Points
    inverse_placement = obj.getGlobalPlacement().inverse()
    # Transform the point to global coordinates, scale it about the center,
    # then map it back into the object's local coordinate system.
    global_point = obj.getGlobalPlacement().multVec(pts[vertex_index])
    scaled = scale_vector_from_center(global_point, scale, center)
    pts[vertex_index] = inverse_placement.multVec(scaled)
    obj.Points = pts


scaleVertex = scale_vertex
def scale_vector_from_center(vector, scale, center):
    """Return *vector* scaled component-wise about *center*.

    Needed for SubObjects modifiers.
    Implemented by Dion Moult during 0.19 dev cycle (works only with Draft Wire).
    """
    # Translate so the center is the origin, scale, translate back.
    offset = vector.sub(center)
    offset = offset.scale(scale.x, scale.y, scale.z)
    return offset.add(center)


scaleVectorFromCenter = scale_vector_from_center
def scale_edge(obj, edge_index, scale, center):
    """Scale one edge of a Draft Wire in place by scaling both endpoints.

    Needed for SubObjects modifiers.
    Implemented by Dion Moult during 0.19 dev cycle (works only with Draft Wire).
    """
    scale_vertex(obj, edge_index, scale, center)
    # The closing edge of a closed wire wraps back to the first point.
    end_index = 0 if utils.is_closed_edge(edge_index, obj) else edge_index + 1
    scale_vertex(obj, end_index, scale, center)


scaleEdge = scale_edge
def copy_scaled_edge(obj, edge_index, scale, center):
    """Create a new Line from one edge of a Draft Wire, scaled about *center*.

    Needed for SubObjects modifiers.
    Implemented by Dion Moult during 0.19 dev cycle (works only with Draft Wire).
    """
    placement = obj.getGlobalPlacement()
    # The closing edge of a closed wire ends at the wire's first point.
    if utils.is_closed_edge(edge_index, obj):
        end_index = 0
    else:
        end_index = edge_index + 1
    start = scale_vector_from_center(
        placement.multVec(obj.Points[edge_index]), scale, center
    )
    end = scale_vector_from_center(
        placement.multVec(obj.Points[end_index]), scale, center
    )
    return make_line.make_line(start, end)


copyScaledEdge = copy_scaled_edge
def copy_scaled_edges(arguments):
    """Copy-scale several edges and join the resulting lines into wires.

    Each argument is an (obj, edge_index, scale, center) tuple, forwarded
    to copy_scaled_edge.

    Needed for SubObjects modifiers.
    Implemented by Dion Moult during 0.19 dev cycle (works only with Draft Wire).
    """
    copied_edges = [
        copy_scaled_edge(obj, edge_index, scale, center)
        for obj, edge_index, scale, center in arguments
    ]
    join.join_wires(copied_edges)


copyScaledEdges = copy_scaled_edges
## @}
|
fta | inhibitgate | """Inhibit gate item definition."""
from gaphas.geometry import Rectangle
from gaphor.core.modeling import DrawContext
from gaphor.diagram.presentation import (
Classified,
ElementPresentation,
from_package_str,
)
from gaphor.diagram.shapes import Box, IconBox, Text, stroke
from gaphor.diagram.support import represents
from gaphor.diagram.text import FontStyle, FontWeight
from gaphor.RAAML import raaml
from gaphor.RAAML.fta.constants import DEFAULT_FTA_MAJOR
from gaphor.UML.recipes import stereotypes_str
@represents(raaml.INHIBIT)
class InhibitItem(Classified, ElementPresentation):
    """Diagram item for an FTA Inhibit gate.

    Renders the gate icon with a stereotype label, the element's name in
    bold, and the owning package name underneath.
    """

    def __init__(self, diagram, id=None):
        super().__init__(diagram, id, width=DEFAULT_FTA_MAJOR, height=DEFAULT_FTA_MAJOR)
        # Re-render whenever the element is renamed or moved to another
        # namespace (package).
        self.watch("subject[NamedElement].name").watch(
            "subject[NamedElement].namespace.name"
        )

    def update_shapes(self, event=None):
        # Icon plus stacked text labels; the lambdas defer evaluation of
        # subject attributes until render time.
        self.shape = IconBox(
            Box(
                draw=draw_inhibit_gate,
            ),
            Text(
                text=lambda: stereotypes_str(
                    self.subject, [self.diagram.gettext("Inhibit Gate")]
                ),
            ),
            Text(
                text=lambda: self.subject.name or "",
                width=lambda: self.width - 4,
                style={
                    "font-weight": FontWeight.BOLD,
                    "font-style": FontStyle.NORMAL,
                },
            ),
            Text(
                text=lambda: from_package_str(self),
                style={"font-size": "x-small"},
            ),
        )
def draw_inhibit_gate(box, context: DrawContext, bounding_box: Rectangle):
    """Draw an inhibit gate: a hexagon with vertical stems on top and
    bottom and a horizontal condition line out the right-hand side."""
    cr = context.cairo
    width = bounding_box.width
    height = bounding_box.height
    # Key coordinates of the figure.
    x_mid = width * 5.0 / 12.0
    x_left = 0
    x_right = width * 5.0 / 6.0
    y_top = height / 6.0
    y_upper = height / 3.0
    y_lower = height * 2.0 / 3.0
    y_bottom = height * 5.0 / 6.0
    # Top vertical stem.
    cr.move_to(x_mid, 0)
    cr.line_to(x_mid, y_top)
    # Hexagon, traversed counter-clockwise from the top vertex.
    for px, py in (
        (x_left, y_upper),
        (x_left, y_lower),
        (x_mid, y_bottom),
        (x_right, y_lower),
        (x_right, y_upper),
        (x_mid, y_top),
    ):
        cr.line_to(px, py)
    # Bottom vertical stem.
    cr.move_to(x_mid, height)
    cr.line_to(x_mid, y_bottom)
    # Condition line out of the right side.
    y_mid = height / 2.0
    cr.move_to(x_right, y_mid)
    cr.line_to(width, y_mid)
    stroke(context, fill=True)
|
deluge | setup | #!/usr/bin/env python
#
# Copyright (C) 2007 Andrew Resch <andrewresch@gmail.com>
# Copyright (C) 2009 Damien Churchill <damoxc@gmail.com>
#
# This file is part of Deluge and is licensed under GNU General Public License 3.0, or later, with
# the additional special exception to link portions of this program with the OpenSSL library.
# See LICENSE for more details.
#
import glob
import os
import platform
import sys
from distutils.command.build import build as _build
from distutils.command.clean import clean as _clean
from distutils.command.install_data import install_data as _install_data
from shutil import rmtree, which
import msgfmt
from setuptools import Command, find_packages, setup
from setuptools.command.test import test as _test
from version import get_version
try:
from sphinx.setup_command import BuildDoc
except ImportError:
class BuildDoc:
pass
def windows_check():
    """Return True when running on a Windows platform."""
    return platform.system() in {"Windows", "Microsoft"}
def osx_check():
    """Return True when running on macOS (Darwin)."""
    return platform.system() == "Darwin"
# Freedesktop data files generated from .in templates by build_trans.
desktop_data = "deluge/ui/data/share/applications/deluge.desktop"
metainfo_data = "deluge/ui/data/share/metainfo/deluge.metainfo.xml"

# Variables for setuptools.setup
_package_data = {}
_exclude_package_data = {}
_entry_points = {"console_scripts": [], "gui_scripts": [], "deluge.ui": []}
_data_files = []
_version = get_version(prefix="deluge-", suffix=".dev0")
class PyTest(_test):
    """`setup.py test` command that delegates to pytest."""

    def initialize_options(self):
        _test.initialize_options(self)
        # NOTE(review): pytest_args appears unused — run_tests passes
        # self.test_args (set in finalize_options) to pytest.main; confirm
        # before removing.
        self.pytest_args = []

    def finalize_options(self):
        _test.finalize_options(self)
        self.test_args = []
        self.test_suite = True

    def run_tests(self):
        # Imported here because pytest is only needed when running tests.
        import pytest

        errcode = pytest.main(self.test_args)
        sys.exit(errcode)
class CleanDocs(Command):
    description = "Clean the documentation build and module rst files"
    user_options = []

    def initialize_options(self):
        pass

    def finalize_options(self):
        pass

    def run(self):
        """Delete docs/build and the generated per-module .rst files."""
        build_dir = "docs/build"
        print(f"Deleting {build_dir}")
        try:
            rmtree(build_dir)
        except OSError:
            # Nothing to clean; the build directory does not exist.
            pass
        for rst_file in glob.glob("docs/source/modules/deluge*.rst"):
            os.remove(rst_file)
class BuildWebUI(Command):
    description = "Minify WebUI files"
    user_options = []

    # Root of the WebUI JavaScript tree and the source dirs to minify.
    JS_DIR = os.path.join("deluge", "ui", "web", "js")
    JS_SRC_DIRS = ("deluge-all", os.path.join("extjs", "ext-extensions"))

    def initialize_options(self):
        pass

    def finalize_options(self):
        pass

    def run(self):
        """Minify the WebUI JS sources and generate the gettext.js file.

        If the minifier cannot be imported, previously-minified output is
        accepted; the build only aborts when neither is available.
        """
        js_basedir = os.path.join(os.path.dirname(__file__), self.JS_DIR)
        try:
            from minify_web_js import minify_js_dir

            import_error = ""
        except ImportError as err:
            # Remember the error; minify_js_dir stays undefined and the
            # NameError fallback below is taken for every source dir.
            import_error = err

        for js_src_dir in self.JS_SRC_DIRS:
            source_dir = os.path.join(js_basedir, js_src_dir)
            try:
                minify_js_dir(source_dir)
            except NameError:
                js_file = source_dir + ".js"
                if os.path.isfile(js_file):
                    print(
                        "Unable to minify but found existing minified: {}".format(
                            js_file
                        )
                    )
                else:
                    # Unable to minify and no existing minified file found so exiting.
                    print("Import error: %s" % import_error)
                    sys.exit(1)

        # Create the gettext.js file for translations.
        try:
            from gen_web_gettext import create_gettext_js
        except ImportError:
            # Translation file generation is optional at build time.
            pass
        else:
            deluge_all_path = os.path.join(js_basedir, self.JS_SRC_DIRS[0])
            print("Creating WebUI translation file: %s/gettext.js" % deluge_all_path)
            create_gettext_js(deluge_all_path)
class CleanWebUI(Command):
    """Remove the generated/minified WebUI JavaScript files."""

    # Fixed: the description previously duplicated CleanDocs' text
    # ("Clean the documentation build and rst files").
    description = "Clean the WebUI generated javascript files"
    user_options = []

    def initialize_options(self):
        pass

    def finalize_options(self):
        pass

    @staticmethod
    def _remove_file(js_file):
        """Best-effort delete of one generated file (missing file is fine)."""
        print(f"Deleting {js_file}")
        try:
            os.remove(js_file)
        except OSError:
            pass

    def run(self):
        js_basedir = os.path.join(os.path.dirname(__file__), BuildWebUI.JS_DIR)
        # Remove files generated by minify script.
        for js_src_dir in BuildWebUI.JS_SRC_DIRS:
            for file_type in (".js", "-debug.js"):
                self._remove_file(os.path.join(js_basedir, js_src_dir + file_type))
        # Remove generated gettext.js
        self._remove_file(os.path.join(js_basedir, "gettext.js"))
class BuildTranslations(Command):
    description = "Compile .po files into .mo files & create .desktop file"
    user_options = [
        ("build-lib", None, "lib build folder"),
        ("develop", "D", "Compile translations in develop mode (deluge/i18n)"),
    ]
    boolean_options = ["develop"]

    def initialize_options(self):
        self.build_lib = None
        self.develop = False

    def finalize_options(self):
        self.set_undefined_options("build", ("build_lib", "build_lib"))

    def run(self):
        """Merge translations into the .desktop/metainfo files and compile
        every .po catalog into a deluge.mo under LC_MESSAGES."""
        po_dir = os.path.join(os.path.dirname(__file__), "deluge", "i18n")
        if self.develop:
            basedir = po_dir
        else:
            basedir = os.path.join(self.build_lib, "deluge", "i18n")
        intltool_merge = "intltool-merge"
        if not windows_check() and which(intltool_merge):
            for data_file in (desktop_data, metainfo_data):
                # Creates the translated file from the .in file.
                # Fixed: the style option is computed per file. Previously
                # intltool_merge_opts accumulated across iterations, so the
                # second file was merged with both --desktop-style and
                # --xml-style.
                intltool_merge_opts = "--utf8 --quiet"
                if "xml" in data_file:
                    intltool_merge_opts += " --xml-style"
                elif "desktop" in data_file:
                    intltool_merge_opts += " --desktop-style"
                in_file = data_file + ".in"
                print("Creating file: %s" % data_file)
                # Fixed: the locale env var is LC_ALL (was misspelled C_ALL,
                # which set a meaningless variable).
                os.system(
                    "LC_ALL=C %s %s %s %s %s "
                    % (intltool_merge, intltool_merge_opts, po_dir, in_file, data_file)
                )
        print("Compiling po files from %s..." % po_dir)
        for path, names, filenames in os.walk(po_dir):
            for f in filenames:
                upto_date = False
                if f.endswith(".po"):
                    lang = f[:-3]
                    src = os.path.join(path, f)
                    dest_path = os.path.join(basedir, lang, "LC_MESSAGES")
                    dest = os.path.join(dest_path, "deluge.mo")
                    if not os.path.exists(dest_path):
                        os.makedirs(dest_path)
                    if not os.path.exists(dest):
                        sys.stdout.write("%s, " % lang)
                        sys.stdout.flush()
                        msgfmt.make(src, dest)
                    else:
                        # Recompile only when the source is newer than the
                        # compiled catalog (index 8 of os.stat is st_mtime).
                        src_mtime = os.stat(src)[8]
                        dest_mtime = os.stat(dest)[8]
                        if src_mtime > dest_mtime:
                            sys.stdout.write("%s, " % lang)
                            sys.stdout.flush()
                            msgfmt.make(src, dest)
                        else:
                            upto_date = True
            if upto_date:
                sys.stdout.write(" po files already up to date. ")
        sys.stdout.write("\b\b \nFinished compiling translation files. \n")
class CleanTranslations(Command):
    description = "Cleans translations files."
    user_options = [
        ("all", "a", "Remove all build output, not just temporary by-products")
    ]
    boolean_options = ["all"]

    def initialize_options(self):
        self.all = None

    def finalize_options(self):
        self.set_undefined_options("clean", ("all", "all"))

    def run(self):
        """Delete the generated .desktop and metainfo files, if present."""
        for data_path in (desktop_data, metainfo_data):
            if os.path.isfile(data_path):
                print("Deleting %s" % data_path)
                os.remove(data_path)
class BuildPlugins(Command):
    description = "Build plugins into .eggs"
    user_options = [
        ("install-dir=", None, "develop install folder"),
        ("develop", "D", "Compile plugins in develop mode"),
    ]
    boolean_options = ["develop"]

    def initialize_options(self):
        self.install_dir = None
        self.develop = False

    def finalize_options(self):
        pass

    def run(self):
        """Run setup.py (develop or bdist_egg) in every plugin directory."""
        for plugin_dir in glob.glob("deluge/plugins/*"):
            if not os.path.exists(os.path.join(plugin_dir, "setup.py")):
                continue
            # Choose the setup.py invocation for this build mode.
            if self.develop and self.install_dir:
                setup_args = " setup.py develop --install-dir=%s" % self.install_dir
            elif self.develop:
                setup_args = " setup.py develop"
            else:
                setup_args = " setup.py bdist_egg -d .."
            os.system("cd " + plugin_dir + "&& " + sys.executable + setup_args)
class CleanPlugins(Command):
    description = "Cleans the plugin folders"
    user_options = [
        ("all", "a", "Remove all build output, not just temporary by-products")
    ]
    boolean_options = ["all"]

    def initialize_options(self):
        self.all = None

    def finalize_options(self):
        # Inherit --all from the top-level clean command.
        self.set_undefined_options("clean", ("all", "all"))

    def run(self):
        """Run `setup.py clean` in each plugin dir and delete built eggs,
        egg-links and egg-info directories."""
        print("Cleaning the plugin's folders...")
        plugin_path = "deluge/plugins/*"
        for path in glob.glob(plugin_path):
            if os.path.exists(os.path.join(path, "setup.py")):
                c = "cd " + path + " && " + sys.executable + " setup.py clean"
                if self.all:
                    c += " -a"
                print("Calling '%s'" % c)
                os.system(c)
            # Delete the .eggs
            if path[-4:] == ".egg":
                print('Deleting egg file "%s"' % path)
                os.remove(path)
            # Delete the .egg-link
            if path[-9:] == ".egg-link":
                print('Deleting egg link "%s"' % path)
                os.remove(path)

        egg_info_dir_path = "deluge/plugins/*/*.egg-info"
        for path in glob.glob(egg_info_dir_path):
            # Delete the .egg-info's directories
            if path[-9:] == ".egg-info":
                print("Deleting %s" % path)
                for fpath in os.listdir(path):
                    os.remove(os.path.join(path, fpath))
                os.removedirs(path)
class EggInfoPlugins(Command):
    description = "Create .egg-info directories for plugins"
    user_options = []

    def initialize_options(self):
        pass

    def finalize_options(self):
        pass

    def run(self):
        """Regenerate egg-info metadata for each plugin shipping a setup.py."""
        for plugin_dir in glob.glob("deluge/plugins/*"):
            if not os.path.exists(os.path.join(plugin_dir, "setup.py")):
                continue
            os.system("cd " + plugin_dir + "&& " + sys.executable + " setup.py egg_info")
class Build(_build):
    """Custom build that also builds the WebUI, translations and plugins."""

    sub_commands = [
        ("build_webui", None),
        ("build_trans", None),
        ("build_plugins", None),
    ] + _build.sub_commands

    def run(self):
        # Run all sub-commands (at least those that need to be run).
        _build.run(self)
        try:
            # Check that the libtorrent binding is importable; a missing
            # binding is only a warning because the build itself succeeds.
            from deluge._libtorrent import LT_VERSION

            print(f"Info: Found libtorrent ({LT_VERSION}) installed.")
        except ImportError as ex:
            print("Warning: libtorrent (libtorrent-rasterbar) not found: %s" % ex)
class InstallData(_install_data):
    """Custom class to fix `setup install` copying data files to incorrect location. (Bug #1389)"""

    def finalize_options(self):
        # Reset install_dir so it is resolved from the parent 'install'
        # command's install_data value rather than the platform default.
        self.install_dir = None
        self.set_undefined_options(
            "install",
            ("install_data", "install_dir"),
            ("root", "root"),
            ("force", "force"),
        )

    def run(self):
        _install_data.run(self)
class Clean(_clean):
    """Custom clean that also cleans plugins, translations and the WebUI."""

    sub_commands = _clean.sub_commands + [
        ("clean_plugins", None),
        ("clean_trans", None),
        ("clean_webui", None),
    ]

    def run(self):
        # Remove deluge egg-info.
        root_egg_info_dir_path = "deluge*.egg-info"
        for path in glob.glob(root_egg_info_dir_path):
            print("Deleting %s" % path)
            for fpath in os.listdir(path):
                os.remove(os.path.join(path, fpath))
            os.removedirs(path)
        # Run all sub-commands (at least those that need to be run)
        for cmd_name in self.get_sub_commands():
            self.run_command(cmd_name)
        _clean.run(self)
# Map custom command classes onto the standard command names.
cmdclass = {
    "build": Build,
    "build_webui": BuildWebUI,
    "build_trans": BuildTranslations,
    "build_plugins": BuildPlugins,
    "build_docs": BuildDoc,
    "spellcheck_docs": BuildDoc,
    "install_data": InstallData,
    "clean_plugins": CleanPlugins,
    "clean_trans": CleanTranslations,
    "clean_docs": CleanDocs,
    "clean_webui": CleanWebUI,
    "clean": Clean,
    "egg_info_plugins": EggInfoPlugins,
    "test": PyTest,
}

# Freedesktop data files (icons, man pages, .desktop/metainfo) are only
# installed on non-Windows, non-macOS platforms.
if not windows_check() and not osx_check():
    for icon_path in glob.glob("deluge/ui/data/icons/hicolor/*x*"):
        size = os.path.basename(icon_path)
        icons = glob.glob(os.path.join(icon_path, "apps", "deluge*.png"))
        _data_files.append((f"share/icons/hicolor/{size}/apps", icons))
    _data_files.extend(
        [
            (
                "share/icons/hicolor/scalable/apps",
                ["deluge/ui/data/icons/hicolor/scalable/apps/deluge.svg"],
            ),
            ("share/pixmaps", ["deluge/ui/data/pixmaps/deluge.png"]),
            (
                "share/man/man1",
                [
                    "docs/man/deluge.1",
                    "docs/man/deluged.1",
                    "docs/man/deluge-gtk.1",
                    "docs/man/deluge-web.1",
                    "docs/man/deluge-console.1",
                ],
            ),
        ]
    )
    # These files only exist after build_trans has generated them.
    if os.path.isfile(desktop_data):
        _data_files.append(("share/applications", [desktop_data]))
    if os.path.isfile(metainfo_data):
        _data_files.append(("share/metainfo", [metainfo_data]))

# Entry Points
_entry_points["console_scripts"] = [
    "deluge-console = deluge.ui.console:start",
]
# On Windows use gui_scripts to hide cmd popup (no effect on Linux/MacOS)
_entry_points["gui_scripts"] = [
    "deluge = deluge.ui.ui_entry:start_ui",
    "deluge-gtk = deluge.ui.gtk3:start",
    "deluge-web = deluge.ui.web:start",
    "deluged = deluge.core.daemon_entry:start_daemon",
]
# Provide Windows 'debug' exes for stdin/stdout e.g. logging/errors
if windows_check():
    _entry_points["console_scripts"].extend(
        [
            "deluge-debug = deluge.ui.ui_entry:start_ui",
            "deluge-web-debug = deluge.ui.web:start",
            "deluged-debug = deluge.core.daemon_entry:start_daemon",
        ]
    )
# Pluggable UI implementations discovered via the "deluge.ui" group.
_entry_points["deluge.ui"] = [
    "console = deluge.ui.console:Console",
    "web = deluge.ui.web:Web",
    "gtk = deluge.ui.gtk3:Gtk",
]

# Non-Python files shipped inside the packages.
_package_data["deluge"] = [
    "ui/data/pixmaps/*.png",
    "ui/data/pixmaps/*.svg",
    "ui/data/pixmaps/*.ico",
    "ui/data/pixmaps/*.gif",
    "ui/data/pixmaps/flags/*.png",
    "plugins/*.egg",
    "i18n/*/LC_MESSAGES/*.mo",
]
_package_data["deluge.ui.web"] = [
    "index.html",
    "css/*.css",
    "icons/*.png",
    "images/*.gif",
    "images/*.png",
    "js/*.js",
    "js/extjs/*.js",
    "render/*.html",
    "themes/css/*.css",
    "themes/images/*/*.gif",
    "themes/images/*/*.png",
    "themes/images/*/*/*.gif",
    "themes/images/*/*/*.png",
]
_package_data["deluge.ui.gtk3"] = ["glade/*.ui"]

setup_requires = ["setuptools", "wheel"]
install_requires = [
    "twisted[tls]>=17.1",
    # Add pyasn1 for setuptools workaround:
    # https://github.com/pypa/setuptools/issues/1510
    "pyasn1",
    "rencode",
    "pyopenssl",
    "pyxdg",
    "mako",
    "setuptools",
    "pywin32; sys_platform == 'win32'",
    "certifi; sys_platform == 'win32'",
    "zope.interface",
]
extras_require = {
    "all": [
        "setproctitle",
        "pillow",
        "chardet",
        "ifaddr",
    ]
}

# Main setup
setup(
    name="deluge",
    version=_version,
    fullname="Deluge BitTorrent Client",
    description="BitTorrent Client",
    author="Deluge Team",
    maintainer="Calum Lind",
    maintainer_email="calumlind+deluge@gmail.com",
    keywords="torrent bittorrent p2p fileshare filesharing",
    long_description=open("README.md").read(),
    long_description_content_type="text/markdown",
    url="https://deluge-torrent.org",
    project_urls={
        "GitHub (mirror)": "https://github.com/deluge-torrent/deluge",
        "Sourcecode": "http://git.deluge-torrent.org/deluge",
        "Issues": "https://dev.deluge-torrent.org/report/1",
        "Discussion": "https://forum.deluge-torrent.org",
        "Documentation": "https://deluge.readthedocs.io",
    },
    classifiers=[
        "Development Status :: 4 - Beta",
        "Environment :: Console",
        "Environment :: Web Environment",
        "Environment :: X11 Applications :: GTK",
        "Framework :: Twisted",
        "Intended Audience :: End Users/Desktop",
        (
            "License :: OSI Approved :: "
            "GNU General Public License v3 or later (GPLv3+)"
        ),
        "Programming Language :: Python",
        "Operating System :: MacOS :: MacOS X",
        "Operating System :: Microsoft :: Windows",
        "Operating System :: POSIX",
        "Topic :: Internet",
    ],
    python_requires=">=3.6",
    license="GPLv3+",
    cmdclass=cmdclass,
    setup_requires=setup_requires,
    install_requires=install_requires,
    extras_require=extras_require,
    data_files=_data_files,
    package_data=_package_data,
    exclude_package_data=_exclude_package_data,
    packages=find_packages(exclude=["deluge.plugins.*", "deluge.tests"]),
    entry_points=_entry_points,
)
|
core | views | # -*- coding: utf-8 -*-
from babybuddy.mixins import LoginRequiredMixin, PermissionRequiredMixin
from babybuddy.views import BabyBuddyFilterView
from core import filters, forms, models, timeline
from django.contrib import messages
from django.contrib.messages.views import SuccessMessageMixin
from django.forms import Form
from django.http import HttpResponseRedirect
from django.urls import reverse, reverse_lazy
from django.utils import timezone
from django.utils.translation import gettext as _
from django.views.generic.base import RedirectView, TemplateView
from django.views.generic.detail import DetailView
from django.views.generic.edit import CreateView, DeleteView, FormView, UpdateView
def _prepare_timeline_context_data(context, date, child=None):
    """Populate *context* (in place) with timeline objects and nav dates.

    :param context: template context dict to mutate.
    :param date: "YYYY-MM-DD" date string (e.g. from a query parameter).
    :param child: optional Child instance used to filter timeline objects.
    """
    date = timezone.datetime.strptime(date, "%Y-%m-%d")
    date = timezone.localtime(timezone.make_aware(date))
    context["timeline_objects"] = timeline.get_objects(date, child)
    context["date"] = date
    context["date_previous"] = date - timezone.timedelta(days=1)
    # Only offer a "next" link for past dates — never point into the future.
    # (Fixed: removed a stray trailing `pass` statement.)
    if date.date() < timezone.localdate():
        context["date_next"] = date + timezone.timedelta(days=1)
class CoreAddView(PermissionRequiredMixin, SuccessMessageMixin, CreateView):
    """Base create view: builds a success message and forwards selected
    query parameters to the form as keyword arguments."""

    def get_success_message(self, cleaned_data):
        cleaned_data["model"] = self.model._meta.verbose_name.title()
        if "child" in cleaned_data:
            message_template = _("%(model)s entry for %(child)s added!")
        else:
            message_template = _("%(model)s entry added!")
        self.success_message = message_template
        return self.success_message % cleaned_data

    def get_form_kwargs(self):
        """
        Check for and add "child" and "timer" from request query parameters.

        - "child" may provide a slug for a Child instance.
        - "timer" may provide an ID for a Timer instance.

        These arguments are used in some add views to pre-fill initial data
        in the form fields.

        :return: Updated keyword arguments.
        """
        kwargs = super().get_form_kwargs()
        for parameter in ("child", "timer"):
            value = self.request.GET.get(parameter, None)
            if value:
                kwargs[parameter] = value
        return kwargs
class CoreUpdateView(PermissionRequiredMixin, SuccessMessageMixin, UpdateView):
    """Base update view: success message mentions the child when present."""

    def get_success_message(self, cleaned_data):
        cleaned_data["model"] = self.model._meta.verbose_name.title()
        if "child" in cleaned_data:
            message_template = _("%(model)s entry for %(child)s updated.")
        else:
            message_template = _("%(model)s entry updated.")
        self.success_message = message_template
        return self.success_message % cleaned_data
class CoreDeleteView(PermissionRequiredMixin, SuccessMessageMixin, DeleteView):
    """Base delete view with a generic per-model success message."""

    def get_success_message(self, cleaned_data):
        model_name = self.model._meta.verbose_name.title()
        return _("%(model)s entry deleted.") % {"model": model_name}
class BMIList(PermissionRequiredMixin, BabyBuddyFilterView):
    """Paginated, filterable list of BMI entries."""

    model = models.BMI
    template_name = "core/bmi_list.html"
    permission_required = ("core.view_bmi",)
    paginate_by = 10
    filterset_class = filters.BMIFilter


class BMIAdd(CoreAddView):
    """Create view for a BMI entry."""

    model = models.BMI
    permission_required = ("core.add_bmi",)
    form_class = forms.BMIForm
    success_url = reverse_lazy("core:bmi-list")


class BMIUpdate(CoreUpdateView):
    """Update view for a BMI entry."""

    model = models.BMI
    permission_required = ("core.change_bmi",)
    form_class = forms.BMIForm
    success_url = reverse_lazy("core:bmi-list")


class BMIDelete(CoreDeleteView):
    """Delete view for a BMI entry."""

    model = models.BMI
    permission_required = ("core.delete_bmi",)
    success_url = reverse_lazy("core:bmi-list")


class ChildList(PermissionRequiredMixin, BabyBuddyFilterView):
    """Paginated list of children, filterable by name."""

    model = models.Child
    template_name = "core/child_list.html"
    permission_required = ("core.view_child",)
    paginate_by = 10
    filterset_fields = ("first_name", "last_name")


class ChildAdd(CoreAddView):
    """Create view for a Child; message uses the child's name directly."""

    model = models.Child
    permission_required = ("core.add_child",)
    form_class = forms.ChildForm
    success_url = reverse_lazy("core:child-list")
    success_message = _("%(first_name)s %(last_name)s added!")
class ChildDetail(PermissionRequiredMixin, DetailView):
    """Detail view for one child, including their timeline for a date."""

    model = models.Child
    permission_required = ("core.view_child",)

    def get_context_data(self, **kwargs):
        context = super().get_context_data(**kwargs)
        # Default to today when no ?date= query parameter is supplied.
        requested_date = self.request.GET.get("date", str(timezone.localdate()))
        _prepare_timeline_context_data(context, requested_date, self.object)
        return context
class ChildUpdate(CoreUpdateView):
    """Update view for a Child."""

    model = models.Child
    permission_required = ("core.change_child",)
    form_class = forms.ChildForm
    success_url = reverse_lazy("core:child-list")
class ChildDelete(CoreUpdateView):
    """Delete view for a Child, with a typed-confirmation form.

    This class cannot use `CoreDeleteView` because of the confirmation
    step required, so it subclasses the update view and overrides the
    success message.
    """

    model = models.Child
    form_class = forms.ChildDeleteForm
    template_name = "core/child_confirm_delete.html"
    permission_required = ("core.delete_child",)
    success_url = reverse_lazy("core:child-list")

    def get_success_message(self, cleaned_data):
        # Fixed: the already-formatted message was previously %-formatted a
        # second time with cleaned_data — a no-op normally, but it would
        # raise if the verbose name ever contained a '%'.
        return _("%(model)s entry deleted.") % {
            "model": self.model._meta.verbose_name.title()
        }
class DiaperChangeList(PermissionRequiredMixin, BabyBuddyFilterView):
    """Paginated, filterable list of diaper change entries."""

    model = models.DiaperChange
    template_name = "core/diaperchange_list.html"
    permission_required = ("core.view_diaperchange",)
    paginate_by = 10
    filterset_class = filters.DiaperChangeFilter


class DiaperChangeAdd(CoreAddView):
    """Create view for a diaper change entry."""

    model = models.DiaperChange
    permission_required = ("core.add_diaperchange",)
    form_class = forms.DiaperChangeForm
    success_url = reverse_lazy("core:diaperchange-list")


class DiaperChangeUpdate(CoreUpdateView):
    """Update view for a diaper change entry."""

    model = models.DiaperChange
    permission_required = ("core.change_diaperchange",)
    form_class = forms.DiaperChangeForm
    success_url = reverse_lazy("core:diaperchange-list")


class DiaperChangeDelete(CoreDeleteView):
    """Delete view for a diaper change entry."""

    model = models.DiaperChange
    permission_required = ("core.delete_diaperchange",)
    success_url = reverse_lazy("core:diaperchange-list")
class FeedingList(PermissionRequiredMixin, BabyBuddyFilterView):
    """Paginated, filterable list of feeding entries."""

    model = models.Feeding
    template_name = "core/feeding_list.html"
    permission_required = ("core.view_feeding",)
    paginate_by = 10
    filterset_class = filters.FeedingFilter


class FeedingAdd(CoreAddView):
    """Create view for a feeding entry."""

    model = models.Feeding
    permission_required = ("core.add_feeding",)
    form_class = forms.FeedingForm
    success_url = reverse_lazy("core:feeding-list")


class FeedingUpdate(CoreUpdateView):
    """Update view for a feeding entry."""

    model = models.Feeding
    permission_required = ("core.change_feeding",)
    form_class = forms.FeedingForm
    success_url = reverse_lazy("core:feeding-list")


class FeedingDelete(CoreDeleteView):
    """Delete view for a feeding entry."""

    model = models.Feeding
    permission_required = ("core.delete_feeding",)
    success_url = reverse_lazy("core:feeding-list")
class HeadCircumferenceList(PermissionRequiredMixin, BabyBuddyFilterView):
    """Paginated, filterable list of head circumference measurements."""

    model = models.HeadCircumference
    template_name = "core/head_circumference_list.html"
    permission_required = ("core.view_head_circumference",)
    paginate_by = 10
    filterset_class = filters.HeadCircumferenceFilter


class HeadCircumferenceAdd(CoreAddView):
    """Create a new head circumference measurement."""

    model = models.HeadCircumference
    # Explicit template: the default would be derived from the model name
    # without underscores ("headcircumference_form.html").
    template_name = "core/head_circumference_form.html"
    permission_required = ("core.add_head_circumference",)
    form_class = forms.HeadCircumferenceForm
    success_url = reverse_lazy("core:head-circumference-list")


class HeadCircumferenceUpdate(CoreUpdateView):
    """Edit an existing head circumference measurement."""

    model = models.HeadCircumference
    template_name = "core/head_circumference_form.html"
    permission_required = ("core.change_head_circumference",)
    form_class = forms.HeadCircumferenceForm
    success_url = reverse_lazy("core:head-circumference-list")


class HeadCircumferenceDelete(CoreDeleteView):
    """Delete a head circumference measurement."""

    model = models.HeadCircumference
    template_name = "core/head_circumference_confirm_delete.html"
    permission_required = ("core.delete_head_circumference",)
    success_url = reverse_lazy("core:head-circumference-list")
class HeightList(PermissionRequiredMixin, BabyBuddyFilterView):
    """Paginated, filterable list of height measurements."""

    model = models.Height
    template_name = "core/height_list.html"
    permission_required = ("core.view_height",)
    paginate_by = 10
    filterset_class = filters.HeightFilter


class HeightAdd(CoreAddView):
    """Create a new height measurement."""

    model = models.Height
    permission_required = ("core.add_height",)
    form_class = forms.HeightForm
    success_url = reverse_lazy("core:height-list")


class HeightUpdate(CoreUpdateView):
    """Edit an existing height measurement."""

    model = models.Height
    permission_required = ("core.change_height",)
    form_class = forms.HeightForm
    success_url = reverse_lazy("core:height-list")


class HeightDelete(CoreDeleteView):
    """Delete a height measurement."""

    model = models.Height
    permission_required = ("core.delete_height",)
    success_url = reverse_lazy("core:height-list")
class NoteList(PermissionRequiredMixin, BabyBuddyFilterView):
    """Paginated, filterable list of notes."""

    model = models.Note
    template_name = "core/note_list.html"
    permission_required = ("core.view_note",)
    paginate_by = 10
    filterset_class = filters.NoteFilter


class NoteAdd(CoreAddView):
    """Create a new note."""

    model = models.Note
    permission_required = ("core.add_note",)
    form_class = forms.NoteForm
    success_url = reverse_lazy("core:note-list")


class NoteUpdate(CoreUpdateView):
    """Edit an existing note."""

    model = models.Note
    permission_required = ("core.change_note",)
    form_class = forms.NoteForm
    success_url = reverse_lazy("core:note-list")


class NoteDelete(CoreDeleteView):
    """Delete a note."""

    model = models.Note
    permission_required = ("core.delete_note",)
    success_url = reverse_lazy("core:note-list")
class PumpingList(PermissionRequiredMixin, BabyBuddyFilterView):
    """Paginated, filterable list of pumping entries."""

    model = models.Pumping
    template_name = "core/pumping_list.html"
    permission_required = ("core.view_pumping",)
    paginate_by = 10
    filterset_class = filters.PumpingFilter


class PumpingAdd(CoreAddView):
    """Create a new pumping entry."""

    model = models.Pumping
    permission_required = ("core.add_pumping",)
    form_class = forms.PumpingForm
    success_url = reverse_lazy("core:pumping-list")
    # Custom message: omits the child name used by the default CoreAddView one.
    success_message = _("%(model)s entry added!")


class PumpingUpdate(CoreUpdateView):
    """Edit an existing pumping entry."""

    model = models.Pumping
    permission_required = ("core.change_pumping",)
    form_class = forms.PumpingForm
    success_url = reverse_lazy("core:pumping-list")
    success_message = _("%(model)s entry for %(child)s updated.")


class PumpingDelete(CoreDeleteView):
    """Delete a pumping entry."""

    model = models.Pumping
    permission_required = ("core.delete_pumping",)
    success_url = reverse_lazy("core:pumping-list")
class SleepList(PermissionRequiredMixin, BabyBuddyFilterView):
    """Paginated, filterable list of sleep entries."""

    model = models.Sleep
    template_name = "core/sleep_list.html"
    permission_required = ("core.view_sleep",)
    paginate_by = 10
    filterset_class = filters.SleepFilter


class SleepAdd(CoreAddView):
    """Create a new sleep entry."""

    model = models.Sleep
    permission_required = ("core.add_sleep",)
    form_class = forms.SleepForm
    success_url = reverse_lazy("core:sleep-list")


class SleepUpdate(CoreUpdateView):
    """Edit an existing sleep entry."""

    model = models.Sleep
    permission_required = ("core.change_sleep",)
    form_class = forms.SleepForm
    success_url = reverse_lazy("core:sleep-list")


class SleepDelete(CoreDeleteView):
    """Delete a sleep entry."""

    model = models.Sleep
    permission_required = ("core.delete_sleep",)
    success_url = reverse_lazy("core:sleep-list")
class TemperatureList(PermissionRequiredMixin, BabyBuddyFilterView):
    """Paginated, filterable list of temperature readings."""

    model = models.Temperature
    template_name = "core/temperature_list.html"
    permission_required = ("core.view_temperature",)
    paginate_by = 10
    filterset_class = filters.TemperatureFilter


class TemperatureAdd(CoreAddView):
    """Create a new temperature reading."""

    model = models.Temperature
    permission_required = ("core.add_temperature",)
    form_class = forms.TemperatureForm
    success_url = reverse_lazy("core:temperature-list")
    # Custom wording ("reading" instead of the generic "entry").
    success_message = _("%(model)s reading added!")


class TemperatureUpdate(CoreUpdateView):
    """Edit an existing temperature reading."""

    model = models.Temperature
    permission_required = ("core.change_temperature",)
    form_class = forms.TemperatureForm
    success_url = reverse_lazy("core:temperature-list")
    success_message = _("%(model)s reading for %(child)s updated.")


class TemperatureDelete(CoreDeleteView):
    """Delete a temperature reading."""

    model = models.Temperature
    permission_required = ("core.delete_temperature",)
    success_url = reverse_lazy("core:temperature-list")
class Timeline(LoginRequiredMixin, TemplateView):
    """Show the overall timeline, or redirect straight to the child's own
    timeline when exactly one Child exists."""

    template_name = "timeline/timeline.html"

    # Show the overall timeline or a child timeline if one Child instance.
    def get(self, request, *args, **kwargs):
        if models.Child.objects.count() == 1:
            # FIX: pass URL args as a tuple, not a set literal — reverse()
            # expects an ordered sequence; a set only worked here by the
            # accident of holding a single element.
            return HttpResponseRedirect(
                reverse("core:child", args=(models.Child.objects.first().slug,))
            )
        return super(Timeline, self).get(request, *args, **kwargs)

    def get_context_data(self, **kwargs):
        context = super(Timeline, self).get_context_data(**kwargs)
        # Default to today's date (local time) unless ?date= is supplied.
        date = self.request.GET.get("date", str(timezone.localdate()))
        _prepare_timeline_context_data(context, date)
        return context
class TimerList(PermissionRequiredMixin, BabyBuddyFilterView):
    """Paginated list of timers, filterable by owning user."""

    model = models.Timer
    template_name = "core/timer_list.html"
    permission_required = ("core.view_timer",)
    paginate_by = 10
    filterset_fields = ("user",)


class TimerDetail(PermissionRequiredMixin, DetailView):
    """Detail page for a single running or stopped timer."""

    model = models.Timer
    permission_required = ("core.view_timer",)
class TimerAdd(PermissionRequiredMixin, CreateView):
    """Create a new timer owned by the requesting user."""

    model = models.Timer
    permission_required = ("core.add_timer",)
    form_class = forms.TimerForm

    def get_form_kwargs(self):
        # TimerForm needs the user to assign ownership of the new timer.
        kwargs = super(TimerAdd, self).get_form_kwargs()
        kwargs.update({"user": self.request.user})
        return kwargs

    def get_success_url(self):
        # Jump straight to the newly created timer.
        return reverse("core:timer-detail", kwargs={"pk": self.object.pk})


class TimerUpdate(CoreUpdateView):
    """Edit an existing timer."""

    model = models.Timer
    permission_required = ("core.change_timer",)
    form_class = forms.TimerForm
    success_url = reverse_lazy("core:timer-list")

    def get_form_kwargs(self):
        kwargs = super(TimerUpdate, self).get_form_kwargs()
        kwargs.update({"user": self.request.user})
        return kwargs

    def get_success_url(self):
        # Return to the timer's detail page rather than success_url.
        instance = self.get_object()
        return reverse("core:timer-detail", kwargs={"pk": instance.pk})
class TimerAddQuick(PermissionRequiredMixin, RedirectView):
    """Create a Timer for the posting user in one click and redirect to it."""

    http_method_names = ["post"]
    permission_required = ("core.add_timer",)

    def post(self, request, *args, **kwargs):
        instance = models.Timer.objects.create(user=request.user)
        # Find child from child pk in POST
        child_id = request.POST.get("child", False)
        child = models.Child.objects.get(pk=child_id) if child_id else None
        if child:
            instance.child = child
        # Add the child relationship implicitly when there is only one Child.
        # FIX: count() lives on the manager ("objects"), not the model class;
        # the original `models.Child.count()` raised AttributeError.
        elif models.Child.objects.count() == 1:
            instance.child = models.Child.objects.first()
        instance.save()
        # NOTE(review): "next" is read from the query string even though this
        # view only accepts POST — confirm callers pass it as ?next=.
        self.url = request.GET.get(
            "next", reverse("core:timer-detail", args=(instance.id,))
        )
        return super(TimerAddQuick, self).get(request, *args, **kwargs)
class TimerRestart(PermissionRequiredMixin, RedirectView):
    """Restart a timer and redirect back to its detail page."""

    http_method_names = ["post"]
    permission_required = ("core.change_timer",)

    def post(self, request, *args, **kwargs):
        instance = models.Timer.objects.get(id=kwargs["pk"])
        instance.restart()
        # NOTE(review): message is not passed through gettext — consider
        # translating for consistency with the other success messages.
        messages.success(request, "{} restarted.".format(instance))
        return super(TimerRestart, self).get(request, *args, **kwargs)

    def get_redirect_url(self, *args, **kwargs):
        return reverse("core:timer-detail", kwargs={"pk": kwargs["pk"]})


class TimerDelete(CoreDeleteView):
    """Delete a timer."""

    model = models.Timer
    permission_required = ("core.delete_timer",)
    success_url = reverse_lazy("core:timer-list")
class TummyTimeList(PermissionRequiredMixin, BabyBuddyFilterView):
    """Paginated, filterable list of tummy time entries."""

    model = models.TummyTime
    template_name = "core/tummytime_list.html"
    permission_required = ("core.view_tummytime",)
    paginate_by = 10
    filterset_class = filters.TummyTimeFilter


class TummyTimeAdd(CoreAddView):
    """Create a new tummy time entry."""

    model = models.TummyTime
    permission_required = ("core.add_tummytime",)
    form_class = forms.TummyTimeForm
    success_url = reverse_lazy("core:tummytime-list")


class TummyTimeUpdate(CoreUpdateView):
    """Edit an existing tummy time entry."""

    model = models.TummyTime
    permission_required = ("core.change_tummytime",)
    form_class = forms.TummyTimeForm
    success_url = reverse_lazy("core:tummytime-list")


class TummyTimeDelete(CoreDeleteView):
    """Delete a tummy time entry."""

    model = models.TummyTime
    permission_required = ("core.delete_tummytime",)
    success_url = reverse_lazy("core:tummytime-list")
class WeightList(PermissionRequiredMixin, BabyBuddyFilterView):
    """Paginated, filterable list of weight measurements."""

    model = models.Weight
    template_name = "core/weight_list.html"
    permission_required = ("core.view_weight",)
    paginate_by = 10
    filterset_class = filters.WeightFilter


class WeightAdd(CoreAddView):
    """Create a new weight measurement."""

    model = models.Weight
    permission_required = ("core.add_weight",)
    form_class = forms.WeightForm
    success_url = reverse_lazy("core:weight-list")


class WeightUpdate(CoreUpdateView):
    """Edit an existing weight measurement."""

    model = models.Weight
    permission_required = ("core.change_weight",)
    form_class = forms.WeightForm
    success_url = reverse_lazy("core:weight-list")


class WeightDelete(CoreDeleteView):
    """Delete a weight measurement."""

    model = models.Weight
    permission_required = ("core.delete_weight",)
    success_url = reverse_lazy("core:weight-list")
|
ui | greeter | import importlib.resources
from pathlib import Path
from typing import NamedTuple
from gaphor.abc import ActionProvider, Service
from gaphor.action import action
from gaphor.application import distribution
from gaphor.babel import translate_model
from gaphor.core import event_handler
from gaphor.event import SessionCreated
from gaphor.i18n import gettext, translated_ui_string
from gaphor.ui import APPLICATION_ID
from gi.repository import Adw, GLib, Gtk
class ModelTemplate(NamedTuple):
    """Description of a bundled model template shown in the greeter."""

    name: str  # human-readable (translated) template name
    description: str  # one-line translated description
    icon: str  # icon name displayed next to the entry
    lang: str  # modeling language activated for the new session
    filename: str  # template file shipped under gaphor/templates/
# Built-in model templates offered by the greeter, in display order.
TEMPLATES = [
    ModelTemplate(
        gettext("Generic"),
        gettext("An empty model"),
        "org.gaphor.Gaphor",
        "UML",
        "blank.gaphor",
    ),
    ModelTemplate(
        gettext("UML"),
        gettext("Unified Modeling Language template"),
        "UML",
        "UML",
        "uml.gaphor",
    ),
    ModelTemplate(
        gettext("SysML"),
        gettext("Systems Modeling Language template"),
        "SysML",
        "SysML",
        "sysml.gaphor",
    ),
    ModelTemplate(
        gettext("RAAML"),
        gettext("Risk Analysis and Assessment Modeling Language template"),
        "RAAML",
        "RAAML",
        "raaml.gaphor",
    ),
    ModelTemplate(
        gettext("C4 Model"),
        gettext("Layered C4 Model template"),
        "C4Model",
        "C4Model",
        "c4model.gaphor",
    ),
]
def new_builder(ui_file):
    """Return a Gtk.Builder loaded with the translated UI definition *ui_file*."""
    ui_xml = translated_ui_string("gaphor.ui", f"{ui_file}.ui")
    gtk_builder = Gtk.Builder()
    gtk_builder.add_from_string(ui_xml)
    return gtk_builder
class Greeter(Service, ActionProvider):
    """Gaphor's start-up window.

    Shows recently opened models and the bundled model templates; activating
    an entry creates a new session.  Registered as an application service so
    it is torn down cleanly on shutdown.
    """

    def __init__(self, application, event_manager, recent_manager=None):
        self.application = application
        self.event_manager = event_manager
        # Fall back to GTK's default recent-files store when none is injected.
        self.recent_manager = recent_manager or Gtk.RecentManager.get_default()
        self.greeter: Gtk.Window = None
        self.gtk_app: Gtk.Application = None
        # Close the greeter automatically once any session has been created.
        event_manager.subscribe(self.on_session_created)

    def init(self, gtk_app):
        """Remember the Gtk.Application the greeter window will belong to."""
        self.gtk_app = gtk_app

    def shutdown(self):
        """Service teardown: unsubscribe events and destroy the window."""
        self.event_manager.unsubscribe(self.on_session_created)
        if self.greeter:
            self.greeter.destroy()
        self.gtk_app = None

    def open(self) -> None:
        """Show the greeter, building the window on first use."""
        if self.greeter:
            # Already built: just raise the existing window.
            self.greeter.present()
            return
        builder = new_builder("greeter")
        self.greeter = builder.get_object("greeter")
        self.greeter.set_application(self.gtk_app)
        # Development builds get the "devel" (striped header bar) style.
        if ".dev" in distribution().version:
            self.greeter.get_style_context().add_class("devel")
        listbox = builder.get_object("recent-files")
        templates = builder.get_object("templates")
        if any(self.query_recent_files()):
            for widget in self.create_recent_files():
                listbox.add(widget)
        else:
            # Nothing to show: hide the whole recent-files section.
            builder.get_object("recent-files").set_visible(False)
        for widget in self.create_templates():
            templates.add(widget)
        self.greeter.connect("close-request", self._on_window_close_request)
        self.greeter.present()

    def close(self):
        """Destroy the greeter window, if it is currently open."""
        if self.greeter:
            self.greeter.destroy()
            self.greeter = None

    @action(name="app.new-model", shortcut="<Primary>n")
    def new_model(self):
        """Action handler: open the greeter to start a new model."""
        self.open()

    def query_recent_files(self):
        """Yield recent-manager items recorded by Gaphor that still exist on disk."""
        for item in self.recent_manager.get_items():
            if APPLICATION_ID in item.get_applications() and item.exists():
                yield item

    def create_recent_files(self):
        """Yield one Adw.ActionRow per recent file."""
        for item in self.query_recent_files():
            filename, _host = GLib.filename_from_uri(item.get_uri())
            row = Adw.ActionRow.new()
            row.set_activatable(True)
            row.set_title(str(Path(filename).stem))
            # Abbreviate the home directory to "~" in the subtitle.
            row.set_subtitle(item.get_uri_display().replace(str(Path.home()), "~"))
            row.add_suffix(Gtk.Image.new_from_icon_name("go-next-symbolic"))
            row.connect("activated", self._on_recent_file_activated)
            # Stash the path on the row for the activation handler.
            row.filename = filename
            yield row

    def create_templates(self):
        """Yield one Adw.ActionRow per entry in TEMPLATES."""
        for template in TEMPLATES:
            row = Adw.ActionRow.new()
            row.set_activatable(True)
            row.set_title(template.name)
            row.set_subtitle(template.description)
            image = Gtk.Image.new_from_icon_name(template.icon)
            image.set_pixel_size(36)
            row.add_prefix(image)
            row.add_suffix(Gtk.Image.new_from_icon_name("go-next-symbolic"))
            row.connect("activated", self._on_template_activated)
            # Stash template data on the row for the activation handler.
            row.filename = template.filename
            row.lang = template.lang
            yield row

    @event_handler(SessionCreated)
    def on_session_created(self, _event=None):
        """A model session exists now, so the greeter has done its job."""
        self.close()

    def _on_recent_file_activated(self, row):
        # Open the selected file in a fresh session, then close the greeter.
        filename = row.filename
        self.application.new_session(filename=filename)
        self.close()

    def _on_template_activated(self, child):
        # Load the packaged template, translating its labels first.
        filename = importlib.resources.files("gaphor") / "templates" / child.filename
        translated_model = translate_model(filename)
        session = self.application.new_session(template=translated_model)
        session.get_service("properties").set("modeling-language", child.lang)
        self.close()

    def _on_window_close_request(self, window, event=None):
        self.close()
        # Returning False lets GTK's default close handling proceed.
        return False
|
machine | geminipr | # Copyright (c) 2010-2011 Joshua Harlan Lifton.
# See LICENSE.txt for details.
"""Thread-based monitoring of a Gemini PR stenotype machine."""
import binascii
from plover import log
from plover.machine.base import SerialStenotypeBase
# In the Gemini PR protocol, each packet consists of exactly six bytes
# and the most significant bit (MSB) of every byte is used exclusively
# to indicate whether that byte is the first byte of the packet
# (MSB=1) or one of the remaining five bytes of the packet (MSB=0). As
# such, there are really only seven bits of steno data in each packet
# byte. This is why the STENO_KEY_CHART below is visually presented as
# six rows of seven elements instead of six rows of eight elements.
# Maps (byte index * 7 + data-bit index) to a steno key name.  Each packet
# byte carries seven data bits (the MSB is the start-of-packet flag), so the
# chart is laid out as six rows of seven entries, one row per packet byte.
STENO_KEY_CHART = (
    "Fn", "#1", "#2", "#3", "#4", "#5", "#6",
    "S1-", "S2-", "T-", "K-", "P-", "W-", "H-",
    "R-", "A-", "O-", "*1", "*2", "res1", "res2",
    "pwr", "*3", "*4", "-E", "-U", "-F", "-R",
    "-P", "-B", "-L", "-G", "-T", "-S", "-D",
    "#7", "#8", "#9", "#A", "#B", "#C", "-Z",
)

# Every Gemini PR packet is exactly six bytes long.
BYTES_PER_STROKE = 6
class GeminiPr(SerialStenotypeBase):
    """Standard stenotype interface for a Gemini PR machine."""

    KEYS_LAYOUT = """
        #1 #2 #3 #4 #5 #6 #7 #8 #9 #A #B #C
        Fn S1- T- P- H- *1 *3 -F -P -L -T -D
        S2- K- W- R- *2 *4 -R -B -G -S -Z
        A- O- -E -U
        pwr
        res1
        res2
    """

    def run(self):
        """Overrides base class run method. Do not call directly."""
        self._ready()
        for packet in self._iter_packets(BYTES_PER_STROKE):
            # A valid packet sets the MSB on its first byte only; anything
            # else means we have lost sync with the packet stream.
            if not (packet[0] & 0x80) or sum(b & 0x80 for b in packet[1:]):
                log.error("discarding invalid packet: %s", binascii.hexlify(packet))
                continue
            steno_keys = []
            for i, b in enumerate(packet):
                # Bits 6..0 of each byte map to seven consecutive
                # STENO_KEY_CHART entries.
                for j in range(1, 8):
                    if b & (0x80 >> j):
                        steno_keys.append(STENO_KEY_CHART[i * 7 + j - 1])
            self._notify_keys(steno_keys)
|
io | yaml | # Copyright 2016 Free Software Foundation, Inc.
# This file is part of GNU Radio
#
# SPDX-License-Identifier: GPL-2.0-or-later
#
from collections import OrderedDict
import yaml
from ..params.param import attributed_str
class GRCDumper(yaml.SafeDumper):
    """yaml.SafeDumper with representers tailored to GRC file output."""

    @classmethod
    def add(cls, data_type):
        """Decorator factory: register the decorated function as the
        representer for *data_type*."""

        def decorator(func):
            cls.add_representer(data_type, func)
            return func

        return decorator

    def represent_ordered_mapping(self, data):
        # Build the mapping node by hand so key insertion order is preserved.
        value = []
        node = yaml.MappingNode("tag:yaml.org,2002:map", value, flow_style=False)
        # Mirror SafeDumper internals: record the node so anchors/aliases
        # can refer back to it.
        if self.alias_key is not None:
            self.represented_objects[self.alias_key] = node
        for item_key, item_value in data.items():
            node_key = self.represent_data(item_key)
            node_value = self.represent_data(item_value)
            value.append((node_key, node_value))
        return node

    def represent_ordered_mapping_flowing(self, data):
        """Ordered mapping rendered in flow style ({k: v, ...})."""
        node = self.represent_ordered_mapping(data)
        node.flow_style = True
        return node

    def represent_list_flowing(self, data):
        """List rendered in flow style ([a, b, ...])."""
        node = self.represent_list(data)
        node.flow_style = True
        return node

    def represent_ml_string(self, data):
        """String rendered as a literal block scalar (style '|')."""
        node = self.represent_str(data)
        node.style = "|"
        return node
class OrderedDictFlowing(OrderedDict):
    """Marker type: an ordered mapping to be dumped in YAML flow style."""


class ListFlowing(list):
    """Marker type: a list to be dumped in YAML flow style."""


class MultiLineString(str):
    """Marker type: a string to be dumped as a literal block scalar (|)."""
# Wire up the marker types (and a few builtins) to their representers.
GRCDumper.add_representer(OrderedDict, GRCDumper.represent_ordered_mapping)
GRCDumper.add_representer(
    OrderedDictFlowing, GRCDumper.represent_ordered_mapping_flowing
)
GRCDumper.add_representer(ListFlowing, GRCDumper.represent_list_flowing)
# Tuples are serialized as plain YAML sequences.
GRCDumper.add_representer(tuple, GRCDumper.represent_list)
GRCDumper.add_representer(MultiLineString, GRCDumper.represent_ml_string)
# Pre-built scalar nodes are passed through untouched.
GRCDumper.add_representer(yaml.nodes.ScalarNode, lambda r, n: n)
GRCDumper.add_representer(attributed_str, GRCDumper.represent_str)
def dump(data, stream=None, **kwargs):
    """Serialize *data* as a YAML document using the GRC dumper defaults.

    Keyword arguments override the defaults (block style, 4-space indent).
    """
    options = {
        "stream": stream,
        "default_flow_style": False,
        "indent": 4,
        "Dumper": GRCDumper,
    }
    options.update(kwargs)
    return yaml.dump_all([data], **options)
# Re-export so callers can use this module as a drop-in for `yaml`.
safe_load = yaml.safe_load
__with_libyaml__ = yaml.__with_libyaml__
|
plugins | zengatv | """
$description Indian live TV channels. OTT service from Zenga TV.
$url zengatv.com
$type live
"""
import logging
import re
from streamlink.plugin import Plugin, pluginmatcher
from streamlink.stream.hls import HLSStream
log = logging.getLogger(__name__)
@pluginmatcher(
    re.compile(r"https?://(www\.)?zengatv\.com/\w+"),
)
class ZengaTV(Plugin):
    """Streamlink Plugin for livestreams on zengatv.com"""

    # Two known markups for the DVR id: a hidden form field and a
    # LivePlayer() JS call.
    _id_re = re.compile(r"""id=(?P<q>["'])dvrid(?P=q)\svalue=(?P=q)(?P<id>[^"']+)(?P=q)""")
    _id_2_re = re.compile(r"""LivePlayer\(.+["'](?P<id>D\d+)["']""")
    api_url = "http://www.zengatv.com/changeResulation/"

    def _get_streams(self):
        headers = {"Referer": self.url}
        res = self.session.http.get(self.url, headers=headers)
        # Probe each pattern in order and stop at the first match.
        m = None
        for pattern in (self._id_re, self._id_2_re):
            m = pattern.search(res.text)
            if m:
                break
        if not m:
            log.error("No video id found")
            return
        dvr_id = m.group("id")
        log.debug("Found video id: {0}".format(dvr_id))
        payload = {"feed": "hd", "dvrId": dvr_id}
        res = self.session.http.post(self.api_url, headers=headers, data=payload)
        if res.status_code == 200:
            yield from HLSStream.parse_variant_playlist(self.session, res.text, headers=headers).items()


__plugin__ = ZengaTV
|
downloaders | SendspaceCom | # -*- coding: utf-8 -*-
import re
from ..base.simple_downloader import SimpleDownloader
class SendspaceCom(SimpleDownloader):
    __name__ = "SendspaceCom"
    __type__ = "downloader"
    __version__ = "0.23"
    __status__ = "testing"

    __pattern__ = r"https?://(?:www\.)?sendspace\.com/file/\w+"
    __config__ = [
        ("enabled", "bool", "Activated", True),
        ("use_premium", "bool", "Use premium account if available", True),
        ("fallback", "bool", "Fallback to free download if premium fails", True),
        ("chk_filesize", "bool", "Check file size", True),
        ("max_wait", "int", "Reconnect if waiting time is greater than minutes", 10),
    ]

    __description__ = """Sendspace.com downloader plugin"""
    __license__ = "GPLv3"
    __authors__ = [("zoidberg", "zoidberg@mujmail.cz")]

    NAME_PATTERN = r'<h2 class="bgray">\s*<(?:b|strong)>(?P<N>.+?)</'
    SIZE_PATTERN = r'<div class="file_description reverse margin_center">\s*<b>File Size:</b>\s*(?P<S>[\d.,]+)(?P<U>[\w^_]+)\s*</div>'
    OFFLINE_PATTERN = r'<div class="msg error" style="cursor: default">Sorry, the file you requested is not available.</div>'

    LINK_FREE_PATTERN = (
        r'<a id="download_button" class="download_page_button button1" href="(.+?)"'
    )
    # FIX: the "?" in "captcha.php?..." must be escaped — unescaped it makes
    # the preceding "p" optional, so the old patterns could never match the
    # literal "php?captcha=" / "php?user=" query strings.
    CAPTCHA_PATTERN = r'<td><img src="(/captchas/captcha\.php\?captcha=(.+?))"></td>'
    # FIX: the old USER_CAPTCHA_PATTERN was missing the opening parenthesis
    # of the outer capture group, leaving an unbalanced ")" that made
    # re.search() raise re.error at runtime.  Group 1 is the image URL path,
    # group 2 the user hash (mirrors CAPTCHA_PATTERN).
    USER_CAPTCHA_PATTERN = r'<td><img src="(/captchas/captcha\.php\?user=(.+?))"></td>'

    def handle_free(self, pyfile):
        """Resolve the free-download link, solving the captcha when present."""
        m = re.search(self.LINK_FREE_PATTERN, self.data)
        if m is not None:
            self.link = m.group(1)
        else:
            m = re.search(self.CAPTCHA_PATTERN, self.data)
            if m is None:
                params = {"download": "Regular Download"}
            else:
                captcha_url1 = "http://www.sendspace.com/" + m.group(1)
                # NOTE(review): assumes the user captcha is always present
                # alongside the image captcha — m is not checked for None.
                m = re.search(self.USER_CAPTCHA_PATTERN, self.data)
                captcha_url2 = "http://www.sendspace.com/" + m.group(1)
                params = {
                    "captcha_hash": m.group(2),
                    "captcha_submit": "Verify",
                    "captcha_answer": self.captcha.decrypt(captcha_url1)
                    + " "
                    + self.captcha.decrypt(captcha_url2),
                }
            self.log_debug(params)
            self.data = self.load(pyfile.url, post=params)
            m = re.search(self.LINK_FREE_PATTERN, self.data)
            if m is None:
                self.retry_captcha()
            else:
                self.link = m.group(1)
|
beetsplug | embyupdate | """Updates the Emby Library whenever the beets library is changed.
emby:
host: localhost
port: 8096
username: user
apikey: apikey
password: password
"""
import hashlib
from urllib.parse import parse_qs, urlencode, urljoin, urlsplit, urlunsplit
import requests
from beets import config
from beets.plugins import BeetsPlugin
def api_url(host, port, endpoint):
    """Return a full Emby API url with a ``format=json`` query parameter.

    :param host: Hostname of the emby server (with or without scheme)
    :param port: Portnumber of the emby server
    :param endpoint: API endpoint
    :type host: str
    :type port: int
    :type endpoint: str
    :returns: Full API url
    :rtype: str
    """
    # Prefix a default scheme when the host does not already carry one.
    if host.startswith("http://") or host.startswith("https://"):
        hostname = host
    else:
        hostname = "http://" + host

    joined = urljoin("{hostname}:{port}".format(hostname=hostname, port=port), endpoint)
    scheme, netloc, path, query_string, fragment = urlsplit(joined)

    # Force JSON responses regardless of any pre-existing query string.
    query_params = parse_qs(query_string)
    query_params["format"] = ["json"]
    rebuilt_query = urlencode(query_params, doseq=True)
    return urlunsplit((scheme, netloc, path, rebuilt_query, fragment))
def password_data(username, password):
    """Return a dict with the username and its encoded password.

    :param username: Emby username
    :param password: Emby password
    :type username: str
    :type password: str
    :returns: Dictionary with username and encoded password
    :rtype: dict
    """
    # Emby's legacy authentication expects both SHA1 and MD5 hex digests.
    encoded = password.encode("utf-8")
    return {
        "username": username,
        "password": hashlib.sha1(encoded).hexdigest(),
        "passwordMd5": hashlib.md5(encoded).hexdigest(),
    }
def create_headers(user_id, token=None):
    """Return the header dict needed to talk to the Emby API.

    :param user_id: Emby user ID
    :param token: Authentication token for Emby (omitted when not given)
    :type user_id: str
    :type token: str
    :returns: Headers for requests
    :rtype: dict
    """
    authorization = (
        f'MediaBrowser UserId="{user_id}", '
        f'Client="other", '
        f'Device="beets", '
        f'DeviceId="beets", '
        f'Version="0.0.0"'
    )
    headers = {"x-emby-authorization": authorization}
    if token:
        headers["x-mediabrowser-token"] = token
    return headers
def get_token(host, port, headers, auth_data):
    """Return token for a user.

    :param host: Emby host
    :param port: Emby port
    :param headers: Headers for requests
    :param auth_data: Username and encoded password for authentication
    :type host: str
    :type port: int
    :type headers: dict
    :type auth_data: dict
    :returns: Access Token (None if authentication failed)
    :rtype: str
    """
    url = api_url(host, port, "/Users/AuthenticateByName")
    r = requests.post(url, headers=headers, data=auth_data)
    # .get() so a failed login yields None rather than raising KeyError.
    return r.json().get("AccessToken")
def get_user(host, port, username):
    """Return user dict from server or an empty list if there is no user.

    :param host: Emby host
    :param port: Emby port
    :param username: Username
    :type host: str
    :type port: int
    :type username: str
    :returns: Matched Users
    :rtype: list
    """
    url = api_url(host, port, "/Users/Public")
    r = requests.get(url)
    # Exact (case-sensitive) name match against the public user list.
    user = [i for i in r.json() if i["Name"] == username]
    return user
class EmbyUpdate(BeetsPlugin):
    """Trigger an Emby library refresh when the beets library changes."""

    def __init__(self):
        super().__init__()

        # Adding defaults.
        # FIX: "userid" must have a default — update() reads it
        # unconditionally and confuse raises a NotFoundError for a key with
        # neither a user-supplied value nor a default.  "username" is left
        # without a default on purpose: it is required configuration.
        config["emby"].add(
            {
                "host": "http://localhost",
                "port": 8096,
                "apikey": None,
                "password": None,
                "userid": None,
            }
        )

        self.register_listener("database_change", self.listen_for_db_change)

    def listen_for_db_change(self, lib, model):
        """Listens for beets db change and register the update for the end."""
        self.register_listener("cli_exit", self.update)

    def update(self, lib):
        """When the client exits, try to send a refresh request to Emby."""
        self._log.info("Updating Emby library...")

        host = config["emby"]["host"].get()
        port = config["emby"]["port"].get()
        username = config["emby"]["username"].get()
        password = config["emby"]["password"].get()
        userid = config["emby"]["userid"].get()
        token = config["emby"]["apikey"].get()

        # Check if at least an apikey or password is given; one of the two is
        # required to authenticate.
        if not any([password, token]):
            self._log.warning("Provide at least Emby password or apikey.")
            return

        if not userid:
            # Get user information from the Emby API.
            user = get_user(host, port, username)
            if not user:
                self._log.warning(f"User {username} could not be found.")
                return
            userid = user[0]["Id"]

        if not token:
            # Create authentication data and headers.
            auth_data = password_data(username, password)
            headers = create_headers(userid)

            # Get authentication token.
            token = get_token(host, port, headers, auth_data)
            if not token:
                self._log.warning("Could not get token for user {0}", username)
                return

        # Recreate headers with a token.
        headers = create_headers(userid, token=token)

        # Trigger the update; Emby replies 204 No Content on success.
        url = api_url(host, port, "/Library/Refresh")
        r = requests.post(url, headers=headers)
        if r.status_code != 204:
            self._log.warning("Update could not be triggered")
        else:
            self._log.info("Update triggered.")
|
rights | views | # This file is part of Archivematica.
#
# Copyright 2010-2013 Artefactual Systems Inc. <http://artefactual.com>
#
# Archivematica is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Archivematica is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Archivematica. If not, see <http://www.gnu.org/licenses/>.
import logging
import re
from components import decorators, helpers
from components.rights import forms
from django.forms.models import inlineformset_factory
from django.http import HttpResponse
from django.shortcuts import redirect, render
from django.urls import reverse
from django.utils.translation import ugettext as _
from main import models
LOGGER = logging.getLogger("archivematica.dashboard")
""" @@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@
Rights-related
@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@ """
def transfer_rights_list(request, uuid):
    """List rights statements attached to a transfer."""
    return rights_list(request, uuid, "transfer")


def transfer_rights_edit(request, uuid, id=None):
    """Create (no id) or edit a rights statement on a transfer."""
    return rights_edit(request, uuid, id, "transfer")


def transfer_rights_delete(request, uuid, id):
    """Delete a rights statement from a transfer."""
    return rights_delete(request, uuid, id, "transfer")


def transfer_grant_delete_context(request, uuid, id):
    """Build context for the confirmation page shown before grant deletion."""
    # NOTE(review): prompt is not passed through ugettext (_) — consider
    # translating it for consistency with the rest of the dashboard.
    prompt = "Delete rights grant?"
    cancel_url = reverse("rights_transfer:index", args=[uuid])
    return {"action": "Delete", "prompt": prompt, "cancel_url": cancel_url}


@decorators.confirm_required("simple_confirm.html", transfer_grant_delete_context)
def transfer_rights_grant_delete(request, uuid, id):
    """Delete a rights grant from a transfer (after user confirmation)."""
    return rights_grant_delete(request, uuid, id, "transfer")


def transfer_rights_grants_edit(request, uuid, id):
    """Edit the grants belonging to a transfer rights statement."""
    return rights_grants_edit(request, uuid, id, "transfer")
def ingest_rights_list(request, uuid):
    """List rights statements attached to an ingest (SIP)."""
    return rights_list(request, uuid, "ingest")


def ingest_rights_edit(request, uuid, id=None):
    """Create (no id) or edit a rights statement on an ingest."""
    return rights_edit(request, uuid, id, "ingest")


def ingest_rights_delete(request, uuid, id):
    """Delete a rights statement from an ingest."""
    return rights_delete(request, uuid, id, "ingest")


def ingest_grant_delete_context(request, uuid, id):
    """Build context for the confirmation page shown before grant deletion."""
    # NOTE(review): prompt is not passed through ugettext (_) — consider
    # translating it for consistency with the rest of the dashboard.
    prompt = "Delete rights grant?"
    cancel_url = reverse("rights_ingest:index", args=[uuid])
    return {"action": "Delete", "prompt": prompt, "cancel_url": cancel_url}


@decorators.confirm_required("simple_confirm.html", ingest_grant_delete_context)
def ingest_rights_grant_delete(request, uuid, id):
    """Delete a rights grant from an ingest (after user confirmation)."""
    return rights_grant_delete(request, uuid, id, "ingest")


def ingest_rights_grants_edit(request, uuid, id):
    """Edit the grants belonging to an ingest rights statement."""
    return rights_grants_edit(request, uuid, id, "ingest")
def rights_parse_agent_id(input):
    """Return the agent id encoded in a rights-holder form value.

    Agent parsing is currently disabled: the previous revision short-circuited
    the function with an unconditional ``return 0``, leaving its parser as
    unreachable dead code, and the matching agent-creation logic in
    ``rights_edit`` is commented out.  This revision keeps that behaviour
    (always return 0) explicit instead of carrying the dead code.

    The disabled parser accepted either a bare integer or a value of the form
    ``"Name [id]"`` and fell back to 0 for anything else.
    """
    # NOTE(review): re-enable real parsing together with the commented-out
    # rights-holder agent handling in rights_edit().
    return 0
def rights_edit(request, uuid, id=None, section="ingest"):
    """Create or edit a PREMIS rights statement for a transfer or SIP.

    When ``id`` is given the existing statement is edited, otherwise a new
    statement is created for the unit identified by ``uuid``.  On POST the
    statement is saved together with any copyright / license / statute /
    "other rights" children plus their notes and documentation identifiers,
    then the user is redirected; on GET the edit form is rendered.

    Fix applied: the "created" flag for a new statute note used the typo
    ``_("statue")`` instead of ``_("statute")``, producing a wrong message
    after redirect.

    NOTE: the template context is built from ``locals()``, so every local
    variable name here is part of the template contract — do not rename.
    """
    jobs = models.Job.objects.filter(sipuuid=uuid)
    name = jobs.get_directory_name()

    # flag indicating what kind of new content, if any, has been created
    new_content_type_created = None

    max_notes = 1

    if id:
        viewRights = models.RightsStatement.objects.get(pk=id)
        agentId = None
        if request.method == "POST":
            postData = request.POST.copy()
            # Rights-holder agent handling is disabled; see rights_parse_agent_id().
            """
            agentId = rights_parse_agent_id(postData.get('rightsholder'))
            if agentId == 0 and postData.get('rightsholder') != '0' and postData.get('rightsholder') != '':
                agent = models.RightsStatementLinkingAgentIdentifier()
                agent.rightsstatement = viewRights
                agent.linkingagentidentifiervalue = postData.get('rightsholder')
                agent.save()
                agentId = agent.id
                postData.__setitem__('rightsholder', agentId)
            """
            form = forms.RightsForm(postData, instance=viewRights)
            form.cleaned_data = postData
            viewRights = form.save()
        else:
            # NOTE(review): this GET branch calls form.save(), writing to the
            # database on a read request — looks intentional but verify.
            form = forms.RightsForm(instance=viewRights)
            form.cleaned_data = viewRights
            form.save()

        # determine how many empty forms should be shown for children
        extra_copyright_forms = (
            max_notes
            - models.RightsStatementCopyright.objects.filter(
                rightsstatement=viewRights
            ).count()
        )
        extra_statute_forms = (
            max_notes
            - models.RightsStatementStatuteInformation.objects.filter(
                rightsstatement=viewRights
            ).count()
        )
        extra_license_forms = (
            max_notes
            - models.RightsStatementLicense.objects.filter(
                rightsstatement=viewRights
            ).count()
        )
        extra_other_forms = (
            max_notes
            - models.RightsStatementOtherRightsInformation.objects.filter(
                rightsstatement=viewRights
            ).count()
        )
    else:
        if request.method == "POST":
            postData = request.POST.copy()
            agentId = rights_parse_agent_id(postData.get("rightsholder"))
            postData.__setitem__("rightsholder", agentId)
            form = forms.RightsForm(postData)
        else:
            form = forms.RightsForm()
            viewRights = models.RightsStatement()
        extra_copyright_forms = max_notes
        extra_statute_forms = max_notes
        extra_license_forms = max_notes
        extra_license_notes = max_notes
        extra_other_forms = max_notes

    # create inline formsets for child elements
    CopyrightFormSet = inlineformset_factory(
        models.RightsStatement,
        models.RightsStatementCopyright,
        extra=extra_copyright_forms,
        can_delete=False,
        form=forms.RightsCopyrightForm,
    )
    StatuteFormSet = inlineformset_factory(
        models.RightsStatement,
        models.RightsStatementStatuteInformation,
        extra=extra_statute_forms,
        can_delete=False,
        form=forms.RightsStatuteForm,
    )
    LicenseFormSet = inlineformset_factory(
        models.RightsStatement,
        models.RightsStatementLicense,
        extra=extra_license_forms,
        can_delete=False,
        form=forms.RightsLicenseForm,
    )
    OtherFormSet = inlineformset_factory(
        models.RightsStatement,
        models.RightsStatementOtherRightsInformation,
        extra=extra_other_forms,
        can_delete=False,
        form=forms.RightsOtherRightsForm,
    )

    copyrightFormset = CopyrightFormSet()
    statuteFormset = StatuteFormSet()
    licenseFormset = LicenseFormSet()
    otherFormset = OtherFormSet()

    # handle form creation/saving
    if request.method == "POST":
        if id:
            createdRights = viewRights
        else:
            # See MetadataAppliesToTypes: transfers and SIPs use different types.
            sectionTypeID = {"transfer": "Transfer", "ingest": "SIP"}
            type_id = helpers.get_metadata_type_id_by_description(
                sectionTypeID[section]
            )
            newRights = models.RightsStatement(
                metadataappliestotype=type_id, metadataappliestoidentifier=uuid
            )
            form = forms.RightsForm(request.POST, instance=newRights)
            createdRights = form.save()

        copyrightFormset = CopyrightFormSet(request.POST, instance=createdRights)
        if not copyrightFormset.is_valid():
            return render(request, "rights/rights_edit.html", locals())
        createdCopyrightSet = copyrightFormset.save()

        # establish whether or not there is a copyright information instance to use as a parent
        if len(createdCopyrightSet) == 1:
            createdCopyright = createdCopyrightSet[0]
        else:
            createdCopyright = False

        # handle creation of new copyright notes, creating parent if necessary
        if request.POST.get("copyright_note", "") != "":
            # make new copyright record if it doesn't exist
            if not createdCopyright:
                try:
                    createdCopyright = models.RightsStatementCopyright.objects.get(
                        rightsstatement=createdRights
                    )
                except:
                    createdCopyright = models.RightsStatementCopyright(
                        rightsstatement=createdRights
                    )
                    createdCopyright.save()

            copyrightNote = models.RightsStatementCopyrightNote(
                rightscopyright=createdCopyright
            )
            copyrightNote.copyrightnote = request.POST.get("copyright_note", "")
            copyrightNote.save()
            new_content_type_created = _("copyright")

        # handle creation of new documentation identifiers
        if (
            request.POST.get("copyright_documentation_identifier_type", "") != ""
            or request.POST.get("copyright_documentation_identifier_value", "") != ""
            or request.POST.get("copyright_documentation_identifier_role", "")
        ):
            # make new copyright record if it doesn't exist
            if not createdCopyright:
                try:
                    createdCopyright = models.RightsStatementCopyright.objects.get(
                        rightsstatement=createdRights
                    )
                except:
                    createdCopyright = models.RightsStatementCopyright(
                        rightsstatement=createdRights
                    )
                    createdCopyright.save()

            copyrightDocIdentifier = (
                models.RightsStatementCopyrightDocumentationIdentifier(
                    rightscopyright=createdCopyright
                )
            )
            copyrightDocIdentifier.copyrightdocumentationidentifiertype = (
                request.POST.get("copyright_documentation_identifier_type", "")
            )
            copyrightDocIdentifier.copyrightdocumentationidentifiervalue = (
                request.POST.get("copyright_documentation_identifier_value", "")
            )
            copyrightDocIdentifier.copyrightdocumentationidentifierrole = (
                request.POST.get("copyright_documentation_identifier_role", "")
            )
            copyrightDocIdentifier.save()
            new_content_type_created = _("copyright")

        licenseFormset = LicenseFormSet(request.POST, instance=createdRights)
        if not licenseFormset.is_valid():
            return render(request, "rights/rights_edit.html", locals())
        createdLicenseSet = licenseFormset.save()

        # establish whether or not there is a license instance to use as a parent
        if len(createdLicenseSet) == 1:
            createdLicense = createdLicenseSet[0]
        else:
            createdLicense = False

        # handle creation of new license notes, creating parent if necessary
        if request.POST.get("license_note", "") != "":
            # make new license record if it doesn't exist
            if not createdLicense:
                try:
                    createdLicense = models.RightsStatementLicense.objects.get(
                        rightsstatement=createdRights
                    )
                except:
                    createdLicense = models.RightsStatementLicense(
                        rightsstatement=createdRights
                    )
                    createdLicense.save()

            licenseNote = models.RightsStatementLicenseNote(
                rightsstatementlicense=createdLicense
            )
            licenseNote.licensenote = request.POST.get("license_note", "")
            licenseNote.save()
            new_content_type_created = _("license")

        # handle creation of new documentation identifiers
        if (
            request.POST.get("license_documentation_identifier_type", "") != ""
            or request.POST.get("license_documentation_identifier_value", "") != ""
            or request.POST.get("license_documentation_identifier_role", "")
        ):
            # make new license record if it doesn't exist
            if not createdLicense:
                try:
                    createdLicense = models.RightsStatementLicense.objects.get(
                        rightsstatement=createdRights
                    )
                except:
                    createdLicense = models.RightsStatementLicense(
                        rightsstatement=createdRights
                    )
                    createdLicense.save()

            licenseDocIdentifier = models.RightsStatementLicenseDocumentationIdentifier(
                rightsstatementlicense=createdLicense
            )
            licenseDocIdentifier.licensedocumentationidentifiertype = request.POST.get(
                "license_documentation_identifier_type", ""
            )
            licenseDocIdentifier.licensedocumentationidentifiervalue = request.POST.get(
                "license_documentation_identifier_value", ""
            )
            licenseDocIdentifier.licensedocumentationidentifierrole = request.POST.get(
                "license_documentation_identifier_role", ""
            )
            licenseDocIdentifier.save()
            new_content_type_created = _("license")

        statuteFormset = StatuteFormSet(request.POST, instance=createdRights)
        if not statuteFormset.is_valid():
            return render(request, "rights/rights_edit.html", locals())
        createdStatuteSet = statuteFormset.save()

        if (
            request.POST.get("statute_previous_pk", "") == "None"
            and len(createdStatuteSet) == 1
        ):
            new_content_type_created = _("statute")

        noteCreated = False
        for form in statuteFormset.forms:
            statuteCreated = False
            # handle documentation identifier creation for a parent that's a blank statute
            if (
                request.POST.get("statute_documentation_identifier_type_None", "") != ""
                or request.POST.get("statute_documentation_identifier_value_None", "")
                != ""
                or request.POST.get("statute_documentation_identifier_role_None", "")
                != ""
            ):
                if form.instance.pk:
                    statuteCreated = form.instance
                else:
                    statuteCreated = models.RightsStatementStatuteInformation(
                        rightsstatement=createdRights
                    )
                    statuteCreated.save()

                statuteDocIdentifier = (
                    models.RightsStatementStatuteDocumentationIdentifier(
                        rightsstatementstatute=statuteCreated
                    )
                )
                statuteDocIdentifier.statutedocumentationidentifiertype = (
                    request.POST.get("statute_documentation_identifier_type_None", "")
                )
                statuteDocIdentifier.statutedocumentationidentifiervalue = (
                    request.POST.get("statute_documentation_identifier_value_None", "")
                )
                statuteDocIdentifier.statutedocumentationidentifierrole = (
                    request.POST.get("statute_documentation_identifier_role_None", "")
                )
                statuteDocIdentifier.save()
                new_content_type_created = _("statute")
            else:
                # handle documentation identifier creation for a parent statute that already exists
                if (
                    request.POST.get(
                        "statute_documentation_identifier_type_"
                        + str(form.instance.pk),
                        "",
                    )
                    != ""
                    or request.POST.get(
                        "statute_documentation_identifier_value_"
                        + str(form.instance.pk),
                        "",
                    )
                    or request.POST.get(
                        "statute_documentation_identifier_role_"
                        + str(form.instance.pk),
                        "",
                    )
                ):
                    statuteDocIdentifier = (
                        models.RightsStatementStatuteDocumentationIdentifier(
                            rightsstatementstatute=form.instance
                        )
                    )
                    statuteDocIdentifier.statutedocumentationidentifiertype = (
                        request.POST.get(
                            "statute_documentation_identifier_type_"
                            + str(form.instance.pk),
                            "",
                        )
                    )
                    statuteDocIdentifier.statutedocumentationidentifiervalue = (
                        request.POST.get(
                            "statute_documentation_identifier_value_"
                            + str(form.instance.pk),
                            "",
                        )
                    )
                    statuteDocIdentifier.statutedocumentationidentifierrole = (
                        request.POST.get(
                            "statute_documentation_identifier_role_"
                            + str(form.instance.pk),
                            "",
                        )
                    )
                    statuteDocIdentifier.save()
                    new_content_type_created = _("statute")

            # handle note creation for a parent that's a blank statute
            if (
                request.POST.get("new_statute_note_None", "") != ""
                and not form.instance.pk
            ):
                if not statuteCreated:
                    statuteCreated = models.RightsStatementStatuteInformation(
                        rightsstatement=createdRights
                    )
                    statuteCreated.save()
                noteCreated = models.RightsStatementStatuteInformationNote(
                    rightsstatementstatute=statuteCreated
                )
                noteCreated.statutenote = request.POST.get("new_statute_note_None", "")
                noteCreated.save()
                # BUGFIX: was _("statue") — typo broke the "created" message.
                new_content_type_created = _("statute")
            else:
                # handle note creation for a parent statute that already exists
                if (
                    request.POST.get("new_statute_note_" + str(form.instance.pk), "")
                    != ""
                ):
                    noteCreated = models.RightsStatementStatuteInformationNote(
                        rightsstatementstatute=form.instance
                    )
                    noteCreated.statutenote = request.POST.get(
                        "new_statute_note_" + str(form.instance.pk), ""
                    )
                    noteCreated.save()
                    new_content_type_created = _("statute")

        # handle note creation for a parent that's just been created
        # (``form`` is the last formset form, whose instance was just saved)
        if request.POST.get("new_statute_note_None", "") != "" and not noteCreated:
            noteCreated = models.RightsStatementStatuteInformationNote(
                rightsstatementstatute=form.instance
            )
            noteCreated.statutenote = request.POST.get("new_statute_note_None", "")
            noteCreated.save()

        # display (possibly revised) formset
        statuteFormset = StatuteFormSet(instance=createdRights)

        otherFormset = OtherFormSet(request.POST, instance=createdRights)
        if not otherFormset.is_valid():
            return render(request, "rights/rights_edit.html", locals())
        createdOtherSet = otherFormset.save()

        # establish whether or not there is an "other" instance to use as a parent
        if len(createdOtherSet) == 1:
            createdOther = createdOtherSet[0]
        else:
            createdOther = False

        # handle creation of new "other" notes, creating parent if necessary
        if request.POST.get("otherrights_note", "") != "":
            # make new "other" record if it doesn't exist
            if not createdOther:
                try:
                    createdOther = (
                        models.RightsStatementOtherRightsInformation.objects.get(
                            rightsstatement=createdRights
                        )
                    )
                except:
                    createdOther = models.RightsStatementOtherRightsInformation(
                        rightsstatement=createdRights
                    )
                    createdOther.save()

            otherNote = models.RightsStatementOtherRightsInformationNote(
                rightsstatementotherrights=createdOther
            )
            otherNote.otherrightsnote = request.POST.get("otherrights_note", "")
            otherNote.save()
            new_content_type_created = "other"

        # handle creation of new documentation identifiers
        if (
            request.POST.get("other_documentation_identifier_type", "") != ""
            or request.POST.get("other_documentation_identifier_value", "") != ""
            or request.POST.get("other_documentation_identifier_role", "")
        ):
            # make new other record if it doesn't exist
            if not createdOther:
                try:
                    createdOther = (
                        models.RightsStatementOtherRightsInformation.objects.get(
                            rightsstatement=createdRights
                        )
                    )
                except:
                    createdOther = models.RightsStatementOtherRightsInformation(
                        rightsstatement=createdRights
                    )
                    createdOther.save()

            otherDocIdentifier = (
                models.RightsStatementOtherRightsDocumentationIdentifier(
                    rightsstatementotherrights=createdOther
                )
            )
            otherDocIdentifier.otherrightsdocumentationidentifiertype = (
                request.POST.get("other_documentation_identifier_type", "")
            )
            otherDocIdentifier.otherrightsdocumentationidentifiervalue = (
                request.POST.get("other_documentation_identifier_value", "")
            )
            otherDocIdentifier.otherrightsdocumentationidentifierrole = (
                request.POST.get("other_documentation_identifier_role", "")
            )
            otherDocIdentifier.save()
            new_content_type_created = "other"

        if (
            request.POST.get("next_button", "") is not None
            and request.POST.get("next_button", "") != ""
        ):
            return redirect("rights_%s:grants_edit" % section, uuid, createdRights.pk)
        else:
            url = reverse("rights_%s:edit" % section, args=[uuid, createdRights.pk])
            try:
                # new_content_type_created may be None, in which case the
                # concatenation raises and no query parameter is appended.
                url = url + "?created=" + new_content_type_created
            except:
                pass
            return redirect(url)
    else:
        copyrightFormset = CopyrightFormSet(instance=viewRights)
        statuteFormset = StatuteFormSet(instance=viewRights)
        licenseFormset = LicenseFormSet(instance=viewRights)
        otherFormset = OtherFormSet(instance=viewRights)

    # show what content's been created after a redirect
    if request.GET.get("created", "") != "":
        new_content_type_created = request.GET.get("created", "")

    return render(request, "rights/rights_edit.html", locals())
def rights_grants_edit(request, uuid, id, section="ingest"):
    """Edit the acts/grants attached to rights statement ``id``.

    POST saves the grant formset plus any new restriction/note children and
    redirects; GET renders the form.  The template context is built from
    ``locals()``, so local variable names are part of the template contract.
    """
    jobs = models.Job.objects.filter(sipuuid=uuid)
    name = jobs.get_directory_name()
    viewRights = models.RightsStatement.objects.get(pk=id)

    # determine how many empty forms should be shown for children
    extra_grant_forms = 1

    # create inline formsets for child elements
    GrantFormSet = inlineformset_factory(
        models.RightsStatement,
        models.RightsStatementRightsGranted,
        extra=extra_grant_forms,
        can_delete=False,
        form=forms.RightsGrantedForm,
    )

    # handle form creation/saving
    if request.method == "POST":
        grantFormset = GrantFormSet(request.POST, instance=viewRights)
        # NOTE(review): save() is called without checking is_valid() first
        # (unlike rights_edit); invalid data raises instead of re-rendering.
        grantFormset.save()
        restrictionCreated = False
        noteCreated = False
        for form in grantFormset.forms:
            grantCreated = False
            # handle restriction creation for a parent that's a blank grant
            if (
                request.POST.get("new_rights_restriction_None", "") != ""
                and not form.instance.pk
            ):
                grantCreated = models.RightsStatementRightsGranted(
                    rightsstatement=viewRights
                )
                grantCreated.save()
                restrictionCreated = models.RightsStatementRightsGrantedRestriction(
                    rightsgranted=grantCreated
                )
                restrictionCreated.restriction = request.POST.get(
                    "new_rights_restriction_None", ""
                )
                restrictionCreated.save()
            else:
                # handle restriction creation for a parent grant that already exists
                if (
                    request.POST.get(
                        "new_rights_restriction_" + str(form.instance.pk), ""
                    )
                    != ""
                ):
                    restrictionCreated = models.RightsStatementRightsGrantedRestriction(
                        rightsgranted=form.instance
                    )
                    restrictionCreated.restriction = request.POST.get(
                        "new_rights_restriction_" + str(form.instance.pk), ""
                    )
                    restrictionCreated.save()
            # handle note creation for a parent that's a blank grant
            if (
                request.POST.get("new_rights_note_None", "") != ""
                and not form.instance.pk
            ):
                if not grantCreated:
                    grantCreated = models.RightsStatementRightsGranted(
                        rightsstatement=viewRights
                    )
                    grantCreated.save()
                noteCreated = models.RightsStatementRightsGrantedNote(
                    rightsgranted=grantCreated
                )
                noteCreated.rightsgrantednote = request.POST.get(
                    "new_rights_note_None", ""
                )
                noteCreated.save()
            else:
                # handle note creation for a parent grant that already exists
                if (
                    request.POST.get("new_rights_note_" + str(form.instance.pk), "")
                    != ""
                ):
                    noteCreated = models.RightsStatementRightsGrantedNote(
                        rightsgranted=form.instance
                    )
                    noteCreated.rightsgrantednote = request.POST.get(
                        "new_rights_note_" + str(form.instance.pk), ""
                    )
                    noteCreated.save()

        # handle restriction creation for a parent that's just been created
        # (``form`` is the last formset form, whose instance was just saved)
        if (
            request.POST.get("new_rights_restriction_None", "") != ""
            and not restrictionCreated
        ):
            restrictionCreated = models.RightsStatementRightsGrantedRestriction(
                rightsgranted=form.instance
            )
            restrictionCreated.restriction = request.POST.get(
                "new_rights_restriction_None", ""
            )
            restrictionCreated.save()

        # handle note creation for a parent that's just been created
        if request.POST.get("new_rights_note_None", "") != "" and not noteCreated:
            noteCreated = models.RightsStatementRightsGrantedNote(
                rightsgranted=form.instance
            )
            noteCreated.rightsgrantednote = request.POST.get("new_rights_note_None", "")
            noteCreated.save()

        # display (possibly revised) formset
        grantFormset = GrantFormSet(instance=viewRights)

    if request.method == "POST":
        if (
            request.POST.get("next_button", "") is not None
            and request.POST.get("next_button", "") != ""
        ):
            return redirect("rights_%s:index" % section, uuid)
        else:
            url = reverse("rights_%s:grants_edit" % section, args=[uuid, viewRights.pk])
            return redirect(url)
    else:
        return render(request, "rights/rights_grants_edit.html", locals())
def rights_delete(request, uuid, id, section):
    """Delete rights statement ``id`` and return to the section's rights index."""
    statement = models.RightsStatement.objects.get(pk=id)
    statement.delete()
    return redirect("rights_%s:index" % section, uuid)
def rights_grant_delete(request, uuid, id, section):
    """Delete rights grant ``id`` and return to the section's rights index."""
    grant = models.RightsStatementRightsGranted.objects.get(pk=id)
    grant.delete()
    return redirect("rights_%s:index" % section, uuid)
def rights_holders_lookup(request, id):
    """Return the display label ``"value [pk]"`` for a rights-holder agent.

    Responds with an empty string when the agent cannot be looked up (missing
    pk, bad id, etc.) so the client-side widget degrades gracefully.
    """
    try:
        agent = models.RightsStatementLinkingAgentIdentifier.objects.get(pk=id)
    # Narrowed from a bare ``except:`` which also swallowed SystemExit and
    # KeyboardInterrupt; any lookup failure still yields "" (best-effort).
    except Exception:
        result = ""
    else:
        result = agent.linkingagentidentifiervalue + " [" + str(agent.id) + "]"
    return HttpResponse(result)
def rights_holders_autocomplete(request):
    """Return a JSON map of rights-holder labels matching the search text."""
    search_text = request.GET.get("text", "")
    matches = models.RightsStatementLinkingAgentIdentifier.objects.filter(
        linkingagentidentifiervalue__icontains=search_text
    )
    labels = [
        agent.linkingagentidentifiervalue + " [" + str(agent.id) + "]"
        for agent in matches
    ]
    # The autocomplete widget expects identical {label: label} pairs.
    response = {label: label for label in labels}
    return helpers.json_response(response)
def rights_list(request, uuid, section):
    """List the rights grants attached to a transfer, SIP or file."""
    jobs = models.Job.objects.filter(sipuuid=uuid)
    name = jobs.get_directory_name()

    # Descriptions come from the MetadataAppliesToTypes table.
    types = {"transfer": "Transfer", "ingest": "SIP", "file": "File"}
    type_id = helpers.get_metadata_type_id_by_description(types[section])

    grants = models.RightsStatementRightsGranted.objects.filter(
        rightsstatement__metadataappliestotype=type_id,
        rightsstatement__metadataappliestoidentifier__exact=uuid,
    )

    # When listing ingest rights we also want to show transfer rights.
    # The only way found to get the related transfer of a SIP is looking
    # into the File table.
    transfer_grants = None
    if section == "ingest":
        try:
            related_transfer_uuids = (
                models.File.objects.filter(
                    sip_id=uuid, removedtime__isnull=True, transfer_id__isnull=False
                )
                .values_list("transfer", flat=True)
                .distinct()
            )
            transfer_grants = models.RightsStatementRightsGranted.objects.filter(
                rightsstatement__metadataappliestotype__description=types["transfer"],
                rightsstatement__metadataappliestoidentifier__in=related_transfer_uuids,
            )
        except Exception:
            # Best-effort: the ingest list still renders without transfer rights.
            LOGGER.exception("Error fetching Transfer rights")

    context = {
        "grants": grants,
        "jobs": jobs,
        "name": name,
        "section": section,
        "transfer_grants": transfer_grants,
        "uuid": uuid,
    }
    return render(request, "rights/rights_list.html", context)
|
api | insight | import json
from functools import lru_cache
from typing import Any, Dict, List, Optional, Type, Union, cast
import structlog
from django.db import transaction
from django.db.models import Count, Prefetch, QuerySet
from django.db.models.query_utils import Q
from django.http import HttpResponse
from django.utils.text import slugify
from django.utils.timezone import now
from django_filters.rest_framework import DjangoFilterBackend
from drf_spectacular.types import OpenApiTypes
from drf_spectacular.utils import OpenApiParameter, OpenApiResponse
from posthog import schema
from posthog.api.documentation import extend_schema
from posthog.api.forbid_destroy_model import ForbidDestroyModel
from posthog.api.insight_serializers import (
FunnelSerializer,
FunnelStepsResultsSerializer,
TrendResultsSerializer,
TrendSerializer,
)
from posthog.api.routing import StructuredViewSetMixin
from posthog.api.shared import UserBasicSerializer
from posthog.api.tagged_item import TaggedItemSerializerMixin, TaggedItemViewSetMixin
from posthog.api.utils import format_paginated_url
from posthog.auth import SharingAccessTokenAuthentication
from posthog.caching.fetch_from_cache import (
InsightResult,
fetch_cached_insight_result,
synchronously_update_cache,
)
from posthog.caching.insights_api import should_refresh_insight
from posthog.client import sync_execute
from posthog.constants import (
BREAKDOWN_VALUES_LIMIT,
INSIGHT,
INSIGHT_FUNNELS,
INSIGHT_PATHS,
INSIGHT_STICKINESS,
PATHS_INCLUDE_EVENT_TYPES,
TRENDS_STICKINESS,
FunnelVizType,
)
from posthog.decorators import cached_by_filters
from posthog.helpers.multi_property_breakdown import (
protect_old_clients_from_multi_property_default,
)
from posthog.hogql.errors import HogQLException
from posthog.kafka_client.topics import KAFKA_METRICS_TIME_TO_SEE_DATA
from posthog.models import DashboardTile, Filter, Insight, User
from posthog.models.activity_logging.activity_log import (
Change,
Detail,
changes_between,
describe_change,
load_activity,
log_activity,
)
from posthog.models.activity_logging.activity_page import activity_page_response
from posthog.models.dashboard import Dashboard
from posthog.models.filters import RetentionFilter
from posthog.models.filters.path_filter import PathFilter
from posthog.models.filters.stickiness_filter import StickinessFilter
from posthog.models.insight import InsightViewed
from posthog.models.utils import UUIDT
from posthog.permissions import (
ProjectMembershipNecessaryPermissions,
TeamMemberAccessPermission,
)
from posthog.queries.funnels import (
ClickhouseFunnelTimeToConvert,
ClickhouseFunnelTrends,
)
from posthog.queries.funnels.utils import get_funnel_order_class
from posthog.queries.paths.paths import Paths
from posthog.queries.retention import Retention
from posthog.queries.stickiness import Stickiness
from posthog.queries.trends.trends import Trends
from posthog.queries.util import get_earliest_timestamp
from posthog.rate_limit import (
ClickHouseBurstRateThrottle,
ClickHouseSustainedRateThrottle,
)
from posthog.settings import CAPTURE_TIME_TO_SEE_DATA, SITE_URL
from posthog.settings.data_stores import CLICKHOUSE_CLUSTER
from posthog.user_permissions import UserPermissionsSerializerMixin
from posthog.utils import (
DEFAULT_DATE_FROM_DAYS,
refresh_requested_by_client,
relative_date_parse,
str_to_bool,
)
from prometheus_client import Counter
from rest_framework import request, serializers, status, viewsets
from rest_framework.decorators import action
from rest_framework.exceptions import ParseError, PermissionDenied, ValidationError
from rest_framework.parsers import JSONParser
from rest_framework.permissions import IsAuthenticated
from rest_framework.response import Response
from rest_framework.settings import api_settings
from rest_framework_csv import renderers as csvrenderers
from sentry_sdk import capture_exception
from statshog.defaults.django import statsd
# Structured logger for this module.
logger = structlog.get_logger(__name__)

# Prometheus counter: insight refreshes that should_refresh_insight() decided
# to initiate, labelled by whether the insight was viewed via a share link.
INSIGHT_REFRESH_INITIATED_COUNTER = Counter(
    "insight_refresh_initiated",
    "Insight refreshes initiated, based on should_refresh_insight().",
    labelnames=["is_shared"],
)
def log_insight_activity(
    activity: str,
    insight: Insight,
    insight_id: int,
    insight_short_id: str,
    organization_id: UUIDT,
    team_id: int,
    user: User,
    changes: Optional[List[Change]] = None,
) -> None:
    """Record an activity-log entry for an insight.

    ``insight_id`` and ``insight_short_id`` are passed separately because
    some activities (e.g. delete) mutate the Insight instance itself.
    Insights without a name or derived name (as created by the experiments
    feature) are not logged.
    """
    insight_name: Optional[str] = insight.name or insight.derived_name
    if not insight_name:
        # Nameless insight (e.g. created by experiments): skip logging.
        return
    log_activity(
        organization_id=organization_id,
        team_id=team_id,
        user=user,
        item_id=insight_id,
        scope="Insight",
        activity=activity,
        detail=Detail(name=insight_name, changes=changes, short_id=insight_short_id),
    )
class QuerySchemaParser(JSONParser):
    """JSON parser that validates an optional top-level "query" field.

    Bodies without a "query" key pass through untouched, so the parser is a
    no-op for them; a present query is validated against the schema and any
    validation failure is surfaced as a parse error (HTTP 400).
    """

    def parse(self, stream, media_type=None, parser_context=None):
        data = super().parse(stream, media_type, parser_context)
        try:
            query = data.get("query", None)
            if query:
                schema.Model.model_validate(query)
        except Exception as error:
            raise ParseError(detail=str(error))
        return data
class DashboardTileBasicSerializer(serializers.ModelSerializer):
    """Minimal tile serializer: just the tile id, parent dashboard id and deletion flag."""

    class Meta:
        model = DashboardTile
        fields = ["id", "dashboard_id", "deleted"]
class InsightBasicSerializer(TaggedItemSerializerMixin, serializers.ModelSerializer):
    """
    Simplified serializer to speed response times when loading large amounts of objects.
    """

    dashboard_tiles = DashboardTileBasicSerializer(many=True, read_only=True)
    created_by = UserBasicSerializer(read_only=True)

    class Meta:
        model = Insight
        fields = [
            "id",
            "short_id",
            "name",
            "derived_name",
            "filters",
            "query",
            "dashboards",
            "dashboard_tiles",
            "description",
            "last_refresh",
            "refreshing",
            "saved",
            "tags",
            "updated_at",
            "created_by",
            "created_at",
            "last_modified_at",
            "favorited",
        ]
        read_only_fields = ("short_id", "updated_at", "last_refresh", "refreshing")

    def create(self, validated_data: Dict, *args: Any, **kwargs: Any) -> Any:
        # This serializer is read-oriented; creation goes through InsightSerializer.
        raise NotImplementedError()

    def to_representation(self, instance):
        """Serialize the insight, deriving legacy fields from the tile data."""
        representation = super().to_representation(instance)
        # Derive the deprecated flat `dashboards` id list from the tile objects.
        representation["dashboards"] = [
            tile["dashboard_id"] for tile in representation["dashboard_tiles"]
        ]
        filters = instance.dashboard_filters()
        # Default the date range when neither the filters nor a query pin one.
        if not filters.get("date_from") and not instance.query:
            filters.update({"date_from": f"-{DEFAULT_DATE_FROM_DAYS}d"})
        representation["filters"] = filters
        return representation

    @lru_cache(maxsize=1)
    def _dashboard_tiles(self, instance):
        # NOTE(review): lru_cache on an instance method keeps `self` (and the
        # cached args) alive for the cache's lifetime (ruff B019) — confirm
        # whether per-serializer caching was intended here.
        return [tile.dashboard_id for tile in instance.dashboard_tiles.all()]
class InsightSerializer(InsightBasicSerializer, UserPermissionsSerializerMixin):
    """Full insight serializer: adds results, refresh metadata and permission levels."""

    # Query/cache results for the insight, resolved per request context.
    result = serializers.SerializerMethodField()
    last_refresh = serializers.SerializerMethodField(
        read_only=True,
        help_text="""
    The datetime this insight's results were generated.
    If added to one or more dashboards the insight can be refreshed separately on each.
    Returns the appropriate last_refresh datetime for the context the insight is viewed in
    (see from_dashboard query parameter).
    """,
    )
    next_allowed_client_refresh = serializers.SerializerMethodField(
        read_only=True,
        help_text="""
    The earliest possible datetime at which we'll allow the cached results for this insight to be refreshed
    by querying the database.
    """,
    )
    is_cached = serializers.SerializerMethodField(read_only=True)
    created_by = UserBasicSerializer(read_only=True)
    last_modified_by = UserBasicSerializer(read_only=True)
    effective_restriction_level = serializers.SerializerMethodField()
    effective_privilege_level = serializers.SerializerMethodField()
    timezone = serializers.SerializerMethodField(
        help_text="The timezone this chart is displayed in."
    )
    dashboards = serializers.PrimaryKeyRelatedField(
        help_text="""
    DEPRECATED. Will be removed in a future release. Use dashboard_tiles instead.
    A dashboard ID for each of the dashboards that this insight is displayed on.
    """,
        many=True,
        required=False,
        queryset=Dashboard.objects.all(),
    )
    dashboard_tiles = DashboardTileBasicSerializer(
        many=True,
        read_only=True,
        help_text="""
    A dashboard tile ID and dashboard_id for each of the dashboards that this insight is displayed on.
    """,
    )
    query = serializers.JSONField(
        required=False, allow_null=True, help_text="Query node JSON string"
    )

    class Meta:
        model = Insight
        fields = [
            "id",
            "short_id",
            "name",
            "derived_name",
            "filters",
            "query",
            "order",
            "deleted",
            "dashboards",
            "dashboard_tiles",
            "last_refresh",
            "next_allowed_client_refresh",
            "result",
            "created_at",
            "created_by",
            "description",
            "updated_at",
            "tags",
            "favorited",
            "saved",
            "last_modified_at",
            "last_modified_by",
            "is_sample",
            "effective_restriction_level",
            "effective_privilege_level",
            "timezone",
            "is_cached",
        ]
        read_only_fields = (
            "created_at",
            "created_by",
            "last_modified_at",
            "last_modified_by",
            "short_id",
            "updated_at",
            "is_sample",
            "effective_restriction_level",
            "effective_privilege_level",
            "timezone",
            "refreshing",
            "is_cached",
        )
def create(self, validated_data: Dict, *args: Any, **kwargs: Any) -> Insight:
    """Create an insight, attach it to any requested dashboards and log activity.

    Tags are popped from ``validated_data`` and attached separately because
    they are global tag relationships rather than plain model fields; the
    same goes for the deprecated ``dashboards`` list, which is converted to
    DashboardTile rows.
    """
    request = self.context["request"]
    # tags are created separately below as global tag relationships
    tags = validated_data.pop("tags", None)
    team_id = self.context["team_id"]
    created_by = validated_data.pop("created_by", request.user)
    dashboards = validated_data.pop("dashboards", None)

    insight = Insight.objects.create(
        team_id=team_id,
        created_by=created_by,
        last_modified_by=request.user,
        **validated_data,
    )

    if dashboards is not None:
        # Removed redundant `.all()` after `.filter(...)` (no-op on a queryset).
        for dashboard in Dashboard.objects.filter(id__in=[d.id for d in dashboards]):
            # Tiles may only be created on dashboards of the same team.
            if dashboard.team != insight.team:
                raise serializers.ValidationError("Dashboard not found")

            DashboardTile.objects.create(
                insight=insight, dashboard=dashboard, last_refresh=now()
            )
            # set last refresh if the insight is on at least one dashboard
            # NOTE(review): this assignment is not followed by insight.save()
            # here — confirm downstream code persists or tolerates the
            # in-memory value.
            insight.last_refresh = now()

    # Manual tag creation since this create method doesn't call super()
    self._attempt_set_tags(tags, insight)

    log_insight_activity(
        activity="created",
        insight=insight,
        insight_id=insight.id,
        insight_short_id=insight.short_id,
        organization_id=self.context["request"].user.current_organization_id,
        team_id=team_id,
        user=self.context["request"].user,
    )

    return insight
def update(self, instance: Insight, validated_data: Dict, **kwargs) -> Insight:
    """Update an insight, recording dashboard membership changes for the activity log."""
    dashboards_before_change: List[Union[str, Dict]] = []
    try:
        # since it is possible to be undeleting a soft deleted insight
        # the state captured before the update has to include soft deleted insights
        # or we can't capture undeletes to the activity log
        before_update = Insight.objects_including_soft_deleted.prefetch_related(
            "tagged_items__tag", "dashboards"
        ).get(pk=instance.id)
        dashboards_before_change = [
            describe_change(dt.dashboard) for dt in instance.dashboard_tiles.all()
        ]
        # Stable ordering: plain-string descriptions sort first, then by dashboard id.
        dashboards_before_change = sorted(
            dashboards_before_change,
            key=lambda x: -1 if isinstance(x, str) else x["id"],
        )
    except Insight.DoesNotExist:
        before_update = None

    # Remove is_sample if it's set as user has altered the sample configuration
    validated_data["is_sample"] = False
    # Only bump the last-modified metadata when a material field changed.
    if validated_data.keys() & Insight.MATERIAL_INSIGHT_FIELDS:
        instance.last_modified_at = now()
        instance.last_modified_by = self.context["request"].user

    if validated_data.get("deleted", False):
        # Soft-deleting the insight also soft-deletes all of its dashboard tiles.
        DashboardTile.objects_including_soft_deleted.filter(
            insight__id=instance.id
        ).update(deleted=True)
    else:
        dashboards = validated_data.pop("dashboards", None)
        if dashboards is not None:
            self._update_insight_dashboards(dashboards, instance)

    updated_insight = super().update(instance, validated_data)

    self._log_insight_update(
        before_update, dashboards_before_change, updated_insight
    )

    self.user_permissions.reset_insights_dashboard_cached_results()
    return updated_insight
def _log_insight_update(
    self, before_update, dashboards_before_change, updated_insight
):
    """
    KLUDGE: Automatic detection of insight dashboard updates is flaky.
    Strip any auto-detected "dashboards" change out of the diff and splice
    in a synthetic one built from the state captured at the moment the
    dashboards were actually updated.
    """
    auto_detected = changes_between(
        "Insight", previous=before_update, current=updated_insight
    )
    non_dashboard_changes = [
        change for change in auto_detected if change.field != "dashboards"
    ]
    all_changes = non_dashboard_changes + self._synthetic_dashboard_changes(
        dashboards_before_change
    )
    with transaction.atomic():
        log_insight_activity(
            activity="updated",
            insight=updated_insight,
            insight_id=updated_insight.id,
            insight_short_id=updated_insight.short_id,
            organization_id=self.context["request"].user.current_organization_id,
            team_id=self.context["team_id"],
            user=self.context["request"].user,
            changes=all_changes,
        )
def _synthetic_dashboard_changes(
    self, dashboards_before_change: List[Dict]
) -> List[Change]:
    """Build the synthetic "dashboards" Change for the activity log.

    Returns a single-element list when a dashboard update was recorded in
    the serializer context during this request, otherwise an empty list.
    """
    after_changes = self.context.get("after_dashboard_changes", [])
    if not after_changes:
        return []
    return [
        Change(
            type="Insight",
            action="changed",
            field="dashboards",
            before=dashboards_before_change,
            after=after_changes,
        )
    ]
def _update_insight_dashboards(
    self, dashboards: List[Dashboard], instance: Insight
) -> None:
    """Reconcile the set of dashboards this insight is tiled on.

    Creates (or un-soft-deletes) tiles for newly added dashboards and
    soft-deletes tiles for removed ones. Records the resulting dashboard
    list in the serializer context for activity logging and response
    correction. No-op when the requested set equals the current set.

    Raises PermissionDenied when the user only has view access to a target
    dashboard, and ValidationError when a dashboard belongs to another team.
    """
    old_dashboard_ids = [
        tile.dashboard_id for tile in instance.dashboard_tiles.all()
    ]
    new_dashboard_ids = [d.id for d in dashboards if not d.deleted]
    if sorted(old_dashboard_ids) == sorted(new_dashboard_ids):
        return
    ids_to_add = [id for id in new_dashboard_ids if id not in old_dashboard_ids]
    ids_to_remove = [id for id in old_dashboard_ids if id not in new_dashboard_ids]
    candidate_dashboards = Dashboard.objects.filter(id__in=ids_to_add)
    dashboard: Dashboard
    for dashboard in candidate_dashboards:
        # does this user have permission on dashboards to add... if they are restricted
        # it will mean this dashboard becomes restricted because of the patch
        if (
            self.user_permissions.dashboard(dashboard).effective_privilege_level
            == Dashboard.PrivilegeLevel.CAN_VIEW
        ):
            raise PermissionDenied(
                f"You don't have permission to add insights to dashboard: {dashboard.id}"
            )
        if dashboard.team != instance.team:
            # cross-team dashboards are reported as "not found" rather than
            # leaking their existence
            raise serializers.ValidationError("Dashboard not found")
        # revive a previously soft-deleted tile instead of duplicating it
        tile, _ = DashboardTile.objects_including_soft_deleted.get_or_create(
            insight=instance, dashboard=dashboard
        )
        if tile.deleted:
            tile.deleted = False
            tile.save()
    if ids_to_remove:
        DashboardTile.objects.filter(
            dashboard_id__in=ids_to_remove, insight=instance
        ).update(deleted=True)
    # stash the post-change dashboard list for to_representation / logging
    self.context["after_dashboard_changes"] = [
        describe_change(d) for d in dashboards if not d.deleted
    ]
def get_result(self, insight: Insight):
    """Expose the computed (or cached) result payload for this insight."""
    computed = self.insight_result(insight)
    return computed.result
def get_timezone(self, insight: Insight):
    """Timezone the insight result was calculated in.

    :TODO: This doesn't work properly as background cache updates don't set
    timezone in the response. This should get refactored.
    """
    if not refresh_requested_by_client(self.context["request"]):
        return self.insight_result(insight).timezone
    return insight.team.timezone
def get_last_refresh(self, insight: Insight):
    """Timestamp of the most recent calculation of this insight."""
    computed = self.insight_result(insight)
    return computed.last_refresh
def get_next_allowed_client_refresh(self, insight: Insight):
    """Earliest time at which a client may request a refresh again."""
    computed = self.insight_result(insight)
    return computed.next_allowed_client_refresh
def get_is_cached(self, insight: Insight):
    """Whether the returned result came from cache (no fresh calculation)."""
    computed = self.insight_result(insight)
    return computed.is_cached
def get_effective_restriction_level(
    self, insight: Insight
) -> Dashboard.RestrictionLevel:
    """Restriction level for this insight; shared (token) access is always
    treated as collaborator-only editing."""
    if not self.context.get("is_shared"):
        return self.user_permissions.insight(insight).effective_restriction_level
    return Dashboard.RestrictionLevel.ONLY_COLLABORATORS_CAN_EDIT
def get_effective_privilege_level(
    self, insight: Insight
) -> Dashboard.PrivilegeLevel:
    """Privilege level of the current user on this insight; shared (token)
    access is always read-only."""
    if not self.context.get("is_shared"):
        return self.user_permissions.insight(insight).effective_privilege_level
    return Dashboard.PrivilegeLevel.CAN_VIEW
def to_representation(self, instance: Insight):
    """Serialize the insight, correcting the `dashboards` field and applying
    dashboard-specific filter overrides."""
    representation = super().to_representation(instance)
    # the ORM doesn't know about deleted dashboard tiles
    # when they have just been updated
    # we store them and can use that list to correct the response
    # and avoid refreshing from the DB
    if self.context.get("after_dashboard_changes"):
        representation["dashboards"] = [
            described_dashboard["id"]
            for described_dashboard in self.context["after_dashboard_changes"]
        ]
    else:
        representation["dashboards"] = [
            tile["dashboard_id"] for tile in representation["dashboard_tiles"]
        ]
    # when serialized in a dashboard context, the dashboard's own filters
    # are merged into the insight's filters
    dashboard: Optional[Dashboard] = self.context.get("dashboard")
    representation["filters"] = instance.dashboard_filters(dashboard=dashboard)
    # legacy filter-based insights default to TRENDS when no type is set
    # (query-based insights are exempt)
    if "insight" not in representation["filters"] and not representation["query"]:
        representation["filters"]["insight"] = "TRENDS"
    representation["filters_hash"] = self.insight_result(instance).cache_key
    return representation
@lru_cache(maxsize=1)
def insight_result(self, insight: Insight) -> InsightResult:
    """Return the calculation result for this insight, refreshing it
    synchronously when the refresh heuristics say the cache is stale.

    NOTE(review): lru_cache on a method also keys on `self`; with maxsize=1
    this appears intended as a per-serializer-instance memo for the single
    insight being serialized (see the comment on dashboard_tile_from_context).
    """
    dashboard = self.context.get("dashboard", None)
    dashboard_tile = self.dashboard_tile_from_context(insight, dashboard)
    # in a dashboard context the tile, not the insight, carries the result
    target = insight if dashboard is None else dashboard_tile
    is_shared = self.context.get("is_shared", False)
    refresh_insight_now, refresh_frequency = should_refresh_insight(
        insight,
        dashboard_tile,
        request=self.context["request"],
        is_shared=is_shared,
    )
    if refresh_insight_now:
        INSIGHT_REFRESH_INITIATED_COUNTER.labels(is_shared=is_shared).inc()
        return synchronously_update_cache(insight, dashboard, refresh_frequency)
    # :TODO: Clear up if tile can be null or not
    return fetch_cached_insight_result(target or insight, refresh_frequency)
@lru_cache(
    maxsize=1
)  # each serializer instance should only deal with one insight/tile combo
def dashboard_tile_from_context(
    self, insight: Insight, dashboard: Optional[Dashboard]
) -> Optional[DashboardTile]:
    """Resolve the DashboardTile for this insight/dashboard pair.

    Prefers the tile stashed in the serializer context; a soft-deleted
    context tile is discarded. Falls back to a DB lookup when a dashboard
    is given but no usable tile is in the context.
    """
    tile: Optional[DashboardTile] = self.context.get("dashboard_tile", None)
    if tile and tile.deleted:
        # a deleted tile in the context is stale - drop it
        self.context.update({"dashboard_tile": None})
        tile = None
    if dashboard and not tile:
        tile = DashboardTile.dashboard_queryset(
            DashboardTile.objects.filter(insight=insight, dashboard=dashboard)
        ).first()
    return tile
class InsightViewSet(
    TaggedItemViewSetMixin,
    StructuredViewSetMixin,
    ForbidDestroyModel,
    viewsets.ModelViewSet,
):
    """CRUD API for insights plus the calculated-insight endpoints
    (trend, funnel, retention, path) and auxiliary actions
    (viewed, activity, cancel, timing).

    Hard deletes are forbidden (ForbidDestroyModel); insights are
    soft-deleted by PATCHing {"deleted": true} and can be restored with
    {"deleted": false}.
    """

    serializer_class = InsightSerializer
    permission_classes = [
        IsAuthenticated,
        ProjectMembershipNecessaryPermissions,
        TeamMemberAccessPermission,
    ]
    throttle_classes = [
        ClickHouseBurstRateThrottle,
        ClickHouseSustainedRateThrottle,
    ]
    # CSV rendering is offered in addition to the default renderers
    renderer_classes = tuple(api_settings.DEFAULT_RENDERER_CLASSES) + (
        csvrenderers.CSVRenderer,
    )
    filter_backends = [DjangoFilterBackend]
    filterset_fields = ["short_id", "created_by"]
    include_in_docs = True

    # query implementations are class attributes so subclasses can swap them
    retention_query_class = Retention
    stickiness_query_class = Stickiness
    paths_query_class = Paths
    parser_classes = (QuerySchemaParser,)

    def get_serializer_class(self) -> Type[serializers.BaseSerializer]:
        """Use the lightweight serializer for list/retrieve when ?basic=1."""
        if (self.action == "list" or self.action == "retrieve") and str_to_bool(
            self.request.query_params.get("basic", "0")
        ):
            return InsightBasicSerializer
        return super().get_serializer_class()

    def get_authenticators(self):
        # sharing access tokens are accepted on top of the normal schemes
        return [SharingAccessTokenAuthentication(), *super().get_authenticators()]

    def get_serializer_context(self) -> Dict[str, Any]:
        context = super().get_serializer_context()
        # flag token-shared requests so serialization applies read-only rules
        context["is_shared"] = isinstance(
            self.request.successful_authenticator, SharingAccessTokenAuthentication
        )
        return context

    def get_permissions(self):
        if isinstance(
            self.request.successful_authenticator, SharingAccessTokenAuthentication
        ) and self.action in (
            "retrieve",
            "list",
        ):
            # Anonymous users authenticated via SharingAccessTokenAuthentication get read-only access to insights
            return []
        return super().get_permissions()

    def get_queryset(self) -> QuerySet:
        """Build the insight queryset for the current action/authentication.

        Sharing-token requests only see insights connected to the sharing
        configuration; an un-delete PATCH has to look at soft-deleted rows.
        """
        queryset: QuerySet
        if isinstance(
            self.request.successful_authenticator, SharingAccessTokenAuthentication
        ):
            queryset = Insight.objects.filter(
                id__in=self.request.successful_authenticator.sharing_configuration.get_connected_insight_ids()
            )
        elif (
            self.action == "partial_update"
            and self.request.data.get("deleted") is False
        ):
            # an insight can be un-deleted by patching {"deleted": False}
            queryset = Insight.objects_including_soft_deleted.all()
        else:
            queryset = Insight.objects.all()

        # Optimize tag retrieval
        queryset = self.prefetch_tagged_items_if_available(queryset)
        # Disallow access to other teams' insights
        queryset = self.filter_queryset_by_parents_lookups(queryset)

        queryset = queryset.prefetch_related(
            Prefetch(
                # TODO deprecate this field entirely
                "dashboards",
                queryset=Dashboard.objects.all().select_related("team__organization"),
            ),
            Prefetch(
                "dashboard_tiles",
                queryset=DashboardTile.objects.select_related(
                    "dashboard__team__organization"
                ),
            ),
        )
        queryset = queryset.select_related("created_by", "last_modified_by", "team")
        if self.action == "list":
            queryset = queryset.prefetch_related("tagged_items__tag")
            queryset = self._filter_request(self.request, queryset)
            # query-based insights are hidden from listings unless asked for
            if (
                self.request.query_params.get("include_query_insights", "false").lower()
                != "true"
            ):
                queryset = queryset.exclude(Q(filters={}) & Q(query__isnull=False))

        order = self.request.GET.get("order", None)
        if order:
            queryset = queryset.order_by(order)
        else:
            queryset = queryset.order_by("order")
        return queryset

    @action(methods=["GET"], detail=False)
    def my_last_viewed(self, request: request.Request, *args, **kwargs) -> Response:
        """
        Returns basic details about the last 5 insights viewed by this user. Most recently viewed first.
        """
        insight_queryset = (
            InsightViewed.objects.filter(team=self.team, user=cast(User, request.user))
            .select_related("insight")
            .exclude(insight__deleted=True)
            .only("insight")
        )
        # mirror get_queryset: hide query-based insights unless requested
        if (
            self.request.query_params.get("include_query_insights", "false").lower()
            != "true"
        ):
            insight_queryset = insight_queryset.exclude(
                Q(insight__filters={}) & Q(insight__query__isnull=False)
            )
        recently_viewed = [
            rv.insight for rv in (insight_queryset.order_by("-last_viewed_at")[:5])
        ]
        response = InsightBasicSerializer(recently_viewed, many=True)
        return Response(data=response.data, status=status.HTTP_200_OK)

    def _filter_request(self, request: request.Request, queryset: QuerySet) -> QuerySet:
        """Apply the ad-hoc list-endpoint query parameters to the queryset."""
        filters = request.GET.dict()
        for key in filters:
            if key == "saved":
                if str_to_bool(request.GET["saved"]):
                    # "saved" means explicitly saved OR on at least one dashboard
                    queryset = queryset.annotate(dashboards_count=Count("dashboards"))
                    queryset = queryset.filter(
                        Q(saved=True) | Q(dashboards_count__gte=1)
                    )
                else:
                    queryset = queryset.filter(Q(saved=False))
            elif key == "feature_flag":
                feature_flag = request.GET["feature_flag"]
                # matches both $feature/<flag> breakdowns and property filters
                queryset = queryset.filter(
                    Q(filters__breakdown__icontains=f"$feature/{feature_flag}")
                    | Q(filters__properties__icontains=feature_flag)
                )
            elif key == "user":
                queryset = queryset.filter(created_by=request.user)
            elif key == "favorited":
                queryset = queryset.filter(Q(favorited=True))
            elif key == "date_from":
                queryset = queryset.filter(
                    last_modified_at__gt=relative_date_parse(
                        request.GET["date_from"], self.team.timezone_info
                    )
                )
            elif key == "date_to":
                queryset = queryset.filter(
                    last_modified_at__lt=relative_date_parse(
                        request.GET["date_to"], self.team.timezone_info
                    )
                )
            elif key == INSIGHT:
                insight = request.GET[INSIGHT]
                if insight == "JSON":
                    # query-based insights, excluding HogQL data tables
                    queryset = queryset.filter(query__isnull=False)
                    queryset = queryset.exclude(
                        query__kind="DataTableNode", query__source__kind="HogQLQuery"
                    )
                elif insight == "SQL":
                    # query-based insights that ARE HogQL data tables
                    queryset = queryset.filter(query__isnull=False)
                    queryset = queryset.filter(
                        query__kind="DataTableNode", query__source__kind="HogQLQuery"
                    )
                else:
                    # legacy filter-based insights of the given type
                    queryset = queryset.filter(query__isnull=True)
                    queryset = queryset.filter(filters__insight=insight)
            elif key == "search":
                queryset = queryset.filter(
                    Q(name__icontains=request.GET["search"])
                    | Q(derived_name__icontains=request.GET["search"])
                    | Q(tagged_items__tag__name__icontains=request.GET["search"])
                    | Q(description__icontains=request.GET["search"])
                )
            elif key == "dashboards":
                dashboards_filter = request.GET["dashboards"]
                if dashboards_filter:
                    dashboards_ids = json.loads(dashboards_filter)
                    for dashboard_id in dashboards_ids:
                        # filter by dashboards one at a time so the filter is AND not OR
                        queryset = queryset.filter(
                            id__in=DashboardTile.objects.filter(
                                dashboard__id=dashboard_id
                            )
                            .values_list("insight__id", flat=True)
                            .all()
                        )
        return queryset

    @extend_schema(
        parameters=[
            OpenApiParameter(
                name="refresh",
                type=OpenApiTypes.BOOL,
                description="""
The client can request that an insight be refreshed by setting the `refresh=true` parameter.
The server will then decide if the data should or not be refreshed based on a set of heuristics
meant to determine the staleness of cached data. The result will contain as `is_cached` field
that indicates whether the insight was actually refreshed or not through the request.""",
            ),
            OpenApiParameter(
                name="from_dashboard",
                type=OpenApiTypes.INT,
                description="""
When loading an insight for a dashboard pass a `from_dashboard` query parameter containing the dashboard ID
e.g. `"/api/projects/{team_id}/insights/{insight_id}?from_dashboard={dashboard_id}"`

Insights can be added to more than one dashboard, this allows the insight to be loaded in the correct context.

Using the correct cache and enriching the response with dashboard specific config (e.g. layouts or colors)""",
            ),
        ],
    )
    def retrieve(self, request, *args, **kwargs):
        """Retrieve one insight, optionally in the context of a dashboard
        (?from_dashboard=<id>) which overrides filters/layout/color."""
        instance = self.get_object()
        serializer_context = self.get_serializer_context()

        dashboard_tile: Optional[DashboardTile] = None
        dashboard_id = request.query_params.get("from_dashboard", None)
        if dashboard_id is not None:
            dashboard_tile = (
                DashboardTile.objects.filter(
                    dashboard__id=dashboard_id, insight__id=instance.id
                )
                .select_related("dashboard")
                .first()
            )

        if dashboard_tile is not None:
            # context is used in the to_representation method to report filters used
            serializer_context.update({"dashboard": dashboard_tile.dashboard})

        serialized_data = self.get_serializer(instance, context=serializer_context).data

        if dashboard_tile is not None:
            # enrich with tile-specific presentation config
            serialized_data["color"] = dashboard_tile.color
            layouts = dashboard_tile.layouts
            # workaround because DashboardTiles layouts were migrated as stringified JSON :/
            if isinstance(layouts, str):
                layouts = json.loads(layouts)
            serialized_data["layouts"] = layouts

        return Response(serialized_data)

    # ******************************************
    # Calculated Insight Endpoints
    # /projects/:id/insights/trend
    # /projects/:id/insights/funnel
    # /projects/:id/insights/retention
    # /projects/:id/insights/path
    #
    # Request parameters and caching are handled here and passed onto respective .queries classes
    # ******************************************
    # ******************************************
    # /projects/:id/insights/trend
    #
    # params:
    # - from_dashboard: (string) determines trend is being retrieved from dashboard item to update dashboard_item metadata
    # - shown_as: (string: Volume, Stickiness) specifies the trend aggregation type
    # - **shared filter types
    # ******************************************
    @extend_schema(
        request=TrendSerializer,
        methods=["POST"],
        tags=["trend"],
        operation_id="Trends",
        responses=TrendResultsSerializer,
    )
    @action(methods=["GET", "POST"], detail=False)
    def trend(self, request: request.Request, *args: Any, **kwargs: Any):
        """Calculate a trend; supports CSV export via the Accept header."""
        try:
            serializer = TrendSerializer(request=request)
            serializer.is_valid(raise_exception=True)
        except Exception as e:
            # NOTE(review): validation failures are only captured, not raised,
            # so the calculation proceeds regardless — presumably deliberate
            # (schema is advisory); confirm before changing.
            capture_exception(e)

        try:
            result = self.calculate_trends(request)
        except HogQLException as e:
            raise ValidationError(str(e))
        filter = Filter(request=request, team=self.team)
        # pagination link only when a full page of breakdown values came back
        next = (
            format_paginated_url(request, filter.offset, BREAKDOWN_VALUES_LIMIT)
            if len(result["result"]) >= BREAKDOWN_VALUES_LIMIT
            else None
        )
        if self.request.accepted_renderer.format == "csv":
            # flatten each series into one CSV row: series name + one column per label
            csvexport = []
            for item in result["result"]:
                line = {"series": item["action"].get("custom_name") or item["label"]}
                for index, data in enumerate(item["data"]):
                    line[item["labels"][index]] = data
                csvexport.append(line)
            renderer = csvrenderers.CSVRenderer()
            renderer.header = csvexport[0].keys()
            export = renderer.render(csvexport)
            if request.GET.get("export_insight_id"):
                # prepend a link back to the exported insight
                export = (
                    "{}/insights/{}/\n".format(
                        SITE_URL, request.GET["export_insight_id"]
                    ).encode()
                    + export
                )
            response = HttpResponse(export)
            response[
                "Content-Disposition"
            ] = 'attachment; filename="{name} ({date_from} {date_to}) from PostHog.csv"'.format(
                name=slugify(request.GET.get("export_name", "export")),
                date_from=filter.date_from.strftime("%Y-%m-%d -")
                if filter.date_from
                else "up until",
                date_to=filter.date_to.strftime("%Y-%m-%d"),
            )
            return response
        return Response({**result, "next": next})

    @cached_by_filters
    def calculate_trends(self, request: request.Request) -> Dict[str, Any]:
        """Run the trends (or stickiness) query; results cached by filters."""
        team = self.team
        filter = Filter(request=request, team=self.team)

        if filter.insight == INSIGHT_STICKINESS or filter.shown_as == TRENDS_STICKINESS:
            stickiness_filter = StickinessFilter(
                request=request,
                team=team,
                get_earliest_timestamp=get_earliest_timestamp,
            )
            result = self.stickiness_query_class().run(stickiness_filter, team)
        else:
            trends_query = Trends()
            result = trends_query.run(filter, team)

        return {"result": result, "timezone": team.timezone}

    # ******************************************
    # /projects/:id/insights/funnel
    # The funnel endpoint is asynchronously processed. When a request is received, the endpoint will
    # call an async task with an id that can be continually polled for 3 minutes.
    #
    # params:
    # - refresh: (dict) specifies cache to force refresh or poll
    # - from_dashboard: (dict) determines funnel is being retrieved from dashboard item to update dashboard_item metadata
    # - **shared filter types
    # ******************************************
    @extend_schema(
        request=FunnelSerializer,
        responses=OpenApiResponse(
            response=FunnelStepsResultsSerializer,
            description="Note, if funnel_viz_type is set the response will be different.",
        ),
        methods=["POST"],
        tags=["funnel"],
        operation_id="Funnels",
    )
    @action(methods=["GET", "POST"], detail=False)
    def funnel(self, request: request.Request, *args: Any, **kwargs: Any) -> Response:
        """Calculate a funnel for the requested steps/filters."""
        try:
            serializer = FunnelSerializer(request=request)
            serializer.is_valid(raise_exception=True)
        except Exception as e:
            # NOTE(review): as in trend(), validation errors are captured but
            # not raised — the calculation still runs.
            capture_exception(e)

        try:
            funnel = self.calculate_funnel(request)
        except HogQLException as e:
            raise ValidationError(str(e))

        funnel["result"] = protect_old_clients_from_multi_property_default(
            request.data, funnel["result"]
        )

        return Response(funnel)

    @cached_by_filters
    def calculate_funnel(self, request: request.Request) -> Dict[str, Any]:
        """Dispatch to the funnel implementation matching funnel_viz_type."""
        team = self.team
        filter = Filter(
            request=request, data={"insight": INSIGHT_FUNNELS}, team=self.team
        )

        if filter.funnel_viz_type == FunnelVizType.TRENDS:
            return {
                "result": ClickhouseFunnelTrends(team=team, filter=filter).run(),
                "timezone": team.timezone,
            }
        elif filter.funnel_viz_type == FunnelVizType.TIME_TO_CONVERT:
            return {
                "result": ClickhouseFunnelTimeToConvert(team=team, filter=filter).run(),
                "timezone": team.timezone,
            }
        else:
            funnel_order_class = get_funnel_order_class(filter)
            return {
                "result": funnel_order_class(team=team, filter=filter).run(),
                "timezone": team.timezone,
            }

    # ******************************************
    # /projects/:id/insights/retention
    # params:
    # - start_entity: (dict) specifies id and type of the entity to focus retention on
    # - **shared filter types
    # ******************************************
    @action(methods=["GET"], detail=False)
    def retention(
        self, request: request.Request, *args: Any, **kwargs: Any
    ) -> Response:
        """Calculate retention for the requested entity/filters."""
        try:
            result = self.calculate_retention(request)
        except HogQLException as e:
            raise ValidationError(str(e))
        return Response(result)

    @cached_by_filters
    def calculate_retention(self, request: request.Request) -> Dict[str, Any]:
        """Run the retention query; defaults to an 11-day window."""
        team = self.team
        data = {}
        if not request.GET.get("date_from"):
            data.update({"date_from": "-11d"})
        filter = RetentionFilter(data=data, request=request, team=self.team)
        base_uri = request.build_absolute_uri("/")
        result = self.retention_query_class(base_uri=base_uri).run(filter, team)
        return {"result": result, "timezone": team.timezone}

    # ******************************************
    # /projects/:id/insights/path
    # params:
    # - start: (string) specifies the name of the starting property or element
    # - request_type: (string: $pageview, $autocapture, $screen, custom_event) specifies the path type
    # - **shared filter types
    # ******************************************
    @action(methods=["GET", "POST"], detail=False)
    def path(self, request: request.Request, *args: Any, **kwargs: Any) -> Response:
        """Calculate user paths for the requested filters."""
        try:
            result = self.calculate_path(request)
        except HogQLException as e:
            raise ValidationError(str(e))
        return Response(result)

    @cached_by_filters
    def calculate_path(self, request: request.Request) -> Dict[str, Any]:
        """Run the paths query, optionally constrained by a funnel filter."""
        team = self.team
        filter = PathFilter(
            request=request, data={"insight": INSIGHT_PATHS}, team=self.team
        )

        # a funnel filter (JSON in GET, or object in POST body) restricts paths
        # to users matching the funnel
        funnel_filter = None
        funnel_filter_data = request.GET.get("funnel_filter") or request.data.get(
            "funnel_filter"
        )
        if funnel_filter_data:
            if isinstance(funnel_filter_data, str):
                funnel_filter_data = json.loads(funnel_filter_data)
            funnel_filter = Filter(
                data={"insight": INSIGHT_FUNNELS, **funnel_filter_data}, team=self.team
            )

        # backwards compatibility
        if filter.path_type:
            filter = filter.shallow_clone(
                {PATHS_INCLUDE_EVENT_TYPES: [filter.path_type]}
            )
        resp = self.paths_query_class(
            filter=filter, team=team, funnel_filter=funnel_filter
        ).run()

        return {"result": resp, "timezone": team.timezone}

    # ******************************************
    # /projects/:id/insights/:short_id/viewed
    # Creates or updates an InsightViewed object for the user/insight combo
    # ******************************************
    @action(methods=["POST"], detail=True)
    def viewed(self, request: request.Request, *args: Any, **kwargs: Any) -> Response:
        InsightViewed.objects.update_or_create(
            team=self.team,
            user=request.user,
            insight=self.get_object(),
            defaults={"last_viewed_at": now()},
        )
        return Response(status=status.HTTP_201_CREATED)

    @action(methods=["GET"], url_path="activity", detail=False)
    def all_activity(self, request: request.Request, **kwargs):
        """Paginated activity log across all insights of this team."""
        limit = int(request.query_params.get("limit", "10"))
        page = int(request.query_params.get("page", "1"))

        activity_page = load_activity(
            scope="Insight", team_id=self.team_id, limit=limit, page=page
        )
        return activity_page_response(activity_page, limit, page, request)

    @action(methods=["GET"], detail=True)
    def activity(self, request: request.Request, **kwargs):
        """Paginated activity log for a single insight (404 if not ours)."""
        limit = int(request.query_params.get("limit", "10"))
        page = int(request.query_params.get("page", "1"))

        item_id = kwargs["pk"]
        if not Insight.objects.filter(id=item_id, team_id=self.team_id).exists():
            return Response("", status=status.HTTP_404_NOT_FOUND)

        activity_page = load_activity(
            scope="Insight",
            team_id=self.team_id,
            item_id=item_id,
            limit=limit,
            page=page,
        )
        return activity_page_response(activity_page, limit, page, request)

    @action(methods=["POST"], detail=False)
    def cancel(self, request: request.Request, **kwargs):
        """Kill a running ClickHouse query identified by client_query_id.

        The query id is namespaced with the team pk, so a team can only
        cancel its own queries.
        """
        if "client_query_id" not in request.data:
            raise serializers.ValidationError({"client_query_id": "Field is required."})
        sync_execute(
            f"KILL QUERY ON CLUSTER '{CLICKHOUSE_CLUSTER}' WHERE query_id LIKE %(client_query_id)s",
            {"client_query_id": f"{self.team.pk}_{request.data['client_query_id']}%"},
        )
        statsd.incr(
            "clickhouse.query.cancellation_requested", tags={"team_id": self.team.pk}
        )
        return Response(status=status.HTTP_201_CREATED)

    @action(methods=["POST"], detail=False)
    def timing(self, request: request.Request, **kwargs):
        """Record client-reported time-to-see-data metrics to Kafka."""
        from posthog.kafka_client.client import KafkaProducer
        from posthog.models.event.util import format_clickhouse_timestamp
        from posthog.utils import cast_timestamp_or_now

        if CAPTURE_TIME_TO_SEE_DATA:
            payload = {
                **request.data,
                "team_id": self.team_id,
                "user_id": self.request.user.pk,
                "timestamp": format_clickhouse_timestamp(cast_timestamp_or_now(None)),
            }
            if "min_last_refresh" in payload:
                payload["min_last_refresh"] = format_clickhouse_timestamp(
                    payload["min_last_refresh"]
                )
            if "max_last_refresh" in payload:
                payload["max_last_refresh"] = format_clickhouse_timestamp(
                    payload["max_last_refresh"]
                )
            KafkaProducer().produce(topic=KAFKA_METRICS_TIME_TO_SEE_DATA, data=payload)

        return Response(status=status.HTTP_201_CREATED)
class LegacyInsightViewSet(InsightViewSet):
    """InsightViewSet mounted on the legacy (non project-scoped) URL routes."""

    # allows routing without an explicit team/project in the URL
    legacy_team_compatibility = True
|
network | audio_source | #!/usr/bin/env python
#
# Copyright 2006,2007,2010 Free Software Foundation, Inc.
#
# This file is part of GNU Radio
#
# SPDX-License-Identifier: GPL-3.0-or-later
#
#
import sys
from argparse import ArgumentParser
from gnuradio import blocks, gr
try:
from gnuradio import audio
except ImportError:
sys.stderr.write(
"Failed to import gnuradio.audio. Make sure gr-audio component is installed.\n"
)
sys.exit(1)
class audio_source(gr.top_block):
    """Flowgraph that streams sound-card samples to a remote UDP sink."""

    def __init__(self, host, port, pkt_size, sample_rate, eof):
        gr.top_block.__init__(self, "audio_source")
        # capture float samples from the default audio device
        src = audio.source(sample_rate)
        # ship them over UDP, optionally signalling EOF on disconnect
        dst = blocks.udp_sink(gr.sizeof_float, host, port, pkt_size, eof=eof)
        self.audio = src
        self.sink = dst
        self.connect(src, dst)
if __name__ == "__main__":
    # Command-line front end: stream sound-card audio to a remote UDP sink.
    parser = ArgumentParser()
    parser.add_argument(
        "--host",
        default="127.0.0.1",
        # fix: help text previously had an unclosed parenthesis
        help="Remote host name (domain name or IP address)",
    )
    parser.add_argument(
        "--port", type=int, default=65500, help="port number to connect to"
    )
    parser.add_argument("--packet-size", type=int, default=1472, help="packet size.")
    parser.add_argument(
        "-r",
        "--sample-rate",
        type=int,
        default=32000,
        help="audio signal sample rate [default=%(default)r]",
    )
    parser.add_argument(
        "--no-eof",
        action="store_true",
        default=False,
        help="don't send EOF on disconnect",
    )
    args = parser.parse_args()

    # Create an instance of a hierarchical block
    top_block = audio_source(
        args.host, args.port, args.packet_size, args.sample_rate, not args.no_eof
    )

    try:
        # Run forever
        top_block.run()
    except KeyboardInterrupt:
        # Ctrl-C exits
        pass
|
other | doxypy | #!/usr/bin/env python
__applicationName__ = "doxypy"
__blurb__ = """
doxypy is an input filter for Doxygen. It preprocesses python
files so that docstrings of classes and functions are reformatted
into Doxygen-conform documentation blocks.
"""
__doc__ = (
__blurb__
+ """
In order to make Doxygen preprocess files through doxypy, simply
add the following lines to your Doxyfile:
FILTER_SOURCE_FILES = YES
INPUT_FILTER = "python /path/to/doxypy.py"
"""
)
__version__ = "0.4.2"
__date__ = "5th December 2008"
__website__ = "http://code.foosel.org/doxypy"
__author__ = (
"Philippe 'demod' Neumann (doxypy at demod dot org)",
"Gina 'foosel' Haeussge (gina at foosel dot net)",
)
__licenseName__ = "GPL v2"
__license__ = """This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
import re
import sys
from argparse import ArgumentParser
class FSM(object):
    """Implements a finite state machine.

    Transitions are given as 4-tuples, consisting of an origin state, a target
    state, a condition for the transition (given as a reference to a function
    which gets called with a given piece of input) and a pointer to a function
    to be called upon the execution of the given transition.

    Attributes:
        transitions: holds the transitions
        current_state: holds the current state
        current_input: holds the current input
        current_transition: holds the currently active transition
    """

    def __init__(self, start_state=None, transitions=None):
        # Fix: the previous default `transitions=[]` was a mutable default
        # argument, evaluated once at definition time and therefore shared by
        # every FSM created without an explicit transition list — transitions
        # added to one instance leaked into all the others. Use a None
        # sentinel and create a fresh list per instance instead.
        self.transitions = transitions if transitions is not None else []
        self.current_state = start_state
        self.current_input = None
        self.current_transition = None

    def setStartState(self, state):
        """Sets the machine's current state."""
        self.current_state = state

    def addTransition(self, from_state, to_state, condition, callback):
        """Registers a transition as [from_state, to_state, condition, callback]."""
        self.transitions.append([from_state, to_state, condition, callback])

    def makeTransition(self, input):
        """Makes a transition based on the given input.

        The first registered transition leaving the current state whose
        condition matches wins; its callback is invoked with the match.

        @param input input to parse by the FSM
        """
        for transition in self.transitions:
            [from_state, to_state, condition, callback] = transition
            if from_state == self.current_state:
                match = condition(input)
                if match:
                    self.current_state = to_state
                    self.current_input = input
                    self.current_transition = transition
                    # `args` is the module-level parsed CLI namespace
                    if args.debug:
                        print(
                            "# FSM: executing (%s -> %s) for line '%s'"
                            % (from_state, to_state, input),
                            file=sys.stderr,
                        )
                    callback(match)
                    return
class Doxypy(object):
def __init__(self):
    """Builds the docstring-detection regexes, the FSM transition table and
    the output buffers."""
    # optional string prefixes (u/r) that may precede a docstring quote
    string_prefixes = "[uU]?[rR]?"
    self.start_single_comment_re = re.compile(r"^\s*%s(''')" % string_prefixes)
    self.end_single_comment_re = re.compile(r"(''')\s*$")
    self.start_double_comment_re = re.compile(r'^\s*%s(""")' % string_prefixes)
    self.end_double_comment_re = re.compile(r'(""")\s*$')
    # one-line docstrings (open and close on the same line)
    self.single_comment_re = re.compile(r"^\s*%s(''').*(''')\s*$" % string_prefixes)
    self.double_comment_re = re.compile(r'^\s*%s(""").*(""")\s*$' % string_prefixes)
    self.defclass_re = re.compile(r"^(\s*)(def .+:|class .+:)")
    self.empty_re = re.compile(r"^\s*$")
    self.hashline_re = re.compile(r"^\s*#.*$")
    self.importline_re = re.compile(r"^\s*(import |from .+ import)")
    # def/class headers that continue onto following lines
    self.multiline_defclass_start_re = re.compile(r"^(\s*)(def|class)(\s.*)?$")
    self.multiline_defclass_end_re = re.compile(r":\s*$")
    # Transition list format
    # ["FROM", "TO", condition, action]
    transitions = [
        # FILEHEAD
        # single line comments
        [
            "FILEHEAD",
            "FILEHEAD",
            self.single_comment_re.search,
            self.appendCommentLine,
        ],
        [
            "FILEHEAD",
            "FILEHEAD",
            self.double_comment_re.search,
            self.appendCommentLine,
        ],
        # multiline comments
        [
            "FILEHEAD",
            "FILEHEAD_COMMENT_SINGLE",
            self.start_single_comment_re.search,
            self.appendCommentLine,
        ],
        [
            "FILEHEAD_COMMENT_SINGLE",
            "FILEHEAD",
            self.end_single_comment_re.search,
            self.appendCommentLine,
        ],
        [
            "FILEHEAD_COMMENT_SINGLE",
            "FILEHEAD_COMMENT_SINGLE",
            self.catchall,
            self.appendCommentLine,
        ],
        [
            "FILEHEAD",
            "FILEHEAD_COMMENT_DOUBLE",
            self.start_double_comment_re.search,
            self.appendCommentLine,
        ],
        [
            "FILEHEAD_COMMENT_DOUBLE",
            "FILEHEAD",
            self.end_double_comment_re.search,
            self.appendCommentLine,
        ],
        [
            "FILEHEAD_COMMENT_DOUBLE",
            "FILEHEAD_COMMENT_DOUBLE",
            self.catchall,
            self.appendCommentLine,
        ],
        # other lines
        ["FILEHEAD", "FILEHEAD", self.empty_re.search, self.appendFileheadLine],
        ["FILEHEAD", "FILEHEAD", self.hashline_re.search, self.appendFileheadLine],
        [
            "FILEHEAD",
            "FILEHEAD",
            self.importline_re.search,
            self.appendFileheadLine,
        ],
        ["FILEHEAD", "DEFCLASS", self.defclass_re.search, self.resetCommentSearch],
        [
            "FILEHEAD",
            "DEFCLASS_MULTI",
            self.multiline_defclass_start_re.search,
            self.resetCommentSearch,
        ],
        ["FILEHEAD", "DEFCLASS_BODY", self.catchall, self.appendFileheadLine],
        # DEFCLASS
        # single line comments
        [
            "DEFCLASS",
            "DEFCLASS_BODY",
            self.single_comment_re.search,
            self.appendCommentLine,
        ],
        [
            "DEFCLASS",
            "DEFCLASS_BODY",
            self.double_comment_re.search,
            self.appendCommentLine,
        ],
        # multiline comments
        [
            "DEFCLASS",
            "COMMENT_SINGLE",
            self.start_single_comment_re.search,
            self.appendCommentLine,
        ],
        [
            "COMMENT_SINGLE",
            "DEFCLASS_BODY",
            self.end_single_comment_re.search,
            self.appendCommentLine,
        ],
        ["COMMENT_SINGLE", "COMMENT_SINGLE", self.catchall, self.appendCommentLine],
        [
            "DEFCLASS",
            "COMMENT_DOUBLE",
            self.start_double_comment_re.search,
            self.appendCommentLine,
        ],
        [
            "COMMENT_DOUBLE",
            "DEFCLASS_BODY",
            self.end_double_comment_re.search,
            self.appendCommentLine,
        ],
        ["COMMENT_DOUBLE", "COMMENT_DOUBLE", self.catchall, self.appendCommentLine],
        # other lines
        ["DEFCLASS", "DEFCLASS", self.empty_re.search, self.appendDefclassLine],
        ["DEFCLASS", "DEFCLASS", self.defclass_re.search, self.resetCommentSearch],
        [
            "DEFCLASS",
            "DEFCLASS_MULTI",
            self.multiline_defclass_start_re.search,
            self.resetCommentSearch,
        ],
        ["DEFCLASS", "DEFCLASS_BODY", self.catchall, self.stopCommentSearch],
        # DEFCLASS_BODY
        [
            "DEFCLASS_BODY",
            "DEFCLASS",
            self.defclass_re.search,
            self.startCommentSearch,
        ],
        [
            "DEFCLASS_BODY",
            "DEFCLASS_MULTI",
            self.multiline_defclass_start_re.search,
            self.startCommentSearch,
        ],
        ["DEFCLASS_BODY", "DEFCLASS_BODY", self.catchall, self.appendNormalLine],
        # DEFCLASS_MULTI
        [
            "DEFCLASS_MULTI",
            "DEFCLASS",
            self.multiline_defclass_end_re.search,
            self.appendDefclassLine,
        ],
        [
            "DEFCLASS_MULTI",
            "DEFCLASS_MULTI",
            self.catchall,
            self.appendDefclassLine,
        ],
    ]
    self.fsm = FSM("FILEHEAD", transitions)
    self.outstream = sys.stdout
    # buffers: final output, current docstring, file head, triggering def/class
    self.output = []
    self.comment = []
    self.filehead = []
    self.defclass = []
    self.indent = ""
def __closeComment(self):
"""Appends any open comment block and triggering block to the output."""
if args.autobrief:
if len(self.comment) == 1 or (
len(self.comment) > 2 and self.comment[1].strip() == ""
):
self.comment[0] = self.__docstringSummaryToBrief(self.comment[0])
if self.comment:
block = self.makeCommentBlock()
self.output.extend(block)
if self.defclass:
self.output.extend(self.defclass)
def __docstringSummaryToBrief(self, line):
    """Prepend \\brief to a docstring summary line.

    The marker is only added when the line does not already begin with a
    doxygen command introducer ("@" or "\\").
    """
    head = line.strip()[:1]
    if head and head not in ("@", "\\"):
        return "\\brief " + line
    return line
def __flushBuffer(self):
    """Flush the current output buffer to the outstream."""
    if self.output:
        try:
            if args.debug:
                print("# OUTPUT: ", self.output, file=sys.stderr)
            print("\n".join(self.output), file=self.outstream)
            self.outstream.flush()
        except IOError:
            # Fix for FS#33. Catches "broken pipe" when doxygen closes
            # stdout prematurely upon usage of INPUT_FILTER, INLINE_SOURCES
            # and FILTER_SOURCE_FILES.
            pass
    # Buffer is always cleared, even after a swallowed write failure.
    self.output = []
def catchall(self, input):
    """The catch-all FSM condition; matches every input line.

    ``input`` shadows the builtin of the same name; the parameter is kept
    so the signature matches the regex ``search`` conditions it stands in
    for in the transition table.
    """
    return True
def resetCommentSearch(self, match):
    """Restart a comment search for a different triggering line.

    Closes the current comment block and starts a new comment search.
    """
    if args.debug:
        print("# CALLBACK: resetCommentSearch", file=sys.stderr)
    self.__closeComment()
    self.startCommentSearch(match)
def startCommentSearch(self, match):
    """Start a new comment search.

    Saves the triggering line, resets the current comment and saves
    the current indentation.
    """
    if args.debug:
        print("# CALLBACK: startCommentSearch", file=sys.stderr)
    # Buffer the triggering def/class line until the comment is closed.
    self.defclass = [self.fsm.current_input]
    self.comment = []
    # NOTE(review): assumes the condition regex captures the leading
    # indentation in group 1 -- the regexes are defined outside this view.
    self.indent = match.group(1)
def stopCommentSearch(self, match):
    """Stop a comment search.

    Closes the current comment block, resets the triggering line and
    appends the current line to the output.
    """
    if args.debug:
        print("# CALLBACK: stopCommentSearch", file=sys.stderr)
    self.__closeComment()
    self.defclass = []
    self.output.append(self.fsm.current_input)
def appendFileheadLine(self, match):
    """Append a line in the FILEHEAD state.

    Closes the open comment block, resets it and appends the current line.
    """
    if args.debug:
        print("# CALLBACK: appendFileheadLine", file=sys.stderr)
    self.__closeComment()
    self.comment = []
    self.output.append(self.fsm.current_input)
def appendCommentLine(self, match):
    """Append a comment line, stripping the delimiters.

    The comment delimiter is removed from multiline starts and ends as
    well as from singleline comments. Which case applies is decided from
    the (from_state, to_state) pair of the transition that fired.
    """
    if args.debug:
        print("# CALLBACK: appendCommentLine", file=sys.stderr)
    (from_state, to_state, condition, callback) = self.fsm.current_transition
    # single line comment: the whole docstring is on one line
    if (from_state == "DEFCLASS" and to_state == "DEFCLASS_BODY") or (
        from_state == "FILEHEAD" and to_state == "FILEHEAD"
    ):
        # remove comment delimiter from begin and end of the line
        # (match.group(1) is assumed to be the quote delimiter captured
        # by the comment regex -- defined outside this view)
        activeCommentDelim = match.group(1)
        line = self.fsm.current_input
        self.comment.append(
            line[
                line.find(activeCommentDelim)
                + len(activeCommentDelim) : line.rfind(activeCommentDelim)
            ]
        )
        if to_state == "DEFCLASS_BODY":
            self.__closeComment()
            self.defclass = []
    # multiline start
    elif from_state == "DEFCLASS" or from_state == "FILEHEAD":
        # remove comment delimiter from begin of the line
        activeCommentDelim = match.group(1)
        line = self.fsm.current_input
        self.comment.append(
            line[line.find(activeCommentDelim) + len(activeCommentDelim) :]
        )
    # multiline end
    elif to_state == "DEFCLASS_BODY" or to_state == "FILEHEAD":
        # remove comment delimiter from end of the line
        activeCommentDelim = match.group(1)
        line = self.fsm.current_input
        self.comment.append(line[0 : line.rfind(activeCommentDelim)])
        if to_state == "DEFCLASS_BODY":
            self.__closeComment()
            self.defclass = []
    # in multiline comment
    else:
        # just append the comment line unmodified
        self.comment.append(self.fsm.current_input)
def appendNormalLine(self, match):
    """Pass the current input line through to the output unchanged."""
    if args.debug:
        sys.stderr.write("# CALLBACK: appendNormalLine\n")
    self.output.append(self.fsm.current_input)
def appendDefclassLine(self, match):
    """Buffer the current input line as part of the triggering block."""
    if args.debug:
        sys.stderr.write("# CALLBACK: appendDefclassLine\n")
    self.defclass.append(self.fsm.current_input)
def makeCommentBlock(self):
    """Re-indent the buffered comment as a doxygen comment block.

    @returns a list of indented comment lines, starting with the
             ``##`` doxygen block opener
    """
    block = [self.indent + "##"]
    block.extend(self.indent + "# " + text for text in self.comment)
    return block
def parse(self, input):
    """Parse a python file given as input string and return the doxygen-
    compatible representation.

    @param input the python code to parse
    @returns the modified python code
    """
    lines = input.split("\n")
    for line in lines:
        self.fsm.makeTransition(line)
    # A trailing def/class line may leave an unclosed comment block.
    if self.fsm.current_state == "DEFCLASS":
        self.__closeComment()
    return "\n".join(self.output)
def parseFile(self, filename):
    """Parse a python file line by line, flushing the doxygen-compatible
    output to the outstream as it is produced.

    @param filename path of the python file to parse
    """
    # ``with`` guarantees the handle is closed even if parsing raises;
    # the original open()/close() pair leaked the handle on error.
    with open(filename, "r") as f:
        for line in f:
            self.parseLine(line.rstrip("\r\n"))
    # A trailing def/class line may leave an unclosed comment block.
    if self.fsm.current_state == "DEFCLASS":
        self.__closeComment()
    self.__flushBuffer()
def parseLine(self, line):
    """Parse one line of python and flush the resulting output to the
    outstream.

    @param line the python code line to parse
    """
    self.fsm.makeTransition(line)
    self.__flushBuffer()
def argParse():
    """Parse command-line arguments.

    @returns the populated argparse namespace (version/autobrief/debug
             flags plus the mandatory input filename)
    """
    parser = ArgumentParser(prog=__applicationName__)
    parser.add_argument(
        "--version", action="version", version="%(prog)s " + __version__
    )
    parser.add_argument(
        "--autobrief",
        action="store_true",
        help="use the docstring summary line as \\brief description",
    )
    parser.add_argument(
        "--debug", action="store_true", help="enable debug output on stderr"
    )
    parser.add_argument("filename", metavar="FILENAME")
    return parser.parse_args()
def main():
    """Start the parser on the file given by the filename as the first
    argument on the commandline.
    """
    # ``args`` is deliberately module-global: the Doxypy callbacks read
    # args.debug / args.autobrief directly.
    global args
    args = argParse()
    fsm = Doxypy()
    fsm.parseFile(args.filename)
# Script entry point: filter the file named on the command line.
if __name__ == "__main__":
    main()
|
gpodder | jsonconfig | # -*- coding: utf-8 -*-
#
# gPodder - A media aggregator and podcast client
# Copyright (c) 2005-2018 The gPodder Team
#
# gPodder is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# gPodder is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
#
# jsonconfig.py -- JSON Config Backend
# Thomas Perl <thp@gpodder.org> 2012-01-18
#
import copy
import json
from functools import reduce
class JsonConfigSubtree(object):
    """Proxy for a nested section of a JsonConfig, addressed by dotted name."""

    def __init__(self, parent, name):
        self._parent = parent
        self._name = name

    def __repr__(self):
        return "<Subtree %r of JsonConfig>" % (self._name,)

    def _attr(self, name):
        """Return the fully-qualified dotted key for *name*."""
        return ".".join((self._name, name))

    def __getitem__(self, name):
        return self._parent._lookup(self._name)[name]

    def __delitem__(self, name):
        del self._parent._lookup(self._name)[name]

    def __setitem__(self, name, value):
        self._parent._lookup(self._name)[name] = value

    def __getattr__(self, name):
        if name == "keys":
            # Kludge so that dict() works on a JsonConfigSubtree: expose
            # the underlying dict's keys() method.
            return getattr(self._parent._lookup(self._name), name)
        return getattr(self._parent, self._attr(name))

    def __setattr__(self, name, value):
        # Underscore names are real instance attributes; everything else
        # is forwarded to the parent config under the dotted key.
        if name.startswith("_"):
            object.__setattr__(self, name, value)
        else:
            self._parent.__setattr__(self._attr(name), value)
class JsonConfig(object):
    # Indentation (spaces) used when serializing via repr()/json.dumps.
    _INDENT = 2

    def __init__(self, data=None, default=None, on_key_changed=None):
        """
        Create a new JsonConfig object

        data: A JSON string that contains the data to load (optional)
        default: A dict that contains default config values (optional)
        on_key_changed: Callback when a value changes (optional)

        The signature of on_key_changed looks like this:

            func(name, old_value, new_value)

            name: The key name, e.g. "ui.gtk.toolbar"
            old_value: The old value, e.g. False
            new_value: The new value, e.g. True

        For newly-set keys, on_key_changed is also called. In this case,
        None will be the old_value:

        >>> def callback(*args): print('callback:', args)
        >>> c = JsonConfig(on_key_changed=callback)
        >>> c.a.b = 10
        callback: ('a.b', None, 10)
        >>> c.a.b = 11
        callback: ('a.b', 10, 11)
        >>> c.x.y.z = [1,2,3]
        callback: ('x.y.z', None, [1, 2, 3])
        >>> c.x.y.z = 42
        callback: ('x.y.z', [1, 2, 3], 42)

        Please note that dict-style access will not call on_key_changed:

        >>> def callback(*args): print('callback:', args)
        >>> c = JsonConfig(on_key_changed=callback)
        >>> c.a.b = 1 # This works as expected
        callback: ('a.b', None, 1)
        >>> c.a['c'] = 10 # This doesn't call on_key_changed!
        >>> del c.a['c'] # This also doesn't call on_key_changed!
        """
        self._default = default
        # Deep-copy so later writes never mutate the caller's default dict.
        self._data = copy.deepcopy(self._default) or {}
        self._on_key_changed = on_key_changed
        if data is not None:
            self._restore(data)

    def _restore(self, backup):
        """
        Restore a previous state saved with repr()

        This function allows you to "snapshot" the current values of
        the configuration and reload them later on. Any missing
        default values will be added on top of the restored config.

        Returns True if new keys from the default config have been added,
        False if no keys have been added (backup contains all default keys)

        >>> c = JsonConfig()
        >>> c.a.b = 10
        >>> backup = repr(c)
        >>> print(c.a.b)
        10
        >>> c.a.b = 11
        >>> print(c.a.b)
        11
        >>> c._restore(backup)
        False
        >>> print(c.a.b)
        10
        """
        self._data = json.loads(backup)
        # Add newly-added default configuration options
        if self._default is not None:
            return self._merge_keys(self._default)
        return False

    def _merge_keys(self, merge_source):
        """Merge keys from merge_source into this config object

        Return True if new keys were merged, False otherwise
        """
        added_new_key = False
        # Recurse into the data and add missing items
        work_queue = [(self._data, merge_source)]
        while work_queue:
            data, default = work_queue.pop()
            for key, value in default.items():
                if key not in data:
                    # Copy defaults for missing key
                    data[key] = copy.deepcopy(value)
                    added_new_key = True
                elif isinstance(value, dict):
                    # Recurse into sub-dictionaries
                    work_queue.append((data[key], value))
                elif type(value) != type(data[key]):  # noqa
                    # Type mismatch of current value and default
                    if isinstance(value, int) and isinstance(data[key], float):
                        # Convert float to int if default value is int
                        data[key] = int(data[key])
        return added_new_key

    def __repr__(self):
        """
        >>> c = JsonConfig('{"a": 1}')
        >>> print(c)
        {
          "a": 1
        }
        """
        return json.dumps(self._data, indent=self._INDENT, sort_keys=True)

    def _lookup(self, name):
        # Walk a dotted path, e.g. "a.b.c" -> self._data["a"]["b"]["c"].
        return reduce(lambda d, k: d[k], name.split("."), self._data)

    def _keys_iter(self):
        # Breadth-first walk yielding the dotted name of every leaf value.
        work_queue = []
        work_queue.append(([], self._data))
        while work_queue:
            path, data = work_queue.pop(0)
            if isinstance(data, dict):
                for key in sorted(data.keys()):
                    work_queue.append((path + [key], data[key]))
            else:
                yield ".".join(path)

    def __getattr__(self, name):
        # Leaf values are returned directly; missing keys and dict values
        # yield a subtree proxy so chained access (c.a.b.c) keeps working.
        try:
            value = self._lookup(name)
            if not isinstance(value, dict):
                return value
        except KeyError:
            pass
        return JsonConfigSubtree(self, name)

    def __setattr__(self, name, value):
        # Underscore-prefixed names are real instance attributes, not
        # configuration keys.
        if name.startswith("_"):
            object.__setattr__(self, name, value)
            return
        attrs = name.split(".")
        target_dict = self._data
        while attrs:
            attr = attrs.pop(0)
            if not attrs:
                # Last path component: store the value and fire the callback.
                old_value = target_dict.get(attr, None)
                # ``attr not in target_dict`` distinguishes a genuinely new
                # key from an existing key that already holds ``value``.
                if old_value != value or attr not in target_dict:
                    target_dict[attr] = value
                    if self._on_key_changed is not None:
                        self._on_key_changed(name, old_value, value)
                break

            # Intermediate component: descend, creating (or replacing a
            # non-dict) node as needed.
            target = target_dict.get(attr, None)
            if target is None or not isinstance(target, dict):
                target_dict[attr] = target = {}
            target_dict = target
|
decrypters | LixIn | # -*- coding: utf-8 -*-
import re
import urllib.parse
from ..base.decrypter import BaseDecrypter
class LixIn(BaseDecrypter):
    """lix.in link container decrypter."""

    __name__ = "LixIn"
    __type__ = "decrypter"
    __version__ = "0.28"
    __status__ = "testing"

    __pattern__ = r"http://(?:www\.)?lix\.in/(?P<ID>.+)"
    __config__ = [
        ("enabled", "bool", "Activated", True),
        ("use_premium", "bool", "Use premium account if available", True),
        (
            "folder_per_package",
            "Default;Yes;No",
            "Create folder for each package",
            "Default",
        ),
    ]

    __description__ = """Lix.in decrypter plugin"""
    __license__ = "GPLv3"
    __authors__ = [("spoob", "spoob@pyload.net")]

    CAPTCHA_PATTERN = r'<img src="(captcha_img\.php\?.*?)"'
    SUBMIT_PATTERN = r"value=\'continue.*?\'"
    LINK_PATTERN = r'name="ifram" src="(.*?)"'

    def decrypt(self, pyfile):
        """Resolve a lix.in container URL to its destination link.

        Solves the captcha when one is presented, submits the continue
        form and stores the extracted target URL in ``self.links``.
        """
        url = pyfile.url
        m = re.match(self.__pattern__, url)
        if m is None:
            # self.error() raises, so execution stops here on a bad URL.
            self.error(self._("Unable to identify file ID"))

        # Renamed from ``id`` so the builtin is not shadowed.
        file_id = m.group("ID")
        self.log_debug(f"File id is {file_id}")

        self.data = self.load(url)
        m = re.search(self.SUBMIT_PATTERN, self.data)
        if m is None:
            self.error(self._("Link doesn't seem valid"))

        m = re.search(self.CAPTCHA_PATTERN, self.data)
        if m is not None:
            # Captcha-protected: solve it, then re-submit the form with it.
            captcharesult = self.captcha.decrypt(
                urllib.parse.urljoin("http://lix.in/", m.group(1))
            )
            self.data = self.load(
                url, post={"capt": captcharesult, "submit": "submit", "tiny": file_id}
            )
            if re.search(self.CAPTCHA_PATTERN, self.data):
                self.fail(self._("No captcha solved"))
        else:
            self.data = self.load(url, post={"submit": "submit", "tiny": file_id})

        m = re.search(self.LINK_PATTERN, self.data)
        if m is None:
            self.error(self._("Unable to find destination url"))
        else:
            self.links = [m.group(1)]
            self.log_debug(f"Found link {self.links[0]}, adding to package")
|
-development | helpers_locale | import os
# https://msdn.microsoft.com/en-us/library/windows/desktop/dd317756(v=vs.85).aspx
# Codec names grouped by the platform they are typically produced on.
# NOTE(review): windows/linux collections are sets while mac/universal/other
# are lists -- kept as-is for interface compatibility with existing callers.
windows_codecs = {
    "cp1252",  # Standard Windows
    "cp1251",  # Russian
    "cp037",
    "cp424",
    "cp437",
    "cp500",
    "cp720",
    "cp737",
    "cp775",
    "cp850",
    "cp852",
    "cp855",
    "cp856",
    "cp857",
    "cp858",
    "cp860",
    "cp861",
    "cp862",
    "cp863",
    "cp864",
    "cp865",
    "cp866",
    "cp869",
    "cp874",
    "cp875",
    "cp932",
    "cp949",
    "cp950",
    "cp1006",
    "cp1026",
    "cp1140",
    "cp1250",
    "cp1253",
    "cp1254",
    "cp1255",
    "cp1256",
    "cp1257",
    "cp1258",
}
linux_codecs = {
    "utf_8",  # Generic Linux/Mac
}
mac_codecs = [
    "utf_8",  # Generic Linux/Mac
    "mac_cyrillic",
    "mac_greek",
    "mac_iceland",
    "mac_latin2",
    "mac_roman",
    "mac_turkish",
]
universal_codecs = [
    "utf_16",
    "utf_32",
    "utf_32_be",
    "utf_32_le",
    "utf_16_be",
    "utf_16_le",
    "utf_7",
    "utf_8_sig",
]
other_codecs = [
    "ascii",  # fixed: was "scii", which is not a valid Python codec name
    "big5",
    "big5hkscs",
    "euc_jp",
    "euc_jis_2004",
    "euc_jisx0213",
    "euc_kr",
    "gb2312",
    "gbk",
    "gb18030",
    "hz",
    "iso2022_jp",
    "iso2022_jp_1",
    "iso2022_jp_2",
    "iso2022_jp_2004",
    "iso2022_jp_3",
    "iso2022_jp_ext",
    "iso2022_kr",
    "latin_1",
    "iso8859_2",
    "iso8859_3",
    "iso8859_4",
    "iso8859_5",
    "iso8859_6",
    "iso8859_7",
    "iso8859_8",
    "iso8859_9",
    "iso8859_10",
    "iso8859_11",
    "iso8859_13",
    "iso8859_14",
    "iso8859_15",
    "iso8859_16",
    "johab",
    "koi8_r",
    "koi8_u",
    "ptcp154",
    "shift_jis",
    "shift_jis_2004",
    "shift_jisx0213",
]
# Map platform.system() style names to their codec collections.
system_names = {
    "Windows": windows_codecs,
    "Linux": linux_codecs,
    "Darwin": mac_codecs,
}
def GetPath(root, file=None, codec=None):
    """Return the canonical absolute path of *root*, optionally joined
    with *file* and decoded with *codec*.

    On Python 3 ``os.path`` returns ``str`` for ``str`` input; ``str`` has
    no ``decode``, so the original unconditional ``path.decode(codec)``
    raised AttributeError. Decoding is now only applied to ``bytes`` paths.
    """
    # Replace this with the function we actually use for this
    path = os.path.realpath(os.path.abspath(root))
    if file:
        path = os.path.join(path, file)
    if codec and isinstance(path, bytes):
        path = path.decode(codec)
    return path
def GetUnicodePath(root, file=None, codec=None):
    """Return the canonical absolute path of *root* as ``str``, optionally
    joined with *file*; *codec* is used to decode ``bytes`` paths.

    On Python 3 ``str(path, codec)`` requires a bytes-like object, so the
    original call crashed for ``str`` paths; decoding now only happens
    when the path is actually ``bytes``.
    """
    # Replace this with the function we actually use for this
    path = os.path.realpath(os.path.abspath(root))
    if file:
        path = os.path.join(path, file)
    if codec:
        path = path.decode(codec) if isinstance(path, bytes) else path
    else:
        path = str(path)
    return path
|
extractor | xiami | # coding: utf-8
from __future__ import unicode_literals
from ..compat import compat_urllib_parse_unquote
from ..utils import int_or_none
from .common import InfoExtractor
class XiamiBaseIE(InfoExtractor):
    # Playlist API endpoint; returns track JSON for a song/album/artist/collect id.
    _API_BASE_URL = "https://emumo.xiami.com/song/playlist/cat/json/id"

    def _download_webpage_handle(self, *args, **kwargs):
        # NOTE(review): upstream _download_webpage_handle conventionally
        # returns a (webpage, url_handle) tuple; the ``in`` test below would
        # then check tuple membership, not a substring -- confirm against
        # the InfoExtractor base class this file imports.
        webpage = super(XiamiBaseIE, self)._download_webpage_handle(*args, **kwargs)
        if ">Xiami is currently not available in your country.<" in webpage:
            self.raise_geo_restricted(
                "Xiami is currently not available in your country"
            )
        return webpage

    def _extract_track(self, track, track_id=None):
        """Build an info dict for one track entry of the playlist JSON."""
        track_name = track.get("songName") or track.get("name") or track["subName"]
        artist = track.get("artist") or track.get("artist_name") or track.get("singers")
        title = "%s - %s" % (artist, track_name) if artist else track_name
        track_url = self._decrypt(track["location"])

        subtitles = {}
        lyrics_url = track.get("lyric_url") or track.get("lyric")
        # Only absolute http(s) lyric URLs are usable as subtitles.
        if lyrics_url and lyrics_url.startswith("http"):
            subtitles["origin"] = [{"url": lyrics_url}]

        return {
            "id": track.get("song_id") or track_id,
            "url": track_url,
            "title": title,
            "thumbnail": track.get("pic") or track.get("album_pic"),
            "duration": int_or_none(track.get("length")),
            # "a;b;c" artist lists: use only the first name as creator.
            "creator": track.get("artist", "").split(";")[0],
            "track": track_name,
            "track_number": int_or_none(track.get("track")),
            "album": track.get("album_name") or track.get("title"),
            "artist": artist,
            "subtitles": subtitles,
        }

    def _extract_tracks(self, item_id, referer, typ=None):
        """Fetch the playlist JSON for *item_id* and extract all tracks."""
        playlist = self._download_json(
            "%s/%s%s" % (self._API_BASE_URL, item_id, "/type/%s" % typ if typ else ""),
            item_id,
            headers={
                "Referer": referer,
            },
        )
        return [
            self._extract_track(track, item_id)
            for track in playlist["data"]["trackList"]
        ]

    @staticmethod
    def _decrypt(origin):
        # The "location" field is a columnar transposition: the first
        # character is the row count n, the remainder is the URL written
        # down the columns. Rebuild it row by row, URL-unquote it and map
        # '^' back to '0'.
        n = int(origin[0])
        origin = origin[1:]
        short_length = len(origin) // n
        long_num = len(origin) - short_length * n
        l = tuple()
        for i in range(0, n):
            length = short_length
            if i < long_num:
                # The first ``long_num`` rows carry one extra character.
                length += 1
            l += (origin[0:length],)
            origin = origin[length:]
        ans = ""
        for i in range(0, short_length + 1):
            for j in range(0, n):
                if len(l[j]) > i:
                    ans += l[j][i]
        return compat_urllib_parse_unquote(ans).replace("^", "0")
class XiamiSongIE(XiamiBaseIE):
    """Extractor for a single xiami.com song page."""

    IE_NAME = "xiami:song"
    IE_DESC = "虾米音乐"
    _VALID_URL = r"https?://(?:www\.)?xiami\.com/song/(?P<id>[^/?#&]+)"
    _TESTS = [
        {
            "url": "http://www.xiami.com/song/1775610518",
            "md5": "521dd6bea40fd5c9c69f913c232cb57e",
            "info_dict": {
                "id": "1775610518",
                "ext": "mp3",
                "title": "HONNE - Woman",
                "thumbnail": r"re:http://img\.xiami\.net/images/album/.*\.jpg",
                "duration": 265,
                "creator": "HONNE",
                "track": "Woman",
                "album": "Woman",
                "artist": "HONNE",
                "subtitles": {
                    "origin": [
                        {
                            "ext": "lrc",
                        }
                    ],
                },
            },
            "skip": "Georestricted",
        },
        {
            "url": "http://www.xiami.com/song/1775256504",
            "md5": "932a3abd45c6aa2b1fdbe028fcb4c4fc",
            "info_dict": {
                "id": "1775256504",
                "ext": "mp3",
                "title": "戴荃 - 悟空",
                "thumbnail": r"re:http://img\.xiami\.net/images/album/.*\.jpg",
                "duration": 200,
                "creator": "戴荃",
                "track": "悟空",
                "album": "悟空",
                "artist": "戴荃",
                "subtitles": {
                    "origin": [
                        {
                            "ext": "lrc",
                        }
                    ],
                },
            },
            "skip": "Georestricted",
        },
        {
            "url": "http://www.xiami.com/song/1775953850",
            "info_dict": {
                "id": "1775953850",
                "ext": "mp3",
                "title": "До Скону - Чума Пожирает Землю",
                "thumbnail": r"re:http://img\.xiami\.net/images/album/.*\.jpg",
                "duration": 683,
                "creator": "До Скону",
                "track": "Чума Пожирает Землю",
                "track_number": 7,
                "album": "Ад",
                "artist": "До Скону",
            },
            "params": {
                "skip_download": True,
            },
        },
        {
            "url": "http://www.xiami.com/song/xLHGwgd07a1",
            "only_matching": True,
        },
    ]

    def _real_extract(self, url):
        # The playlist API returns a one-element track list for a song id.
        return self._extract_tracks(self._match_id(url), url)[0]
class XiamiPlaylistBaseIE(XiamiBaseIE):
    """Shared extractor for xiami playlist-like pages; subclasses set _TYPE."""

    def _real_extract(self, url):
        playlist_id = self._match_id(url)
        tracks = self._extract_tracks(playlist_id, url, self._TYPE)
        return self.playlist_result(tracks, playlist_id)
class XiamiAlbumIE(XiamiPlaylistBaseIE):
    """Extractor for xiami.com album pages (playlist API type 1)."""

    IE_NAME = "xiami:album"
    IE_DESC = "虾米音乐 - 专辑"
    _VALID_URL = r"https?://(?:www\.)?xiami\.com/album/(?P<id>[^/?#&]+)"
    _TYPE = "1"
    _TESTS = [
        {
            "url": "http://www.xiami.com/album/2100300444",
            "info_dict": {
                "id": "2100300444",
            },
            "playlist_count": 10,
            "skip": "Georestricted",
        },
        {
            "url": "http://www.xiami.com/album/512288?spm=a1z1s.6843761.1110925389.6.hhE9p9",
            "only_matching": True,
        },
        {
            "url": "http://www.xiami.com/album/URVDji2a506",
            "only_matching": True,
        },
    ]
class XiamiArtistIE(XiamiPlaylistBaseIE):
    """Extractor for xiami.com artist pages (playlist API type 2)."""

    IE_NAME = "xiami:artist"
    IE_DESC = "虾米音乐 - 歌手"
    _VALID_URL = r"https?://(?:www\.)?xiami\.com/artist/(?P<id>[^/?#&]+)"
    _TYPE = "2"
    _TESTS = [
        {
            "url": "http://www.xiami.com/artist/2132?spm=0.0.0.0.dKaScp",
            "info_dict": {
                "id": "2132",
            },
            "playlist_count": 20,
            "skip": "Georestricted",
        },
        {
            "url": "http://www.xiami.com/artist/bC5Tk2K6eb99",
            "only_matching": True,
        },
    ]
class XiamiCollectionIE(XiamiPlaylistBaseIE):
    """Extractor for xiami.com user collections (playlist API type 3)."""

    IE_NAME = "xiami:collection"
    IE_DESC = "虾米音乐 - 精选集"
    _VALID_URL = r"https?://(?:www\.)?xiami\.com/collect/(?P<id>[^/?#&]+)"
    _TYPE = "3"
    _TEST = {
        "url": "http://www.xiami.com/collect/156527391?spm=a1z1s.2943601.6856193.12.4jpBnr",
        "info_dict": {
            "id": "156527391",
        },
        "playlist_mincount": 29,
        "skip": "Georestricted",
    }
|
Surface | InitGui | # -*- coding: utf8 -*-
# ***************************************************************************
# * Copyright (c) 2014 Nathan Miller <Nathan.A.Mill@gmail.com> *
# * Copyright (c) 2014 Balázs Bámer *
# * *
# * This file is part of the FreeCAD CAx development system. *
# * *
# * This library is free software; you can redistribute it and/or *
# * modify it under the terms of the GNU Library General Public *
# * License as published by the Free Software Foundation; either *
# * version 2 of the License, or (at your option) any later version. *
# * *
# * This library is distributed in the hope that it will be useful, *
# * but WITHOUT ANY WARRANTY; without even the implied warranty of *
# * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *
# * GNU Library General Public License for more details. *
# * *
# * You should have received a copy of the GNU Library General Public *
# * License along with this library; see the file COPYING.LIB. If not, *
# * write to the Free Software Foundation, Inc., 59 Temple Place, *
# * Suite 330, Boston, MA 02111-1307, USA *
# * *
# ***************************************************************************
"""The Surface Workbench GUI initialization."""
import os
import FreeCAD as App
import FreeCADGui as Gui
class SurfaceWorkbench(Gui.Workbench):
    """Surface workbench object."""

    # Workbench icon shipped with the Surface module's resources.
    Icon = os.path.join(
        App.getResourceDir(),
        "Mod",
        "Surface",
        "Resources",
        "icons",
        "Surface_Workbench.svg",
    )
    MenuText = "Surface"
    ToolTip = "Surface workbench: Create and edit complex surfaces"

    def Initialize(self):
        """Initialize the module."""
        # Imported for their side effects: loading the Surface commands
        # and GUI components when the workbench is first activated.
        import Surface
        import SurfaceGui

    def GetClassName(self):
        # Name of the C++ workbench class this Python object is paired with.
        return "SurfaceGui::Workbench"
# Register the workbench with the FreeCAD GUI at module import time.
Gui.addWorkbench(SurfaceWorkbench())
|
borg | repository | import errno
import mmap
import os
import shutil
import stat
import struct
import time
from binascii import unhexlify
from collections import defaultdict
from configparser import ConfigParser
from datetime import datetime, timezone
from functools import partial
from itertools import islice
from .checksums import StreamingXXH64, crc32
from .constants import * # NOQA
from .crypto.file_integrity import FileIntegrityError, IntegrityCheckedFile
from .hashindex import NSIndex, NSIndex1, NSIndexEntry, hashindex_variant
from .helpers import (
Error,
ErrorWithTraceback,
IntegrityError,
Location,
ProgressIndicatorPercent,
bin_to_hex,
format_file_size,
msgpack,
parse_file_size,
safe_unlink,
secure_erase,
)
from .helpers.lrucache import LRUCache
from .locking import Lock, LockError, LockErrorT
from .logger import create_logger
from .manifest import Manifest
from .platform import SaveFile, SyncFile, safe_fadvise, sync_dir
from .repoobj import RepoObj
logger = create_logger(__name__)

# Segment file magic: every segment file starts with these bytes.
MAGIC = b"BORG_SEG"
MAGIC_LEN = len(MAGIC)

# Log entry tags (see the Repository docstring: PUT/DELETE/COMMIT log).
TAG_PUT = 0
TAG_DELETE = 1
TAG_COMMIT = 2
TAG_PUT2 = 3

# Highest ID usable as TAG_* value
#
# Code may expect not to find any tags exceeding this value. In particular,
# in order to speed up `borg check --repair`, any tag greater than MAX_TAG_ID
# is assumed to be corrupted. When increasing this value, in order to add more
# tags, keep in mind that old versions of Borg accessing a new repository
# may not be able to handle the new tags.
MAX_TAG_ID = 15

# Factory for free-space accounting dicts: missing keys count as 0.
FreeSpace = partial(defaultdict, int)
def header_size(tag):
    """Return the on-disk header size in bytes for a segment entry *tag*.

    Raises ValueError for tags this code does not know about.
    """
    if tag == TAG_PUT2:
        return LoggedIO.HEADER_ID_SIZE + LoggedIO.ENTRY_HASH_SIZE
    if tag in (TAG_PUT, TAG_DELETE):
        return LoggedIO.HEADER_ID_SIZE
    if tag == TAG_COMMIT:
        return LoggedIO.header_fmt.size
    raise ValueError(f"unsupported tag: {tag!r}")
class Repository:
"""
Filesystem based transactional key value store
Transactionality is achieved by using a log (aka journal) to record changes. The log is a series of numbered files
called segments. Each segment is a series of log entries. The segment number together with the offset of each
entry relative to its segment start establishes an ordering of the log entries. This is the "definition" of
time for the purposes of the log.
Log entries are either PUT, DELETE or COMMIT.
A COMMIT is always the final log entry in a segment and marks all data from the beginning of the log until the
segment ending with the COMMIT as committed and consistent. The segment number of a segment ending with a COMMIT
is called the transaction ID of that commit, and a segment ending with a COMMIT is called committed.
When reading from a repository it is first checked whether the last segment is committed. If it is not, then
all segments after the last committed segment are deleted; they contain log entries whose consistency is not
established by a COMMIT.
Note that the COMMIT can't establish consistency by itself, but only manages to do so with proper support from
the platform (including the hardware). See platform.base.SyncFile for details.
A PUT inserts a key-value pair. The value is stored in the log entry, hence the repository implements
full data logging, meaning that all data is consistent, not just metadata (which is common in file systems).
A DELETE marks a key as deleted.
For a given key only the last entry regarding the key, which is called current (all other entries are called
superseded), is relevant: If there is no entry or the last entry is a DELETE then the key does not exist.
Otherwise the last PUT defines the value of the key.
By superseding a PUT (with either another PUT or a DELETE) the log entry becomes obsolete. A segment containing
such obsolete entries is called sparse, while a segment containing no such entries is called compact.
Sparse segments can be compacted and thereby disk space freed. This destroys the transaction for which the
superseded entries where current.
On disk layout:
dir/README
dir/config
dir/data/<X // SEGMENTS_PER_DIR>/<X>
dir/index.X
dir/hints.X
File system interaction
-----------------------
LoggedIO generally tries to rely on common behaviours across transactional file systems.
Segments that are deleted are truncated first, which avoids problems if the FS needs to
allocate space to delete the dirent of the segment. This mostly affects CoW file systems,
traditional journaling file systems have a fairly good grip on this problem.
Note that deletion, i.e. unlink(2), is atomic on every file system that uses inode reference
counts, which includes pretty much all of them. To remove a dirent the inodes refcount has
to be decreased, but you can't decrease the refcount before removing the dirent nor can you
decrease the refcount after removing the dirent. File systems solve this with a lock,
and by ensuring it all stays within the same FS transaction.
Truncation is generally not atomic in itself, and combining truncate(2) and unlink(2) is of
course never guaranteed to be atomic. Truncation in a classic extent-based FS is done in
roughly two phases, first the extents are removed then the inode is updated. (In practice
this is of course way more complex).
LoggedIO gracefully handles truncate/unlink splits as long as the truncate resulted in
a zero length file. Zero length segments are considered not to exist, while LoggedIO.cleanup()
will still get rid of them.
"""
# Nested exception types; each docstring doubles as the user-facing
# message template (filled in by the Error base class), so the docstring
# text is part of runtime behavior and must not be reworded.
class DoesNotExist(Error):
    """Repository {} does not exist."""

class AlreadyExists(Error):
    """A repository already exists at {}."""

class PathAlreadyExists(Error):
    """There is already something at {}."""

class ParentPathDoesNotExist(Error):
    """The parent path of the repo directory [{}] does not exist."""

class InvalidRepository(Error):
    """{} is not a valid repository. Check repo config."""

class InvalidRepositoryConfig(Error):
    """{} does not have a valid configuration. Check repo config [{}]."""

class CheckNeeded(ErrorWithTraceback):
    """Inconsistency detected. Please run "borg check {}"."""

class ObjectNotFound(ErrorWithTraceback):
    """Object with key {} not found in repository {}."""

    def __init__(self, id, repo):
        # Normalize binary ids to hex so the message template renders readably.
        if isinstance(id, bytes):
            id = bin_to_hex(id)
        super().__init__(id, repo)
def __init__(
    self,
    path,
    create=False,
    exclusive=False,
    lock_wait=None,
    lock=True,
    append_only=False,
    storage_quota=None,
    make_parent_dirs=False,
    send_log_cb=None,
):
    """Set up a repository object for *path*.

    Only state is initialized here; the repository is actually created
    and/or opened later, in __enter__ (driven by ``create``, ``lock``
    and ``exclusive``).
    """
    self.path = os.path.abspath(path)
    self._location = Location("file://%s" % self.path)
    self.version = None
    # long-running repository methods which emit log or progress output are responsible for calling
    # the ._send_log method periodically to get log and progress output transferred to the borg client
    # in a timely manner, in case we have a RemoteRepository.
    # for local repositories ._send_log can be called also (it will just do nothing in that case).
    self._send_log = send_log_cb or (lambda: None)
    self.io = None  # type: LoggedIO
    self.lock = None
    self.index = None
    # This is an index of shadowed log entries during this transaction. Consider the following sequence:
    # segment_n PUT A, segment_x DELETE A
    # After the "DELETE A" in segment_x the shadow index will contain "A -> [n]".
    # .delete() is updating this index, it is persisted into "hints" file and is later used by .compact_segments().
    self.shadow_index = {}
    self._active_txn = False
    self.lock_wait = lock_wait
    self.do_lock = lock
    self.do_create = create
    self.created = False
    self.exclusive = exclusive
    self.append_only = append_only
    self.storage_quota = storage_quota
    self.storage_quota_use = 0
    self.transaction_doomed = None
    self.make_parent_dirs = make_parent_dirs
    # v2 is the default repo version for borg 2.0
    # v1 repos must only be used in a read-only way, e.g. for
    # --other-repo=V1_REPO with borg init and borg transfer!
    self.acceptable_repo_versions = (1, 2)
def __del__(self):
    if self.lock:
        # Reaching finalization with the lock still held means close()
        # was never called; clean up, then make noise -- relying on
        # __del__ for cleanup is a bug in the caller.
        self.close()
        assert False, "cleanup happened in Repository.__del__"
def __repr__(self):
    # e.g. "<Repository /path/to/repo>"
    return f"<{self.__class__.__name__} {self.path}>"
def __enter__(self):
    # Deferred creation: __init__ only records the ``create`` flag so
    # that the on-disk work happens inside the context manager.
    if self.do_create:
        self.do_create = False
        self.create(self.path)
        self.created = True
    self.open(self.path, bool(self.exclusive), lock_wait=self.lock_wait, lock=self.do_lock)
    return self
def __exit__(self, exc_type, exc_val, exc_tb):
    if exc_type is not None:
        no_space_left_on_device = exc_type is OSError and exc_val.errno == errno.ENOSPC
        # The ENOSPC could have originated somewhere else besides the Repository. The cleanup is always safe, unless
        # EIO or FS corruption ensues, which is why we specifically check for ENOSPC.
        if self._active_txn and no_space_left_on_device:
            logger.warning("No space left on device, cleaning up partial transaction to free space.")
            cleanup = True
        else:
            cleanup = False
        # Roll back any active transaction before releasing the lock.
        self._rollback(cleanup=cleanup)
    self.close()
@property
def id_str(self):
    # Hex representation of the binary repository id.
    return bin_to_hex(self.id)
@staticmethod
def is_repository(path):
    """Check whether there is already a Borg repository at *path*."""
    try:
        # Use binary mode to avoid troubles if a README contains some stuff not in our locale
        with open(os.path.join(path, "README"), "rb") as fd:
            # Read only the first ~100 bytes (if any), in case some README file we stumble upon is large.
            readme_head = fd.read(100)
            # The first comparison captures our current variant (REPOSITORY_README), the second comparison
            # is an older variant of the README file (used by 1.0.x).
            return b"Borg Backup repository" in readme_head or b"Borg repository" in readme_head
    except OSError:
        # Ignore FileNotFound, PermissionError, ...
        return False
def check_can_create_repository(self, path):
"""
Raise an exception if a repository already exists at *path* or any parent directory.
Checking parent directories is done for two reasons:
(1) It's just a weird thing to do, and usually not intended. A Borg using the "parent" repository
may be confused, or we may accidentally put stuff into the "data/" or "data/<n>/" directories.
(2) When implementing repository quotas (which we currently don't), it's important to prohibit
folks from creating quota-free repositories. Since no one can create a repository within another
repository, user's can only use the quota'd repository, when their --restrict-to-path points
at the user's repository.
"""
try:
st = os.stat(path)
except FileNotFoundError:
pass # nothing there!
else:
# there is something already there!
if self.is_repository(path):
raise self.AlreadyExists(path)
if not stat.S_ISDIR(st.st_mode) or os.listdir(path):
raise self.PathAlreadyExists(path)
# an empty directory is acceptable for us.
while True:
# Check all parent directories for Borg's repository README
previous_path = path
# Thus, path = previous_path/..
path = os.path.abspath(os.path.join(previous_path, os.pardir))
if path == previous_path:
# We reached the root of the directory hierarchy (/.. = / and C:\.. = C:\).
break
if self.is_repository(path):
raise self.AlreadyExists(path)
def create(self, path):
"""Create a new empty repository at `path`"""
self.check_can_create_repository(path)
if self.make_parent_dirs:
parent_path = os.path.join(path, os.pardir)
os.makedirs(parent_path, exist_ok=True)
if not os.path.exists(path):
try:
os.mkdir(path)
except FileNotFoundError as err:
raise self.ParentPathDoesNotExist(path) from err
with open(os.path.join(path, "README"), "w") as fd:
fd.write(REPOSITORY_README)
os.mkdir(os.path.join(path, "data"))
config = ConfigParser(interpolation=None)
config.add_section("repository")
self.version = 2
config.set("repository", "version", str(self.version))
config.set("repository", "segments_per_dir", str(DEFAULT_SEGMENTS_PER_DIR))
config.set("repository", "max_segment_size", str(DEFAULT_MAX_SEGMENT_SIZE))
config.set("repository", "append_only", str(int(self.append_only)))
if self.storage_quota:
config.set("repository", "storage_quota", str(self.storage_quota))
else:
config.set("repository", "storage_quota", "0")
config.set("repository", "additional_free_space", "0")
config.set("repository", "id", bin_to_hex(os.urandom(32)))
self.save_config(path, config)
    def save_config(self, path, config):
        """Write *config* to <path>/config, securely erasing the previous contents.
        The old config may hold repokey material: a hardlink ("config.old") keeps a
        reference to the old inode, the config is then atomically replaced via
        SaveFile, and finally the linked old copy is securely erased.
        """
        config_path = os.path.join(path, "config")
        old_config_path = os.path.join(path, "config.old")
        # Leftover "config.old" from an interrupted earlier update - erase it now.
        if os.path.isfile(old_config_path):
            logger.warning("Old config file not securely erased on previous config update")
            secure_erase(old_config_path, avoid_collateral_damage=True)
        if os.path.isfile(config_path):
            link_error_msg = (
                "Failed to erase old repository config file securely (hardlinks not supported). "
                "Old repokey data, if any, might persist on physical storage."
            )
            try:
                # The hardlink keeps the old inode reachable so it can be securely
                # erased after SaveFile has replaced config_path.
                os.link(config_path, old_config_path)
            except OSError as e:
                # These errnos mean "hardlinks not supported / not permitted here";
                # anything else is unexpected and re-raised.
                if e.errno in (
                    errno.EMLINK,
                    errno.ENOSYS,
                    errno.EPERM,
                    errno.EACCES,
                    errno.ENOTSUP,
                    errno.EIO,
                ):
                    logger.warning(link_error_msg)
                else:
                    raise
            except AttributeError:
                # some python ports have no os.link, see #4901
                logger.warning(link_error_msg)
        try:
            with SaveFile(config_path) as fd:
                config.write(fd)
        except PermissionError as e:
            # error is only a problem if we even had a lock
            if self.do_lock:
                raise
            logger.warning(
                "%s: Failed writing to '%s'. This is expected when working on "
                "read-only repositories." % (e.strerror, e.filename)
            )
        # Now the old (hardlinked) config copy can be securely erased.
        if os.path.isfile(old_config_path):
            secure_erase(old_config_path, avoid_collateral_damage=True)
def save_key(self, keydata):
assert self.config
keydata = keydata.decode("utf-8") # remote repo: msgpack issue #99, getting bytes
# note: saving an empty key means that there is no repokey any more
self.config.set("repository", "key", keydata)
self.save_config(self.path, self.config)
def load_key(self):
keydata = self.config.get("repository", "key", fallback="").strip()
# note: if we return an empty string, it means there is no repo key
return keydata.encode("utf-8") # remote repo: msgpack issue #99, returning bytes
def destroy(self):
"""Destroy the repository at `self.path`"""
if self.append_only:
raise ValueError(self.path + " is in append-only mode")
self.close()
os.remove(os.path.join(self.path, "config")) # kill config first
shutil.rmtree(self.path)
def get_index_transaction_id(self):
indices = sorted(
int(fn[6:])
for fn in os.listdir(self.path)
if fn.startswith("index.") and fn[6:].isdigit() and os.stat(os.path.join(self.path, fn)).st_size != 0
)
if indices:
return indices[-1]
else:
return None
    def check_transaction(self):
        """Ensure the on-disk index matches the segments' committed state.
        If the index is outdated or missing, replay segments to rebuild it.
        If an index exists but no committed segment state can be found at all,
        raise CheckNeeded: there is no identifiable valid repo state to use.
        """
        index_transaction_id = self.get_index_transaction_id()
        segments_transaction_id = self.io.get_segments_transaction_id()
        if index_transaction_id is not None and segments_transaction_id is None:
            # we have a transaction id from the index, but we did not find *any*
            # commit in the segment files (thus no segments transaction id).
            # this can happen if a lot of segment files are lost, e.g. due to a
            # filesystem or hardware malfunction. it means we have no identifiable
            # valid (committed) state of the repo which we could use.
            msg = '%s" - although likely this is "beyond repair' % self.path  # dirty hack
            raise self.CheckNeeded(msg)
        # Attempt to rebuild index automatically if we crashed between commit
        # tag write and index save.
        if index_transaction_id != segments_transaction_id:
            if index_transaction_id is not None and index_transaction_id > segments_transaction_id:
                # Index is newer than any committed segment state: replay everything.
                replay_from = None
            else:
                replay_from = index_transaction_id
            self.replay_segments(replay_from, segments_transaction_id)
def get_transaction_id(self):
self.check_transaction()
return self.get_index_transaction_id()
    def break_lock(self):
        """Forcibly break (remove) the repository lock, regardless of its owner."""
        Lock(os.path.join(self.path, "lock")).break_lock()
def migrate_lock(self, old_id, new_id):
# note: only needed for local repos
if self.lock is not None:
self.lock.migrate_lock(old_id, new_id)
    def open(self, path, exclusive, lock_wait=None, lock=True):
        """Open an existing repository: acquire the lock, read and validate the config.
        *exclusive* selects an exclusive (write) vs. shared lock; *lock_wait* is the
        lock timeout; *lock=False* skips locking entirely (read-only usage).
        Raises DoesNotExist / InvalidRepository / InvalidRepositoryConfig on problems.
        """
        self.path = path
        try:
            st = os.stat(path)
        except FileNotFoundError:
            raise self.DoesNotExist(path)
        if not stat.S_ISDIR(st.st_mode):
            raise self.InvalidRepository(path)
        if lock:
            self.lock = Lock(os.path.join(path, "lock"), exclusive, timeout=lock_wait).acquire()
        else:
            self.lock = None
        self.config = ConfigParser(interpolation=None)
        try:
            with open(os.path.join(self.path, "config")) as fd:
                self.config.read_file(fd)
        except FileNotFoundError:
            # a directory without a config file is not a repository
            self.close()
            raise self.InvalidRepository(self.path)
        if "repository" not in self.config.sections():
            self.close()
            raise self.InvalidRepositoryConfig(path, "no repository section found")
        self.version = self.config.getint("repository", "version")
        if self.version not in self.acceptable_repo_versions:
            self.close()
            raise self.InvalidRepositoryConfig(
                path,
                "repository version %d is not supported by this borg version" % self.version,
            )
        self.max_segment_size = parse_file_size(self.config.get("repository", "max_segment_size"))
        if self.max_segment_size >= MAX_SEGMENT_SIZE_LIMIT:
            self.close()
            raise self.InvalidRepositoryConfig(path, "max_segment_size >= %d" % MAX_SEGMENT_SIZE_LIMIT)  # issue 3592
        self.segments_per_dir = self.config.getint("repository", "segments_per_dir")
        self.additional_free_space = parse_file_size(self.config.get("repository", "additional_free_space", fallback=0))
        # append_only can be set in the constructor
        # it shouldn't be overridden (True -> False) here
        self.append_only = self.append_only or self.config.getboolean("repository", "append_only", fallback=False)
        if self.storage_quota is None:
            # self.storage_quota is None => no explicit storage_quota was specified, use repository setting.
            self.storage_quota = parse_file_size(self.config.get("repository", "storage_quota", fallback=0))
        self.id = unhexlify(self.config.get("repository", "id").strip())
        self.io = LoggedIO(self.path, self.max_segment_size, self.segments_per_dir)
def _load_hints(self):
if (transaction_id := self.get_transaction_id()) is None:
# self is a fresh repo, so transaction_id is None and there is no hints file
return
hints = self._unpack_hints(transaction_id)
self.version = hints["version"]
self.storage_quota_use = hints["storage_quota_use"]
self.shadow_index = hints["shadow_index"]
def info(self):
"""return some infos about the repo (must be opened first)"""
info = dict(id=self.id, version=self.version, append_only=self.append_only)
self._load_hints()
info["storage_quota"] = self.storage_quota
info["storage_quota_use"] = self.storage_quota_use
return info
def close(self):
if self.lock:
if self.io:
self.io.close()
self.io = None
self.lock.release()
self.lock = None
    def commit(self, compact=True, threshold=0.1):
        """Commit transaction
        Writes a commit tag, optionally compacts sparse segments (skipped in
        append-only mode), persists index/hints, then resets transaction state
        via rollback. *threshold* is the minimum freeable ratio for compaction.
        """
        if self.transaction_doomed:
            # A previous fatal error poisoned this transaction - do not commit it.
            exception = self.transaction_doomed
            self.rollback()
            raise exception
        self.check_free_space()
        segment = self.io.write_commit()
        self.segments.setdefault(segment, 0)
        # The commit tag itself is immediately freeable space.
        self.compact[segment] += LoggedIO.header_fmt.size
        if compact and not self.append_only:
            self.compact_segments(threshold)
        self.write_index()
        self.rollback()
def _read_integrity(self, transaction_id, key):
integrity_file = "integrity.%d" % transaction_id
integrity_path = os.path.join(self.path, integrity_file)
try:
with open(integrity_path, "rb") as fd:
integrity = msgpack.unpack(fd)
except FileNotFoundError:
return
if integrity.get("version") != 2:
logger.warning(
"Unknown integrity data version %r in %s",
integrity.get("version"),
integrity_file,
)
return
return integrity[key]
    def open_index(self, transaction_id, auto_recover=True):
        """Read and return the repo index for *transaction_id* (empty NSIndex if None).
        On a missing/corrupt index file, the file is deleted; if *auto_recover*
        is set, the index is rebuilt (segment replay via check_transaction inside
        prepare_txn), committed, and re-opened. Otherwise the error is re-raised.
        """
        if transaction_id is None:
            return NSIndex()
        index_path = os.path.join(self.path, "index.%d" % transaction_id)
        variant = hashindex_variant(index_path)
        integrity_data = self._read_integrity(transaction_id, "index")
        try:
            with IntegrityCheckedFile(index_path, write=False, integrity_data=integrity_data) as fd:
                if variant == 2:
                    return NSIndex.read(fd)
                if variant == 1:  # legacy
                    return NSIndex1.read(fd)
        except (ValueError, OSError, FileIntegrityError) as exc:
            logger.warning("Repository index missing or corrupted, trying to recover from: %s", exc)
            os.unlink(index_path)
            if not auto_recover:
                raise
            self.prepare_txn(self.get_transaction_id())
            # don't leave an open transaction around
            self.commit(compact=False)
            return self.open_index(self.get_transaction_id())
    def _unpack_hints(self, transaction_id):
        """Read, integrity-check and unpack the hints file for *transaction_id*."""
        hints_path = os.path.join(self.path, "hints.%d" % transaction_id)
        integrity_data = self._read_integrity(transaction_id, "hints")
        with IntegrityCheckedFile(hints_path, write=False, integrity_data=integrity_data) as fd:
            return msgpack.unpack(fd)
    def prepare_txn(self, transaction_id, do_cleanup=True):
        """Begin a write transaction: ensure an exclusive lock, load index and hints.
        *transaction_id* is the committed state to start from (None = fresh/empty);
        *do_cleanup* removes uncommitted segments beyond that transaction first.
        Recovers automatically from a missing/corrupt hints file.
        """
        self._active_txn = True
        if self.do_lock and not self.lock.got_exclusive_lock():
            if self.exclusive is not None:
                # self.exclusive is either True or False, thus a new client is active here.
                # if it is False and we get here, the caller did not use exclusive=True although
                # it is needed for a write operation. if it is True and we get here, something else
                # went very wrong, because we should have an exclusive lock, but we don't.
                raise AssertionError("bug in code, exclusive lock should exist here")
            # if we are here, this is an old client talking to a new server (expecting lock upgrade).
            # or we are replaying segments and might need a lock upgrade for that.
            try:
                self.lock.upgrade()
            except (LockError, LockErrorT):
                # if upgrading the lock to exclusive fails, we do not have an
                # active transaction. this is important for "serve" mode, where
                # the repository instance lives on - even if exceptions happened.
                self._active_txn = False
                raise
        if not self.index or transaction_id is None:
            try:
                self.index = self.open_index(transaction_id, auto_recover=False)
            except (ValueError, OSError, FileIntegrityError) as exc:
                logger.warning("Checking repository transaction due to previous error: %s", exc)
                self.check_transaction()
                self.index = self.open_index(transaction_id, auto_recover=False)
        if transaction_id is None:
            # Fresh start: no committed state, everything empty.
            self.segments = {}  # XXX bad name: usage_count_of_segment_x = self.segments[x]
            self.compact = FreeSpace()  # XXX bad name: freeable_space_of_segment_x = self.compact[x]
            self.storage_quota_use = 0
            self.shadow_index.clear()
        else:
            if do_cleanup:
                self.io.cleanup(transaction_id)
            hints_path = os.path.join(self.path, "hints.%d" % transaction_id)
            index_path = os.path.join(self.path, "index.%d" % transaction_id)
            try:
                hints = self._unpack_hints(transaction_id)
            except (
                msgpack.UnpackException,
                FileNotFoundError,
                FileIntegrityError,
            ) as e:
                logger.warning(
                    "Repository hints file missing or corrupted, trying to recover: %s",
                    e,
                )
                if not isinstance(e, FileNotFoundError):
                    os.unlink(hints_path)
                # index must exist at this point
                os.unlink(index_path)
                # check_transaction() replays segments to rebuild index + hints,
                # then we retry the whole preparation with the same id.
                self.check_transaction()
                self.prepare_txn(transaction_id)
                return
            if hints["version"] == 1:
                # v1 hints lack sparse-space info; rebuild it per segment.
                logger.debug("Upgrading from v1 hints.%d", transaction_id)
                self.segments = hints["segments"]
                self.compact = FreeSpace()
                self.storage_quota_use = 0
                self.shadow_index = {}
                for segment in sorted(hints["compact"]):
                    logger.debug("Rebuilding sparse info for segment %d", segment)
                    self._rebuild_sparse(segment)
                logger.debug("Upgrade to v2 hints complete")
            elif hints["version"] != 2:
                raise ValueError("Unknown hints file version: %d" % hints["version"])
            else:
                self.segments = hints["segments"]
                self.compact = FreeSpace(hints["compact"])
                self.storage_quota_use = hints.get("storage_quota_use", 0)
                self.shadow_index = hints.get("shadow_index", {})
            # Drop uncommitted segments in the shadow index
            for key, shadowed_segments in self.shadow_index.items():
                for segment in list(shadowed_segments):
                    if segment > transaction_id:
                        shadowed_segments.remove(segment)
    def write_index(self):
        """Persist hints, index and integrity files for the current transaction.
        All files are written to ".tmp" names and fsynced first; the integrity
        file is renamed into place before hints/index, so after a crash we have
        either a fully consistent file set or detectable leftovers.
        """
        def flush_and_sync(fd):
            # make sure the data reaches stable storage before we rename
            fd.flush()
            os.fsync(fd.fileno())
        def rename_tmp(file):
            os.replace(file + ".tmp", file)
        hints = {
            "version": 2,
            "segments": self.segments,
            "compact": self.compact,
            "storage_quota_use": self.storage_quota_use,
            "shadow_index": self.shadow_index,
        }
        integrity = {
            # Integrity version started at 2, the current hints version.
            # Thus, integrity version == hints version, for now.
            "version": 2
        }
        transaction_id = self.io.get_segments_transaction_id()
        assert transaction_id is not None
        # Log transaction in append-only mode
        if self.append_only:
            with open(os.path.join(self.path, "transactions"), "a") as log:
                print(
                    "transaction %d, UTC time %s"
                    % (
                        transaction_id,
                        datetime.now(tz=timezone.utc).isoformat(timespec="microseconds"),
                    ),
                    file=log,
                )
        # Write hints file
        hints_name = "hints.%d" % transaction_id
        hints_file = os.path.join(self.path, hints_name)
        with IntegrityCheckedFile(hints_file + ".tmp", filename=hints_name, write=True) as fd:
            msgpack.pack(hints, fd)
            flush_and_sync(fd)
        integrity["hints"] = fd.integrity_data
        # Write repository index
        index_name = "index.%d" % transaction_id
        index_file = os.path.join(self.path, index_name)
        with IntegrityCheckedFile(index_file + ".tmp", filename=index_name, write=True) as fd:
            # XXX: Consider using SyncFile for index write-outs.
            self.index.write(fd)
            flush_and_sync(fd)
        integrity["index"] = fd.integrity_data
        # Write integrity file, containing checksums of the hints and index files
        integrity_name = "integrity.%d" % transaction_id
        integrity_file = os.path.join(self.path, integrity_name)
        with open(integrity_file + ".tmp", "wb") as fd:
            msgpack.pack(integrity, fd)
            flush_and_sync(fd)
        # Rename the integrity file first
        rename_tmp(integrity_file)
        sync_dir(self.path)
        # Rename the others after the integrity file is hypothetically on disk
        rename_tmp(hints_file)
        rename_tmp(index_file)
        sync_dir(self.path)
        # Remove old auxiliary files
        current = ".%d" % transaction_id
        for name in os.listdir(self.path):
            if not name.startswith(("index.", "hints.", "integrity.")):
                continue
            if name.endswith(current):
                continue
            # stale file from an older transaction, no longer needed
            os.unlink(os.path.join(self.path, name))
        self.index = None
    def check_free_space(self):
        """Pre-commit check for sufficient free space necessary to perform the commit.
        Raises InsufficientFreeSpaceError (after rolling back, or destroying a
        freshly created repo) when the conservative estimate does not fit.
        """
        # As a baseline we take four times the current (on-disk) index size.
        # At this point the index may only be updated by compaction, which won't resize it.
        # We still apply a factor of four so that a later, separate invocation can free space
        # (journaling all deletes for all chunks is one index size) or still make minor additions
        # (which may grow the index up to twice its current size).
        # Note that in a subsequent operation the committed index is still on-disk, therefore we
        # arrive at index_size * (1 + 2 + 1).
        # In that order: journaled deletes (1), hashtable growth (2), persisted index (1).
        required_free_space = self.index.size() * 4
        # Conservatively estimate hints file size:
        # 10 bytes for each segment-refcount pair, 10 bytes for each segment-space pair
        # Assume maximum of 5 bytes per integer. Segment numbers will usually be packed more densely (1-3 bytes),
        # as will refcounts and free space integers. For 5 MiB segments this estimate is good to ~20 PB repo size.
        # Add a generous 4K to account for constant format overhead.
        hints_size = len(self.segments) * 10 + len(self.compact) * 10 + 4096
        required_free_space += hints_size
        required_free_space += self.additional_free_space
        if not self.append_only:
            full_segment_size = self.max_segment_size + MAX_OBJECT_SIZE
            if len(self.compact) < 10:
                # This is mostly for the test suite to avoid overestimated free space needs. This can be annoying
                # if TMP is a small-ish tmpfs.
                compact_working_space = 0
                for segment, free in self.compact.items():
                    try:
                        compact_working_space += self.io.segment_size(segment) - free
                    except FileNotFoundError:
                        # looks like self.compact is referring to a nonexistent segment file, ignore it.
                        pass
                logger.debug("check_free_space: Few segments, not requiring a full free segment")
                compact_working_space = min(compact_working_space, full_segment_size)
                logger.debug(
                    "check_free_space: Calculated working space for compact as %d bytes",
                    compact_working_space,
                )
                required_free_space += compact_working_space
            else:
                # Keep one full worst-case segment free in non-append-only mode
                required_free_space += full_segment_size
        try:
            free_space = shutil.disk_usage(self.path).free
        except OSError as os_error:
            # Best effort: if free space cannot be determined, skip the check.
            logger.warning("Failed to check free space before committing: " + str(os_error))
            return
        logger.debug(f"check_free_space: Required bytes {required_free_space}, free bytes {free_space}")
        if free_space < required_free_space:
            if self.created:
                # A brand-new repository that does not fit: remove it again entirely.
                logger.error("Not enough free space to initialize repository at this location.")
                self.destroy()
            else:
                self._rollback(cleanup=True)
            formatted_required = format_file_size(required_free_space)
            formatted_free = format_file_size(free_space)
            raise self.InsufficientFreeSpaceError(formatted_required, formatted_free)
    def compact_segments(self, threshold):
        """Compact sparse segments by copying data into new segments
        Only segments whose freeable ratio exceeds *threshold* are rewritten:
        live PUTs are copied forward, superseded entries dropped (with shadow
        index / storage quota bookkeeping), and emptied segments are deleted
        after an intermediate or final commit (complete_xfer).
        """
        if not self.compact:
            logger.debug("Nothing to do: compact empty")
            return
        quota_use_before = self.storage_quota_use
        index_transaction_id = self.get_index_transaction_id()
        segments = self.segments
        unused = []  # list of segments, that are not used anymore
        def complete_xfer(intermediate=True):
            # complete the current transfer (when some target segment is full)
            nonlocal unused
            # commit the new, compact, used segments
            segment = self.io.write_commit(intermediate=intermediate)
            self.segments.setdefault(segment, 0)
            self.compact[segment] += LoggedIO.header_fmt.size
            logger.debug(
                "complete_xfer: Wrote %scommit at segment %d",
                "intermediate " if intermediate else "",
                segment,
            )
            # get rid of the old, sparse, unused segments. free space.
            for segment in unused:
                logger.debug("complete_xfer: Deleting unused segment %d", segment)
                count = self.segments.pop(segment)
                assert count == 0, "Corrupted segment reference count - corrupted index or hints"
                self.io.delete_segment(segment)
                del self.compact[segment]
            unused = []
        logger.debug("Compaction started (threshold is %i%%).", threshold * 100)
        pi = ProgressIndicatorPercent(
            total=len(self.compact),
            msg="Compacting segments %3.0f%%",
            step=1,
            msgid="repository.compact_segments",
        )
        for segment, freeable_space in sorted(self.compact.items()):
            if not self.io.segment_exists(segment):
                logger.warning("Segment %d not found, but listed in compaction data", segment)
                del self.compact[segment]
                pi.show()
                self._send_log()
                continue
            segment_size = self.io.segment_size(segment)
            freeable_ratio = 1.0 * freeable_space / segment_size
            # we want to compact if:
            # - we can free a considerable relative amount of space (freeable_ratio over some threshold)
            if not (freeable_ratio > threshold):
                logger.debug(
                    "Not compacting segment %d (maybe freeable: %2.2f%% [%d bytes])",
                    segment,
                    freeable_ratio * 100.0,
                    freeable_space,
                )
                pi.show()
                self._send_log()
                continue
            segments.setdefault(segment, 0)
            logger.debug(
                "Compacting segment %d with usage count %d (maybe freeable: %2.2f%% [%d bytes])",
                segment,
                segments[segment],
                freeable_ratio * 100.0,
                freeable_space,
            )
            for tag, key, offset, _, data in self.io.iter_objects(segment):
                if tag == TAG_COMMIT:
                    continue
                in_index = self.index.get(key)
                # is this object the one the index currently points at?
                is_index_object = in_index and (in_index.segment, in_index.offset) == (
                    segment,
                    offset,
                )
                if tag in (TAG_PUT2, TAG_PUT) and is_index_object:
                    # live object: copy it into a new segment and repoint the index
                    try:
                        new_segment, offset = self.io.write_put(key, data, raise_full=True)
                    except LoggedIO.SegmentFull:
                        complete_xfer()
                        new_segment, offset = self.io.write_put(key, data)
                    self.index[key] = NSIndexEntry(new_segment, offset, len(data))
                    segments.setdefault(new_segment, 0)
                    segments[new_segment] += 1
                    segments[segment] -= 1
                    if tag == TAG_PUT:
                        # old tag is PUT, but new will be PUT2 and use a bit more storage
                        self.storage_quota_use += self.io.ENTRY_HASH_SIZE
                elif tag in (TAG_PUT2, TAG_PUT) and not is_index_object:
                    # If this is a PUT shadowed by a later tag, then it will be gone when this segment is deleted after
                    # this loop. Therefore it is removed from the shadow index.
                    try:
                        self.shadow_index[key].remove(segment)
                    except (KeyError, ValueError):
                        # do not remove entry with empty shadowed_segments list here,
                        # it is needed for shadowed_put_exists code (see below)!
                        pass
                    self.storage_quota_use -= header_size(tag) + len(data)
                elif tag == TAG_DELETE and not in_index:
                    # If the shadow index doesn't contain this key, then we can't say if there's a shadowed older tag,
                    # therefore we do not drop the delete, but write it to a current segment.
                    key_not_in_shadow_index = key not in self.shadow_index
                    # If the key is in the shadow index and there is any segment with an older PUT of this
                    # key, we have a shadowed put.
                    shadowed_put_exists = key_not_in_shadow_index or any(
                        shadowed < segment for shadowed in self.shadow_index[key]
                    )
                    delete_is_not_stable = index_transaction_id is None or segment > index_transaction_id
                    if shadowed_put_exists or delete_is_not_stable:
                        # (introduced in 6425d16aa84be1eaaf88)
                        # This is needed to avoid object un-deletion if we crash between the commit and the deletion
                        # of old segments in complete_xfer().
                        #
                        # However, this only happens if the crash also affects the FS to the effect that file deletions
                        # did not materialize consistently after journal recovery. If they always materialize in-order
                        # then this is not a problem, because the old segment containing a deleted object would be
                        # deleted before the segment containing the delete.
                        #
                        # Consider the following series of operations if we would not do this, i.e. this entire if:
                        # would be removed.
                        # Columns are segments, lines are different keys (line 1 = some key, line 2 = some other key)
                        # Legend: P=TAG_PUT/TAG_PUT2, D=TAG_DELETE, c=commit, i=index is written for latest commit
                        #
                        # Segment | 1     | 2   | 3
                        # --------+-------+-----+------
                        # Key 1   | P     | D   |
                        # Key 2   | P     |     | P
                        # commits |   c i |   c |   c i
                        # --------+-------+-----+------
                        #                       ^- compact_segments starts
                        #                           ^- complete_xfer commits, after that complete_xfer deletes
                        #                              segments 1 and 2 (and then the index would be written).
                        #
                        # Now we crash. But only segment 2 gets deleted, while segment 1 is still around. Now key 1
                        # is suddenly undeleted (because the delete in segment 2 is now missing).
                        # Again, note the requirement here. We delete these in the correct order that this doesn't
                        # happen, and only if the FS materialization of these deletes is reordered or parts dropped
                        # this can happen.
                        # In this case it doesn't cause outright corruption, 'just' an index count mismatch, which
                        # will be fixed by borg-check --repair.
                        #
                        # Note that in this check the index state is the proxy for a "most definitely settled"
                        # repository state, i.e. the assumption is that *all* operations on segments <= index state
                        # are completed and stable.
                        try:
                            new_segment, size = self.io.write_delete(key, raise_full=True)
                        except LoggedIO.SegmentFull:
                            complete_xfer()
                            new_segment, size = self.io.write_delete(key)
                        self.compact[new_segment] += size
                        segments.setdefault(new_segment, 0)
                    else:
                        logger.debug(
                            "Dropping DEL for id %s - seg %d, iti %r, knisi %r, spe %r, dins %r, si %r",
                            bin_to_hex(key),
                            segment,
                            index_transaction_id,
                            key_not_in_shadow_index,
                            shadowed_put_exists,
                            delete_is_not_stable,
                            self.shadow_index.get(key),
                        )
                        # we did not keep the delete tag for key (see if-branch)
                        if not self.shadow_index[key]:
                            # shadowed segments list is empty -> remove it
                            del self.shadow_index[key]
            assert segments[segment] == 0, "Corrupted segment reference count - corrupted index or hints"
            unused.append(segment)
            pi.show()
            self._send_log()
        pi.finish()
        self._send_log()
        complete_xfer(intermediate=False)
        self.io.clear_empty_dirs()
        quota_use_after = self.storage_quota_use
        logger.info(
            "Compaction freed about %s repository space.",
            format_file_size(quota_use_before - quota_use_after),
        )
        logger.debug("Compaction completed.")
    def replay_segments(self, index_transaction_id, segments_transaction_id):
        """Rebuild the index by replaying segments in (index_tid, segments_tid].
        Used after a crash between writing the commit tag and saving the index.
        """
        # fake an old client, so that in case we do not have an exclusive lock yet, prepare_txn will upgrade the lock:
        remember_exclusive = self.exclusive
        self.exclusive = None
        self.prepare_txn(index_transaction_id, do_cleanup=False)
        try:
            segment_count = sum(1 for _ in self.io.segment_iterator())
            pi = ProgressIndicatorPercent(
                total=segment_count,
                msg="Replaying segments %3.0f%%",
                msgid="repository.replay_segments",
            )
            for i, (segment, filename) in enumerate(self.io.segment_iterator()):
                pi.show(i)
                self._send_log()
                if index_transaction_id is not None and segment <= index_transaction_id:
                    # already covered by the committed index
                    continue
                if segment > segments_transaction_id:
                    # beyond the last committed segment state - ignore
                    break
                objects = self.io.iter_objects(segment)
                self._update_index(segment, objects)
            pi.finish()
            self._send_log()
            self.write_index()
        finally:
            self.exclusive = remember_exclusive
            self.rollback()
    def _update_index(self, segment, objects, report=None):
        """some code shared between replay_segments and check
        Fold *objects* from *segment* into the index, maintaining per-segment
        usage counts, freeable-space accounting and storage quota use.
        *report* is a callable(msg) used in check mode for unexpected tags;
        if it is None, an unexpected tag raises CheckNeeded instead.
        """
        self.segments[segment] = 0
        for tag, key, offset, size, _ in objects:
            if tag in (TAG_PUT2, TAG_PUT):
                try:
                    # If this PUT supersedes an older PUT, mark the old segment for compaction and count the free space
                    in_index = self.index[key]
                    self.compact[in_index.segment] += header_size(tag) + size
                    self.segments[in_index.segment] -= 1
                except KeyError:
                    pass
                self.index[key] = NSIndexEntry(segment, offset, size)
                self.segments[segment] += 1
                self.storage_quota_use += header_size(tag) + size
            elif tag == TAG_DELETE:
                try:
                    # if the deleted PUT is not in the index, there is nothing to clean up
                    in_index = self.index.pop(key)
                except KeyError:
                    pass
                else:
                    if self.io.segment_exists(in_index.segment):
                        # the old index is not necessarily valid for this transaction (e.g. compaction); if the segment
                        # is already gone, then it was already compacted.
                        self.segments[in_index.segment] -= 1
                        self.compact[in_index.segment] += header_size(tag) + in_index.size
            elif tag == TAG_COMMIT:
                continue
            else:
                msg = f"Unexpected tag {tag} in segment {segment}"
                if report is None:
                    raise self.CheckNeeded(msg)
                else:
                    report(msg)
        if self.segments[segment] == 0:
            # nothing live remains in this segment: all of it is freeable
            self.compact[segment] = self.io.segment_size(segment)
    def _rebuild_sparse(self, segment):
        """Rebuild sparse bytes count for a single segment relative to the current index."""
        try:
            segment_size = self.io.segment_size(segment)
        except FileNotFoundError:
            # segment does not exist any more, remove it from the mappings.
            # note: no need to self.compact.pop(segment), as we start from empty mapping.
            self.segments.pop(segment)
            return
        if self.segments[segment] == 0:
            # no live objects at all: the whole segment is freeable
            self.compact[segment] = segment_size
            return
        self.compact[segment] = 0
        for tag, key, offset, size, _ in self.io.iter_objects(segment, read_data=False):
            if tag in (TAG_PUT2, TAG_PUT):
                in_index = self.index.get(key)
                if not in_index or (in_index.segment, in_index.offset) != (
                    segment,
                    offset,
                ):
                    # This PUT is superseded later.
                    self.compact[segment] += header_size(tag) + size
            elif tag == TAG_DELETE:
                # The outcome of the DELETE has been recorded in the PUT branch already.
                self.compact[segment] += header_size(tag) + size
def check(self, repair=False, max_duration=0):
    """Check repository consistency.

    This method verifies all segment checksums and makes sure
    the index is consistent with the data stored in the segments.

    :param repair: attempt to repair any problems found (not allowed in append-only mode,
        and not combinable with a partial check).
    :param max_duration: if nonzero, run a *partial* check for at most this many seconds,
        persisting the resume position in the repo config.
    :return: True if no errors were found (or repair was requested), else False.
    """
    if self.append_only and repair:
        raise ValueError(self.path + " is in append-only mode")
    error_found = False

    def report_error(msg):
        # record that at least one error happened and log it
        nonlocal error_found
        error_found = True
        logger.error(msg)

    logger.info("Starting repository check")
    assert not self._active_txn
    try:
        transaction_id = self.get_transaction_id()
        current_index = self.open_index(transaction_id)
        logger.debug("Read committed index of transaction %d", transaction_id)
    except Exception as exc:
        # committed index unreadable - fall back to what the segment files say
        transaction_id = self.io.get_segments_transaction_id()
        current_index = None
        logger.debug("Failed to read committed index (%s)", exc)
    if transaction_id is None:
        logger.debug("No segments transaction found")
        transaction_id = self.get_index_transaction_id()
    if transaction_id is None:
        logger.debug("No index transaction found, trying latest segment")
        transaction_id = self.io.get_latest_segment()
    if transaction_id is None:
        report_error("This repository contains no valid data.")
        return False
    if repair:
        # delete segment files left over by aborted transactions
        self.io.cleanup(transaction_id)
    segments_transaction_id = self.io.get_segments_transaction_id()
    logger.debug("Segment transaction is %s", segments_transaction_id)
    logger.debug("Determined transaction is %s", transaction_id)
    self.prepare_txn(None)  # self.index, self.compact, self.segments, self.shadow_index all empty now!
    segment_count = sum(1 for _ in self.io.segment_iterator())
    logger.debug("Found %d segments", segment_count)

    partial = bool(max_duration)
    assert not (repair and partial)
    mode = "partial" if partial else "full"
    if partial:
        # continue a past partial check (if any) or start one from beginning
        last_segment_checked = self.config.getint("repository", "last_segment_checked", fallback=-1)
        logger.info("Skipping to segments >= %d", last_segment_checked + 1)
    else:
        # start from the beginning and also forget about any potential past partial checks
        last_segment_checked = -1
        self.config.remove_option("repository", "last_segment_checked")
        self.save_config(self.path, self.config)
    t_start = time.monotonic()
    pi = ProgressIndicatorPercent(
        total=segment_count,
        msg="Checking segments %3.1f%%",
        step=0.1,
        msgid="repository.check",
    )
    segment = -1  # avoid uninitialized variable if there are no segment files at all
    for i, (segment, filename) in enumerate(self.io.segment_iterator()):
        pi.show(i)
        self._send_log()
        if segment <= last_segment_checked:
            # already covered by a previous partial check
            continue
        if segment > transaction_id:
            # uncommitted segments are not checked
            continue
        logger.debug("Checking segment file %s...", filename)
        try:
            objects = list(self.io.iter_objects(segment))
        except IntegrityError as err:
            report_error(str(err))
            objects = []
            if repair:
                self.io.recover_segment(segment, filename)
                objects = list(self.io.iter_objects(segment))
        if not partial:
            # rebuild the in-memory index from the segment contents
            self._update_index(segment, objects, report_error)
        if partial and time.monotonic() > t_start + max_duration:
            logger.info(
                "Finished partial segment check, last segment checked is %d",
                segment,
            )
            self.config.set("repository", "last_segment_checked", str(segment))
            self.save_config(self.path, self.config)
            break
    else:
        # loop ran to completion (no time-limit break): forget the resume position
        logger.info("Finished segment check at segment %d", segment)
        self.config.remove_option("repository", "last_segment_checked")
        self.save_config(self.path, self.config)

    pi.finish()
    self._send_log()
    # self.index, self.segments, self.compact now reflect the state of the segment files up to <transaction_id>.
    # We might need to add a commit tag if no committed segment is found.
    if repair and segments_transaction_id is None:
        report_error(f"Adding commit tag to segment {transaction_id}")
        self.io.segment = transaction_id + 1
        self.io.write_commit()
    if not partial:
        logger.info("Starting repository index check")
        if current_index and not repair:
            # current_index = "as found on disk"
            # self.index = "as rebuilt in-memory from segments"
            if len(current_index) != len(self.index):
                report_error("Index object count mismatch.")
                logger.error("Committed index: %d objects", len(current_index))
                logger.error("Rebuilt index: %d objects", len(self.index))
            else:
                logger.info("Index object count match.")
            line_format = "ID: %-64s rebuilt index: %-16s committed index: %-16s"
            not_found = "<not found>"
            # report entries that differ between the rebuilt and the committed index
            for key, value in self.index.iteritems():
                current_value = current_index.get(key, not_found)
                if current_value != value:
                    logger.warning(line_format, bin_to_hex(key), value, current_value)
                    self._send_log()
            # report entries only present in the committed index
            for key, current_value in current_index.iteritems():
                if key in self.index:
                    continue
                value = self.index.get(key, not_found)
                if current_value != value:
                    logger.warning(line_format, bin_to_hex(key), value, current_value)
                    self._send_log()
        if repair:
            self.write_index()
    self.rollback()
    if error_found:
        if repair:
            logger.info("Finished %s repository check, errors found and repaired.", mode)
        else:
            logger.error("Finished %s repository check, errors found.", mode)
    else:
        logger.info("Finished %s repository check, no problems found.", mode)
    return not error_found or repair
def scan_low_level(self, segment=None, offset=None):
    """Very low level scan over all segment file entries.

    It does NOT care about what's committed and what not.
    It does NOT care whether an object might be deleted or superseded later.
    It just yields anything it finds in the segment files.

    This is intended as a last-resort way to get access to all repo contents of damaged repos,
    when there is uncommitted, but valuable data in there...

    When segment or segment+offset is given, limit processing to this location only.

    Yields (key, data, tag, segment, offset) tuples.
    """
    for current_segment, filename in self.io.segment_iterator(start_segment=segment, end_segment=segment):
        try:
            for tag, key, current_offset, _, data in self.io.iter_objects(
                segment=current_segment, offset=offset or 0
            ):
                if offset is not None and current_offset > offset:
                    # we were asked for one specific offset only - stop after it
                    break
                yield key, data, tag, current_segment, current_offset
        except IntegrityError as err:
            # fix: pass %-style args lazily instead of pre-formatting the message with "%" -
            # formatting only happens if the record is emitted, and a literal "%" inside
            # filename/err can no longer break the formatting.
            logger.error(
                "Segment %d (%s) has IntegrityError(s) [%s] - skipping.", current_segment, filename, str(err)
            )
def _rollback(self, *, cleanup):
    """Discard all in-memory transaction state; with cleanup=True also delete uncommitted segment files."""
    if cleanup:
        committed = self.io.get_segments_transaction_id()
        self.io.cleanup(committed)
    # forget index / txn flags so the next operation starts from the committed state
    self.index, self._active_txn, self.transaction_doomed = None, False, None
def rollback(self):
    """Roll back the current transaction without deleting segment files.

    note: when used in remote mode, this is time limited, see RemoteRepository.shutdown_time.
    """
    self._rollback(cleanup=False)
def __len__(self):
    """Number of objects in the repository index (opens the committed index on demand)."""
    index = self.index
    if not index:
        index = self.index = self.open_index(self.get_transaction_id())
    return len(index)
def __contains__(self, id):
    """Membership test against the repository index (opens the committed index on demand)."""
    index = self.index
    if not index:
        index = self.index = self.open_index(self.get_transaction_id())
    return id in index
def list(self, limit=None, marker=None, mask=0, value=0):
    """
    list <limit> IDs starting from after id <marker> - in index (pseudo-random) order.

    if mask and value are given, only return IDs where flags & mask == value (default: all IDs).
    """
    if not self.index:
        self.index = self.open_index(self.get_transaction_id())
    collected = []
    for one_id, _ in self.index.iteritems(marker=marker, mask=mask, value=value):
        if limit is not None and len(collected) == limit:
            break
        collected.append(one_id)
    return collected
def scan(self, limit=None, state=None):
    """
    list (the next) <limit> chunk IDs from the repository - in on-disk order, so that a client
    fetching data in this order does linear reads and reuses stuff from disk cache.

    state can either be None (initially, when starting to scan) or the object
    returned from a previous scan call (meaning "continue scanning").

    returns: list of chunk ids, state

    We rely on repository.check() having run already (either now or some time before) and that:

    - if we are called from a borg check command, self.index is a valid, fresh, in-sync repo index.
    - if we are called from elsewhere, either self.index or the on-disk index is valid and in-sync.
    - the repository segments are valid (no CRC errors).
      if we encounter CRC errors in segment entry headers, rest of segment is skipped.
    """
    if limit is not None and limit < 1:
        raise ValueError("please use limit > 0 or limit = None")
    transaction_id = self.get_transaction_id()
    if not self.index:
        self.index = self.open_index(transaction_id)
    # smallest valid seg is <uint32> 0, smallest valid offs is <uint32> 8
    start_segment, start_offset, end_segment = state if state is not None else (0, 0, transaction_id)
    ids, segment, offset = [], 0, 0
    # we only scan up to end_segment == transaction_id to scan only **committed** chunks,
    # avoiding scanning into newly written chunks.
    for segment, filename in self.io.segment_iterator(start_segment, end_segment):
        # the start_offset we potentially got from state is only valid for the start_segment we also got
        # from there. in case the segment file vanished meanwhile, the segment_iterator might never
        # return a segment/filename corresponding to the start_segment and we must start from offset 0 then.
        start_offset = start_offset if segment == start_segment else 0
        obj_iterator = self.io.iter_objects(segment, start_offset, read_data=False)
        while True:
            try:
                tag, id, offset, size, _ = next(obj_iterator)
            except (StopIteration, IntegrityError):
                # either end-of-segment or an error - we can not seek to objects at
                # higher offsets than one that has an error in the header fields.
                break
            if start_offset > 0:
                # we are using a state != None and it points to the last object we have already
                # returned in the previous scan() call - thus, we need to skip this one object.
                # also, for the next segment, we need to start at offset 0.
                start_offset = 0
                continue
            if tag in (TAG_PUT2, TAG_PUT):
                in_index = self.index.get(id)
                if in_index and (in_index.segment, in_index.offset) == (
                    segment,
                    offset,
                ):
                    # we have found an existing and current object
                    ids.append(id)
                    if len(ids) == limit:
                        return ids, (segment, offset, end_segment)
    return ids, (segment, offset, end_segment)
def flags(self, id, mask=0xFFFFFFFF, value=None):
    """
    query and optionally set flags

    :param id: id (key) of object
    :param mask: bitmask for flags (default: operate on all 32 bits)
    :param value: value to set masked bits to (default: do not change any flags)
    :return: (previous) flags value (only masked bits)
    """
    index = self.index
    if not index:
        # lazily open the committed index if no transaction is active
        index = self.index = self.open_index(self.get_transaction_id())
    return index.flags(id, mask, value)
def flags_many(self, ids, mask=0xFFFFFFFF, value=None):
    """Bulk variant of flags(): apply the same mask/value to every id, returning previous values in order."""
    results = []
    for one_id in ids:
        results.append(self.flags(one_id, mask, value))
    return results
def get(self, id, read_data=True):
    """Return the stored object for *id*; raise self.ObjectNotFound if it is not in the index."""
    if not self.index:
        self.index = self.open_index(self.get_transaction_id())
    try:
        # legacy index entries have no size element - pad with None so we always unpack three fields
        entry_segment, entry_offset, entry_size = (self.index[id] + (None,))[:3]
        return self.io.read(
            entry_segment,
            entry_offset,
            id,
            expected_size=entry_size,
            read_data=read_data,
        )
    except KeyError:
        raise self.ObjectNotFound(id, self.path) from None
def get_many(self, ids, read_data=True, is_preloaded=False):
    """Generator over get() for each id in *ids*; is_preloaded only matters for remote repositories."""
    yield from (self.get(one_id, read_data=read_data) for one_id in ids)
def put(self, id, data, wait=True):
    """put a repo object

    Note: when doing calls with wait=False this gets async and caller must
    deal with async results / exceptions later.

    Raises self.StorageQuotaExceeded (and dooms the transaction) if the configured
    storage quota would be exceeded.
    """
    if not self._active_txn:
        self.prepare_txn(self.get_transaction_id())
    try:
        in_index = self.index[id]
    except KeyError:
        pass
    else:
        # note: doing a delete first will do some bookkeeping.
        # we do not want to update the shadow_index here, because
        # we know already that we will PUT to this id, so it will
        # be in the repo index (and we won't need it in the shadow_index).
        self._delete(
            id,
            in_index.segment,
            in_index.offset,
            in_index.size,
            update_shadow_index=False,
        )
    segment, offset = self.io.write_put(id, data)
    # quota accounting: entry header plus payload
    self.storage_quota_use += header_size(TAG_PUT2) + len(data)
    # one more live object in this segment
    self.segments.setdefault(segment, 0)
    self.segments[segment] += 1
    self.index[id] = NSIndexEntry(segment, offset, len(data))
    if self.storage_quota and self.storage_quota_use > self.storage_quota:
        # remember the exception so commit can refuse to commit this transaction
        self.transaction_doomed = self.StorageQuotaExceeded(
            format_file_size(self.storage_quota),
            format_file_size(self.storage_quota_use),
        )
        raise self.transaction_doomed
def delete(self, id, wait=True):
    """delete a repo object

    Note: when doing calls with wait=False this gets async and caller must
    deal with async results / exceptions later.

    Raises self.ObjectNotFound if *id* is not in the repo index.
    """
    if not self._active_txn:
        self.prepare_txn(self.get_transaction_id())
    try:
        in_index = self.index.pop(id)
    except KeyError:
        raise self.ObjectNotFound(id, self.path) from None
    # if we get here, there is an object with this id in the repo,
    # we write a DEL here that shadows the respective PUT.
    # after the delete, the object is not in the repo index any more,
    # for the compaction code, we need to update the shadow_index in this case.
    self._delete(
        id,
        in_index.segment,
        in_index.offset,
        in_index.size,
        update_shadow_index=True,
    )
def _delete(self, id, segment, offset, size, *, update_shadow_index):
    # common code used by put and delete
    # segment/offset/size describe the existing (now shadowed) PUT entry of *id*.
    if update_shadow_index:
        # remember in which segment the shadowed PUT lives, for the compaction code
        self.shadow_index.setdefault(id, []).append(segment)
    # one live object fewer in the old segment; its bytes become compactable
    self.segments[segment] -= 1
    # NOTE(review): header_size(TAG_PUT2) is used even for entries that might be
    # legacy TAG_PUT - assumed intentional/close enough for sparse accounting; confirm.
    self.compact[segment] += header_size(TAG_PUT2) + size
    # write the DELETE entry; write_delete returns (segment, entry_size) - the DELETE
    # entry itself is also immediately compactable.
    segment, size = self.io.write_delete(id)
    self.compact[segment] += size
    self.segments.setdefault(segment, 0)
def async_response(self, wait=True):
    """Get one async result (only applies to remote repositories).

    async commands (== calls with wait=False, e.g. delete and put) have no results,
    but may raise exceptions. These async exceptions must get collected later via
    async_response() calls. Repeat the call until it returns None.
    The previous calls might either return one (non-None) result or raise an exception.

    If wait=True is given and there are outstanding responses, it will wait for them
    to arrive. With wait=False, it will only return already received responses.
    """
    # intentionally empty: a local repository has no async responses, so this returns None.
def preload(self, ids):
    """Preload objects (only applies to remote repositories)"""
    # intentionally empty: nothing to preload for a local repository.
class LoggedIO:
    # Low-level, log-structured segment file I/O: append-only entries
    # (PUT/PUT2/DELETE/COMMIT), each with a crc32-protected header.

    class SegmentFull(Exception):
        """raised when a segment is full, before opening next"""

    # full entry header: crc32 (I), size (I), tag (B) - little endian
    header_fmt = struct.Struct("<IIB")
    assert header_fmt.size == 9
    # header without the leading crc field (what the crc is computed over, plus payload)
    header_no_crc_fmt = struct.Struct("<IB")
    assert header_no_crc_fmt.size == 5
    crc_fmt = struct.Struct("<I")
    assert crc_fmt.size == 4

    # precomputed on-disk bytes of a COMMIT entry (crc + size=9 + TAG_COMMIT)
    _commit = header_no_crc_fmt.pack(9, TAG_COMMIT)
    COMMIT = crc_fmt.pack(crc32(_commit)) + _commit

    HEADER_ID_SIZE = header_fmt.size + 32  # header plus the 32-byte key
    ENTRY_HASH_SIZE = 8  # xxh64 digest size used by PUT2 entries
def __init__(self, path, limit, segments_per_dir, capacity=90):
    """
    :param path: repository base directory (segment files live under <path>/data/<dir>/<segment>)
    :param limit: soft maximum segment file size in bytes (checked in get_write_fd)
    :param segments_per_dir: fan-out - how many segment files share one data subdirectory
    :param capacity: max number of cached read fds
    """
    self.path = path
    # LRU cache of open read fds keyed by segment number; evicted entries are closed via _close_fd
    self.fds = LRUCache(capacity, dispose=self._close_fd)
    self.segment = 0  # number of the current write segment
    self.limit = limit
    self.segments_per_dir = segments_per_dir
    self.offset = 0  # current write position inside the write segment
    self._write_fd = None  # fd of the currently open write segment (None if none open)
    self._fds_cleaned = 0  # monotonic timestamp of the last stale-fd sweep in get_fd()
def close(self):
    """Close the write segment and all cached read fds; disables this LoggedIO."""
    self.close_segment()
    self.fds.clear()
    self.fds = None  # Just to make sure we're disabled
def _close_fd(self, ts_fd):
    # dispose callback for the fd LRU cache: ask the OS to drop cached pages
    # for this file, then close the fd. ts_fd is a (timestamp, fd) pair.
    ts, fd = ts_fd
    safe_fadvise(fd.fileno(), 0, 0, "DONTNEED")
    fd.close()
def get_segment_dirs(
    self,
    data_dir,
    start_index=MIN_SEGMENT_DIR_INDEX,
    end_index=MAX_SEGMENT_DIR_INDEX,
):
    """Return a lazy iterator over the segment dirs in *data_dir* as `os.DirEntry` objects.

    Start and end are inclusive.
    """
    def wanted(entry):
        return entry.is_dir() and entry.name.isdigit() and start_index <= int(entry.name) <= end_index

    return filter(wanted, os.scandir(data_dir))
def get_segment_files(self, segment_dir, start_index=MIN_SEGMENT_INDEX, end_index=MAX_SEGMENT_INDEX):
    """Return a lazy iterator over the segment files in *segment_dir* as `os.DirEntry` objects.

    Start and end are inclusive.
    """
    def wanted(entry):
        return entry.is_file() and entry.name.isdigit() and start_index <= int(entry.name) <= end_index

    return filter(wanted, os.scandir(segment_dir))
def segment_iterator(self, start_segment=None, end_segment=None, reverse=False):
    """Yield (segment_number, file_path) for all segment files in [start_segment, end_segment].

    With reverse=True, iterate from the highest segment number down to the lowest;
    start/end default to the full valid segment range (swapped when reverse).
    """
    if start_segment is None:
        start_segment = MIN_SEGMENT_INDEX if not reverse else MAX_SEGMENT_INDEX
    if end_segment is None:
        end_segment = MAX_SEGMENT_INDEX if not reverse else MIN_SEGMENT_INDEX
    data_path = os.path.join(self.path, "data")
    start_segment_dir = start_segment // self.segments_per_dir
    end_segment_dir = end_segment // self.segments_per_dir
    # when reversed, start/end are swapped, so the dir scan bounds must be swapped back
    if not reverse:
        dirs = self.get_segment_dirs(data_path, start_index=start_segment_dir, end_index=end_segment_dir)
    else:
        dirs = self.get_segment_dirs(data_path, start_index=end_segment_dir, end_index=start_segment_dir)
    dirs = sorted(dirs, key=lambda dir: int(dir.name), reverse=reverse)
    for dir in dirs:
        if not reverse:
            files = self.get_segment_files(dir, start_index=start_segment, end_index=end_segment)
        else:
            files = self.get_segment_files(dir, start_index=end_segment, end_index=start_segment)
        files = sorted(files, key=lambda file: int(file.name), reverse=reverse)
        for file in files:
            # Note: Do not filter out logically deleted segments (see "File system interaction" above),
            # since this is used by cleanup and txn state detection as well.
            yield int(file.name), file.path
def get_latest_segment(self):
    """Return the highest existing segment number, or None if there are no segment files."""
    entry = next(iter(self.segment_iterator(reverse=True)), None)
    return None if entry is None else entry[0]
def get_segments_transaction_id(self):
    """Return the last committed segment, or None if no committed segment exists."""
    committed = (seg for seg, _path in self.segment_iterator(reverse=True) if self.is_committed_segment(seg))
    return next(committed, None)
def cleanup(self, transaction_id):
    """Delete segment files left by aborted transactions (everything after *transaction_id*)."""
    self.close_segment()
    self.segment = transaction_id + 1
    deleted = 0
    for seg, _path in self.segment_iterator(reverse=True):
        if seg <= transaction_id:
            # segments are visited in descending order, so we can stop at the first committed one
            break
        self.delete_segment(seg)
        deleted += 1
    logger.debug(
        "Cleaned up %d uncommitted segment files (== everything after segment %d).",
        deleted,
        transaction_id,
    )
def is_committed_segment(self, segment):
    """Check if segment ends with a COMMIT_TAG tag"""
    try:
        iterator = self.iter_objects(segment)
    except IntegrityError:
        return False
    # fast path: check whether the last header-sized bytes of the file are exactly
    # the precomputed COMMIT entry bytes.
    with open(self.segment_filename(segment), "rb") as fd:
        try:
            fd.seek(-self.header_fmt.size, os.SEEK_END)
        except OSError as e:
            # return False if segment file is empty or too small
            if e.errno == errno.EINVAL:
                return False
            raise e
        if fd.read(self.header_fmt.size) != self.COMMIT:
            return False
    # slow path: walk all entries and verify the COMMIT really is the *last* entry
    # (a commit followed by any other entry does not count as committed).
    seen_commit = False
    while True:
        try:
            tag, key, offset, _, _ = next(iterator)
        except IntegrityError:
            return False
        except StopIteration:
            break
        if tag == TAG_COMMIT:
            seen_commit = True
            continue
        if seen_commit:
            # an entry after a commit - segment is not cleanly committed
            return False
    return seen_commit
def segment_filename(self, segment):
    """Map a segment number to its file path: <path>/data/<segment // segments_per_dir>/<segment>."""
    subdir = str(segment // self.segments_per_dir)
    return os.path.join(self.path, "data", subdir, str(segment))
def get_write_fd(self, no_new=False, want_new=False, raise_full=False):
    """Return the fd of the current write segment, rolling over to a new segment when needed.

    :param no_new: never roll over to a new segment
    :param want_new: force a new segment (unless no_new)
    :param raise_full: raise SegmentFull instead of rolling over when the segment is full
    """
    if not no_new and (want_new or self.offset and self.offset > self.limit):
        if raise_full:
            raise self.SegmentFull
        self.close_segment()
    if not self._write_fd:
        if self.segment % self.segments_per_dir == 0:
            # first segment of a new fan-out dir: create it and persist the dir entry
            dirname = os.path.join(self.path, "data", str(self.segment // self.segments_per_dir))
            if not os.path.exists(dirname):
                os.mkdir(dirname)
                sync_dir(os.path.join(self.path, "data"))
        self._write_fd = SyncFile(self.segment_filename(self.segment), binary=True)
        self._write_fd.write(MAGIC)
        self.offset = MAGIC_LEN
        if self.segment in self.fds:
            # we may have a cached fd for a segment file we already deleted and
            # we are writing now a new segment file to same file name. get rid of
            # the cached fd that still refers to the old file, so it will later
            # get repopulated (on demand) with a fd that refers to the new file.
            del self.fds[self.segment]
    return self._write_fd
def get_fd(self, segment):
    """Return a (possibly cached) read fd for *segment*.

    note: get_fd() returns a fd with undefined file pointer position,
    so callers must always seek() to desired position afterwards.
    """
    now = time.monotonic()

    def open_fd():
        # open the segment file and remember it (with its open timestamp) in the cache
        fd = open(self.segment_filename(segment), "rb")
        self.fds[segment] = (now, fd)
        return fd

    def clean_old():
        # we regularly get rid of all old FDs here:
        if now - self._fds_cleaned > FD_MAX_AGE // 8:
            self._fds_cleaned = now
            for k, ts_fd in list(self.fds.items()):
                ts, fd = ts_fd
                if now - ts > FD_MAX_AGE:
                    # we do not want to touch long-unused file handles to
                    # avoid ESTALE issues (e.g. on network filesystems).
                    del self.fds[k]

    clean_old()
    if self._write_fd is not None:
        # without this, we have a test failure now
        # NOTE(review): presumably makes pending writes visible to the read fd - confirm.
        self._write_fd.sync()
    try:
        ts, fd = self.fds[segment]
    except KeyError:
        fd = open_fd()
    else:
        # we only have fresh enough stuff here.
        # update the timestamp of the lru cache entry.
        self.fds.replace(segment, (now, fd))
    return fd
def close_segment(self):
    """Close the current write segment (if any), advance the segment number and reset the offset."""
    # set self._write_fd to None early to guard against reentry from error handling code paths:
    fd, self._write_fd = self._write_fd, None
    if fd is None:
        return
    self.segment += 1
    self.offset = 0
    fd.close()
def delete_segment(self, segment):
    """Delete a segment file; dropping a stale cached read fd for it first."""
    if segment in self.fds:
        del self.fds[segment]
    try:
        safe_unlink(self.segment_filename(segment))
    except FileNotFoundError:
        # already gone - nothing to do
        pass
def clear_empty_dirs(self):
    """Delete empty segment dirs, i.e those with no segment files."""
    data_dir = os.path.join(self.path, "data")
    segment_dirs = self.get_segment_dirs(data_dir)
    for segment_dir in segment_dirs:
        try:
            # os.rmdir will only delete the directory if it is empty
            # so we don't need to explicitly check for emptiness first.
            os.rmdir(segment_dir)
        except OSError:
            # OSError is raised by os.rmdir if directory is not empty. This is expected.
            # Its subclass FileNotFoundError may be raised if the directory already does not exist. Ignorable.
            pass
    # persist the directory removals
    sync_dir(data_dir)
def segment_exists(self, segment):
    """Whether the segment file exists with a nonzero size.

    When deleting segments, they are first truncated. If truncate(2) and unlink(2) are split
    across FS transactions, then logically deleted segments will show up as truncated -
    those 0-byte files are treated as non-existing here.
    """
    filename = self.segment_filename(segment)
    if not os.path.exists(filename):
        return False
    return os.path.getsize(filename)
def segment_size(self, segment):
    """Return the on-disk size of the segment file in bytes (OSError if it does not exist)."""
    return os.path.getsize(self.segment_filename(segment))
def get_segment_magic(self, segment):
    """Return the first MAGIC_LEN bytes of the segment file (its magic marker)."""
    fd = self.get_fd(segment)
    fd.seek(0)
    return fd.read(MAGIC_LEN)
def iter_objects(self, segment, offset=0, read_data=True):
    """
    Return object iterator for *segment*.

    See the _read() docstring about confidence in the returned data.

    The iterator returns five-tuples of (tag, key, offset, size, data).
    """
    fd = self.get_fd(segment)
    fd.seek(offset)
    if offset == 0:
        # we are touching this segment for the first time, check the MAGIC.
        # Repository.scan() calls us with segment > 0 when it continues an ongoing iteration
        # from a marker position - but then we have checked the magic before already.
        if fd.read(MAGIC_LEN) != MAGIC:
            raise IntegrityError(f"Invalid segment magic [segment {segment}, offset {0}]")
        offset = MAGIC_LEN
    header = fd.read(self.header_fmt.size)
    while header:
        size, tag, key, data = self._read(
            fd,
            header,
            segment,
            offset,
            (TAG_PUT2, TAG_DELETE, TAG_COMMIT, TAG_PUT),
            read_data=read_data,
        )
        # tuple[3]: corresponds to len(data) == length of the full chunk payload (meta_len+enc_meta+enc_data)
        # tuple[4]: data will be None if read_data is False.
        yield tag, key, offset, size - header_size(tag), data
        assert size >= 0
        offset += size
        # we must get the fd via get_fd() here again as we yielded to our caller and it might
        # have triggered closing of the fd we had before (e.g. by calling io.read() for
        # different segment(s)).
        # by calling get_fd() here again we also make our fd "recently used" so it likely
        # does not get kicked out of self.fds LRUcache.
        fd = self.get_fd(segment)
        fd.seek(offset)
        header = fd.read(self.header_fmt.size)
def recover_segment(self, segment, filename):
    """Rewrite a damaged segment file in place, keeping only entries that validate.

    Scans the file byte-by-byte: entries whose header/crc/hash check out are copied to
    the rewritten file, everything else is skipped (by advancing one byte at a time).
    """
    logger.info("Attempting to recover " + filename)
    if segment in self.fds:
        # drop any cached read fd that still refers to the old (damaged) file
        del self.fds[segment]
    if os.path.getsize(filename) < MAGIC_LEN + self.header_fmt.size:
        # this is either a zero-byte file (which would crash mmap() below) or otherwise
        # just too small to be a valid non-empty segment file, so do a shortcut here:
        with SaveFile(filename, binary=True) as fd:
            fd.write(MAGIC)
        return
    with SaveFile(filename, binary=True) as dst_fd:
        with open(filename, "rb") as src_fd:
            # note: file must not be 0 size or mmap() will crash.
            with mmap.mmap(src_fd.fileno(), 0, access=mmap.ACCESS_READ) as mm:
                # memoryview context manager is problematic, see https://bugs.python.org/issue35686
                data = memoryview(mm)
                d = data
                try:
                    dst_fd.write(MAGIC)
                    while len(d) >= self.header_fmt.size:
                        crc, size, tag = self.header_fmt.unpack(d[: self.header_fmt.size])
                        size_invalid = size > MAX_OBJECT_SIZE or size < self.header_fmt.size or size > len(d)
                        if size_invalid or tag > MAX_TAG_ID:
                            # implausible header - resync by advancing one byte
                            d = d[1:]
                            continue
                        if tag == TAG_PUT2:
                            c_offset = self.HEADER_ID_SIZE + self.ENTRY_HASH_SIZE
                            # skip if header is invalid
                            if crc32(d[4:c_offset]) & 0xFFFFFFFF != crc:
                                d = d[1:]
                                continue
                            # skip if content is invalid
                            if (
                                self.entry_hash(d[4 : self.HEADER_ID_SIZE], d[c_offset:size])
                                != d[self.HEADER_ID_SIZE : c_offset]
                            ):
                                d = d[1:]
                                continue
                        elif tag in (TAG_DELETE, TAG_COMMIT, TAG_PUT):
                            # legacy entries: crc covers header + payload
                            if crc32(d[4:size]) & 0xFFFFFFFF != crc:
                                d = d[1:]
                                continue
                        else:  # tag unknown
                            d = d[1:]
                            continue
                        # entry validated - copy it verbatim and jump over it
                        dst_fd.write(d[:size])
                        d = d[size:]
                finally:
                    # release the memoryview before the mmap context closes
                    del d
                    data.release()
def entry_hash(self, *data):
    """Return the streaming xxh64 digest over all given byte chunks (used by PUT2 entries)."""
    h = StreamingXXH64()
    for d in data:
        h.update(d)
    return h.digest()
def read(self, segment, offset, id, *, read_data=True, expected_size=None):
    """
    Read entry from *segment* at *offset* with *id*.

    See the _read() docstring about confidence in the returned data.

    Raises IntegrityError if the entry's key does not match *id* or its payload size
    disagrees with *expected_size* (when given).
    """
    if segment == self.segment and self._write_fd:
        # reading from the segment we are currently writing: flush pending writes first
        self._write_fd.sync()
    fd = self.get_fd(segment)
    fd.seek(offset)
    header = fd.read(self.header_fmt.size)
    size, tag, key, data = self._read(fd, header, segment, offset, (TAG_PUT2, TAG_PUT), read_data=read_data)
    if id != key:
        raise IntegrityError(
            f"Invalid segment entry header, is not for wanted id [segment {segment}, offset {offset}]"
        )
    data_size_from_header = size - header_size(tag)
    if expected_size is not None and expected_size != data_size_from_header:
        raise IntegrityError(
            f"size from repository index: {expected_size} != " f"size from entry header: {data_size_from_header}"
        )
    return data
def _read(self, fd, header, segment, offset, acceptable_tags, read_data=True):
    """
    Code shared by read() and iter_objects().

    Confidence in returned data:
    PUT2 tags, read_data == True: crc32 check (header) plus digest check (header+data)
    PUT2 tags, read_data == False: crc32 check (header)
    PUT tags, read_data == True: crc32 check (header+data)
    PUT tags, read_data == False: crc32 check can not be done, all data obtained must be considered informational

    read_data == False behaviour:
    PUT2 tags: return enough of the chunk so that the client is able to decrypt the metadata,
    do not read, but just seek over the data.
    PUT tags: return None and just seek over the data.

    Returns (size, tag, key, data); *size* is the full on-disk entry size incl. header.
    """

    def check_crc32(wanted, header, *data):
        result = crc32(memoryview(header)[4:])  # skip first 32 bits of the header, they contain the crc.
        for d in data:
            result = crc32(d, result)
        if result & 0xFFFFFFFF != wanted:
            raise IntegrityError(f"Segment entry header checksum mismatch [segment {segment}, offset {offset}]")

    # See comment on MAX_TAG_ID for details
    assert max(acceptable_tags) <= MAX_TAG_ID, "Exceeding MAX_TAG_ID will break backwards compatibility"
    key = data = None
    fmt = self.header_fmt
    try:
        hdr_tuple = fmt.unpack(header)
    except struct.error as err:
        raise IntegrityError(f"Invalid segment entry header [segment {segment}, offset {offset}]: {err}") from None
    crc, size, tag = hdr_tuple
    length = size - fmt.size  # we already read the header
    if size > MAX_OBJECT_SIZE:
        # if you get this on an archive made with borg < 1.0.7 and millions of files and
        # you need to restore it, you can disable this check by using "if False:" above.
        raise IntegrityError(f"Invalid segment entry size {size} - too big [segment {segment}, offset {offset}]")
    if size < fmt.size:
        raise IntegrityError(f"Invalid segment entry size {size} - too small [segment {segment}, offset {offset}]")
    if tag not in (TAG_PUT2, TAG_DELETE, TAG_COMMIT, TAG_PUT):
        raise IntegrityError(
            f"Invalid segment entry header, did not get a known tag " f"[segment {segment}, offset {offset}]"
        )
    if tag not in acceptable_tags:
        raise IntegrityError(
            f"Invalid segment entry header, did not get acceptable tag " f"[segment {segment}, offset {offset}]"
        )
    if tag == TAG_COMMIT:
        check_crc32(crc, header)
        # that's all for COMMITs.
    else:
        # all other tags (TAG_PUT2, TAG_DELETE, TAG_PUT) have a key
        key = fd.read(32)
        length -= 32
        if len(key) != 32:
            raise IntegrityError(
                f"Segment entry key short read [segment {segment}, offset {offset}]: "
                f"expected {32}, got {len(key)} bytes"
            )
        if tag == TAG_DELETE:
            check_crc32(crc, header, key)
            # that's all for DELETEs.
        else:
            # TAG_PUT: we can not do a crc32 header check here, because the crc32 is computed over header+data!
            # for the check, see code below when read_data is True.
            if tag == TAG_PUT2:
                entry_hash = fd.read(self.ENTRY_HASH_SIZE)
                length -= self.ENTRY_HASH_SIZE
                if len(entry_hash) != self.ENTRY_HASH_SIZE:
                    raise IntegrityError(
                        f"Segment entry hash short read [segment {segment}, offset {offset}]: "
                        f"expected {self.ENTRY_HASH_SIZE}, got {len(entry_hash)} bytes"
                    )
                check_crc32(crc, header, key, entry_hash)
            if not read_data:
                if tag == TAG_PUT2:
                    # PUT2 is only used in new repos and they also have different RepoObj layout,
                    # supporting separately encrypted metadata and data.
                    # In this case, we return enough bytes so the client can decrypt the metadata
                    # and seek over the rest (over the encrypted data).
                    meta_len_size = RepoObj.meta_len_hdr.size
                    meta_len = fd.read(meta_len_size)
                    length -= meta_len_size
                    if len(meta_len) != meta_len_size:
                        raise IntegrityError(
                            f"Segment entry meta length short read [segment {segment}, offset {offset}]: "
                            f"expected {meta_len_size}, got {len(meta_len)} bytes"
                        )
                    ml = RepoObj.meta_len_hdr.unpack(meta_len)[0]
                    meta = fd.read(ml)
                    length -= ml
                    if len(meta) != ml:
                        raise IntegrityError(
                            f"Segment entry meta short read [segment {segment}, offset {offset}]: "
                            f"expected {ml}, got {len(meta)} bytes"
                        )
                    data = meta_len + meta  # shortened chunk - enough so the client can decrypt the metadata
                    # we do not have a checksum for this data, but the client's AEAD crypto will check it.
                # in any case, we seek over the remainder of the chunk
                oldpos = fd.tell()
                seeked = fd.seek(length, os.SEEK_CUR) - oldpos
                if seeked != length:
                    raise IntegrityError(
                        f"Segment entry data short seek [segment {segment}, offset {offset}]: "
                        f"expected {length}, got {seeked} bytes"
                    )
            else:  # read data!
                data = fd.read(length)
                if len(data) != length:
                    raise IntegrityError(
                        f"Segment entry data short read [segment {segment}, offset {offset}]: "
                        f"expected {length}, got {len(data)} bytes"
                    )
                if tag == TAG_PUT2:
                    if self.entry_hash(memoryview(header)[4:], key, data) != entry_hash:
                        raise IntegrityError(f"Segment entry hash mismatch [segment {segment}, offset {offset}]")
                elif tag == TAG_PUT:
                    check_crc32(crc, header, key, data)
    return size, tag, key, data
def write_put(self, id, data, raise_full=False):
    """Append a PUT2 entry (crc + header + id + entry_hash + data) to the write segment.

    :return: (segment, offset) where the entry was written.
    :raises IntegrityError: if *data* exceeds MAX_DATA_SIZE.
    """
    data_size = len(data)
    if data_size > MAX_DATA_SIZE:
        # this would push the segment entry size beyond MAX_OBJECT_SIZE.
        raise IntegrityError(f"More than allowed put data [{data_size} > {MAX_DATA_SIZE}]")
    # the manifest always starts a fresh segment
    fd = self.get_write_fd(want_new=(id == Manifest.MANIFEST_ID), raise_full=raise_full)
    size = data_size + self.HEADER_ID_SIZE + self.ENTRY_HASH_SIZE
    offset = self.offset
    header = self.header_no_crc_fmt.pack(size, TAG_PUT2)
    entry_hash = self.entry_hash(header, id, data)
    # crc32 covers header (minus crc) + id + entry_hash; payload integrity is covered by entry_hash
    crc = self.crc_fmt.pack(crc32(entry_hash, crc32(id, crc32(header))) & 0xFFFFFFFF)
    fd.write(b"".join((crc, header, id, entry_hash)))
    fd.write(data)
    self.offset += size
    return self.segment, offset
def write_delete(self, id, raise_full=False):
    """Append a DELETE entry (crc + header + id) to the write segment.

    :return: (segment, entry_size) - note: the second element is the entry's on-disk
        size (HEADER_ID_SIZE), not its offset (the caller uses it for compaction accounting).
    """
    fd = self.get_write_fd(want_new=(id == Manifest.MANIFEST_ID), raise_full=raise_full)
    header = self.header_no_crc_fmt.pack(self.HEADER_ID_SIZE, TAG_DELETE)
    crc = self.crc_fmt.pack(crc32(id, crc32(header)) & 0xFFFFFFFF)
    fd.write(b"".join((crc, header, id)))
    self.offset += self.HEADER_ID_SIZE
    return self.segment, self.HEADER_ID_SIZE
def write_commit(self, intermediate=False):
    """Append a COMMIT entry and close the segment; returns the committed segment's number.

    Intermediate commits go directly into the current segment - this makes checking their validity more
    expensive, but is faster and reduces clobber. Final commits go into a new segment.
    """
    fd = self.get_write_fd(want_new=not intermediate, no_new=intermediate)
    if intermediate:
        # make sure everything written so far is on disk before the commit tag
        fd.sync()
    header = self.header_no_crc_fmt.pack(self.header_fmt.size, TAG_COMMIT)
    crc = self.crc_fmt.pack(crc32(header) & 0xFFFFFFFF)
    fd.write(b"".join((crc, header)))
    self.close_segment()
    return self.segment - 1  # close_segment() increments it
# sanity check: entry header (9) + key (32) + entry hash (8) must match the fixed
# overhead assumed by constants.MAX_OBJECT_SIZE.
assert LoggedIO.HEADER_ID_SIZE + LoggedIO.ENTRY_HASH_SIZE == 41 + 8  # see constants.MAX_OBJECT_SIZE
|
views | escalation_chain | from apps.alerts.models import EscalationChain
from apps.api.permissions import RBACPermission
from apps.api.serializers.escalation_chain import (
EscalationChainListSerializer,
EscalationChainSerializer,
FilterEscalationChainSerializer,
)
from apps.auth_token.auth import PluginAuthentication
from apps.mobile_app.auth import MobileAppAuthTokenAuthentication
from apps.user_management.models import Team
from common.api_helpers.exceptions import BadRequest
from common.api_helpers.filters import (
ByTeamModelFieldFilterMixin,
ModelFieldFilterMixin,
TeamModelMultipleChoiceFilter,
)
from common.api_helpers.mixins import (
FilterSerializerMixin,
ListSerializerMixin,
PublicPrimaryKeyMixin,
TeamFilteringMixin,
)
from common.insight_log import EntityEvent, write_resource_insight_log
from django.db.models import Count, Q
from django_filters import rest_framework as filters
from emoji import emojize
from rest_framework import status, viewsets
from rest_framework.decorators import action
from rest_framework.filters import SearchFilter
from rest_framework.permissions import IsAuthenticated
from rest_framework.response import Response
class EscalationChainFilter(
    ByTeamModelFieldFilterMixin, ModelFieldFilterMixin, filters.FilterSet
):
    """Filterset for escalation-chain list endpoints.

    Only supports narrowing by one or more teams; the mixins supply the
    actual filtering behaviour for the declared field.
    """

    team = TeamModelMultipleChoiceFilter()
class EscalationChainViewSet(
    TeamFilteringMixin,
    PublicPrimaryKeyMixin,
    FilterSerializerMixin,
    ListSerializerMixin,
    viewsets.ModelViewSet,
):
    """Internal-API CRUD for escalation chains.

    Besides the standard ModelViewSet actions it exposes:

    * ``copy`` (POST, detail) — duplicate a chain under a new name/team;
    * ``details`` (GET, detail) — the chain's routes grouped by integration;
    * ``filters`` (GET, list) — filter descriptions for the frontend.

    Insight-log entries are written on create/update/destroy and copy.
    """

    authentication_classes = (
        MobileAppAuthTokenAuthentication,
        PluginAuthentication,
    )
    permission_classes = (IsAuthenticated, RBACPermission)
    # Per-action RBAC requirements, including the custom @action endpoints.
    rbac_permissions = {
        "metadata": [RBACPermission.Permissions.ESCALATION_CHAINS_READ],
        "list": [RBACPermission.Permissions.ESCALATION_CHAINS_READ],
        "retrieve": [RBACPermission.Permissions.ESCALATION_CHAINS_READ],
        "details": [RBACPermission.Permissions.ESCALATION_CHAINS_READ],
        "create": [RBACPermission.Permissions.ESCALATION_CHAINS_WRITE],
        "update": [RBACPermission.Permissions.ESCALATION_CHAINS_WRITE],
        "destroy": [RBACPermission.Permissions.ESCALATION_CHAINS_WRITE],
        "copy": [RBACPermission.Permissions.ESCALATION_CHAINS_WRITE],
        "filters": [RBACPermission.Permissions.ESCALATION_CHAINS_READ],
    }
    filter_backends = [SearchFilter, filters.DjangoFilterBackend]
    search_fields = ("name",)
    filterset_class = EscalationChainFilter
    serializer_class = EscalationChainSerializer
    list_serializer_class = EscalationChainListSerializer
    filter_serializer_class = FilterEscalationChainSerializer

    def get_queryset(self, ignore_filtering_by_available_teams=False):
        """Escalation chains belonging to the requester's organization.

        Restricted to the user's available teams unless
        ``ignore_filtering_by_available_teams`` is set.  For
        ``?filters=true`` requests only the two fields needed by
        FilterEscalationChainSerializer are fetched; otherwise the queryset
        is annotated with integration/route counts.
        """
        is_filters_request = self.request.query_params.get("filters", "false") == "true"
        queryset = EscalationChain.objects.filter(
            organization=self.request.auth.organization,
        )
        if not ignore_filtering_by_available_teams:
            queryset = queryset.filter(*self.available_teams_lookup_args).distinct()
        if is_filters_request:
            # Do not annotate num_integrations and num_routes for filters request,
            # only fetch public_primary_key and name fields needed by FilterEscalationChainSerializer
            return queryset.only("public_primary_key", "name")
        # Count only routes/integrations whose integration is not soft-deleted.
        queryset = queryset.annotate(
            num_integrations=Count(
                "channel_filters__alert_receive_channel",
                distinct=True,
                filter=Q(
                    channel_filters__alert_receive_channel__deleted_at__isnull=True
                ),
            )
        ).annotate(
            num_routes=Count(
                "channel_filters",
                distinct=True,
                filter=Q(
                    channel_filters__alert_receive_channel__deleted_at__isnull=True
                ),
            )
        )
        return queryset

    def perform_create(self, serializer):
        # Save first, then record the creation in the insight log.
        serializer.save()
        write_resource_insight_log(
            instance=serializer.instance,
            author=self.request.user,
            event=EntityEvent.CREATED,
        )

    def perform_destroy(self, instance):
        # Log before deleting so the instance state can still be serialized.
        write_resource_insight_log(
            instance=instance,
            author=self.request.user,
            event=EntityEvent.DELETED,
        )
        instance.delete()

    def perform_update(self, serializer):
        # Capture before/after state so the insight log can show a diff.
        prev_state = serializer.instance.insight_logs_serialized
        serializer.save()
        new_state = serializer.instance.insight_logs_serialized
        write_resource_insight_log(
            instance=serializer.instance,
            author=self.request.user,
            event=EntityEvent.UPDATED,
            prev_state=prev_state,
            new_state=new_state,
        )

    @action(methods=["post"], detail=True)
    def copy(self, request, pk):
        """Create a copy of the chain with the given name and optional team.

        Raises BadRequest (400) when the name is missing or already taken
        in the organization; returns 403 when the requested team is not
        among the user's available teams.
        """
        obj = self.get_object()
        name = request.data.get("name")
        team_id = request.data.get("team")
        # The frontend sends the literal string "null" to mean "no team".
        if team_id == "null":
            team_id = None
        if not name:
            raise BadRequest(detail={"name": ["This field may not be null."]})
        else:
            if EscalationChain.objects.filter(
                organization=request.auth.organization, name=name
            ).exists():
                raise BadRequest(
                    detail={"name": ["Escalation chain with this name already exists."]}
                )
        try:
            team = (
                request.user.available_teams.get(public_primary_key=team_id)
                if team_id
                else None
            )
        except Team.DoesNotExist:
            return Response(
                data={"error_code": "wrong_team"}, status=status.HTTP_403_FORBIDDEN
            )
        copy = obj.make_copy(name, team)
        serializer = self.get_serializer(copy)
        write_resource_insight_log(
            instance=copy,
            author=self.request.user,
            event=EntityEvent.CREATED,
        )
        return Response(serializer.data)

    @action(methods=["get"], detail=True)
    def details(self, request, pk):
        """Return the chain's routes grouped by (non-deleted) integration.

        Response body: list of ``{id, display_name, channel_filters}``
        dicts, one per integration.
        """
        obj = self.get_object()
        channel_filters = obj.channel_filters.filter(
            alert_receive_channel__deleted_at__isnull=True
        ).values(
            "public_primary_key",
            "filtering_term",
            "is_default",
            "alert_receive_channel__public_primary_key",
            "alert_receive_channel__verbal_name",
        )
        data = {}
        for channel_filter in channel_filters:
            channel_filter_data = {
                "display_name": "Default Route"
                if channel_filter["is_default"]
                else channel_filter["filtering_term"],
                "id": channel_filter["public_primary_key"],
            }
            # Group routes under their integration, creating the group on
            # first sight; integration names may contain emoji aliases.
            data.setdefault(
                channel_filter["alert_receive_channel__public_primary_key"],
                {
                    "id": channel_filter["alert_receive_channel__public_primary_key"],
                    "display_name": emojize(
                        channel_filter["alert_receive_channel__verbal_name"],
                        language="alias",
                    ),
                    "channel_filters": [],
                },
            )["channel_filters"].append(channel_filter_data)
        return Response(data.values())

    @action(methods=["get"], detail=False)
    def filters(self, request):
        """Describe the filters the frontend may offer for this resource.

        Supports narrowing via ``?search=<substring of filter name>``.
        """
        filter_name = request.query_params.get("search", None)
        api_root = "/api/internal/v1/"
        filter_options = [
            {
                "name": "team",
                "type": "team_select",
                "href": api_root + "teams/",
                "global": True,
            },
        ]
        if filter_name is not None:
            filter_options = list(
                filter(lambda f: filter_name in f["name"], filter_options)
            )
        return Response(filter_options)
|
clients | transmission | import os
import mylar
from mylar import logger
from transmissionrpc import Client
class TorrentClient(object):
    """Thin wrapper around transmissionrpc for Mylar's torrent handling."""

    def __init__(self):
        # Lazily-created transmissionrpc.Client; set on first connect().
        self.conn = None

    def connect(self, host, username, password):
        """Connect to the Transmission RPC host.

        Returns the (possibly cached) client connection, or False when no
        host is configured or the connection attempt fails.
        """
        if self.conn is not None:
            return self.conn
        if not host:
            return False
        try:
            if username and password:
                self.conn = Client(host, user=username, password=password)
            else:
                self.conn = Client(host)
        except Exception:
            # BUG FIX: the original used "%h", an invalid format character
            # that raised ValueError while trying to log; the bare except
            # is also narrowed so SystemExit/KeyboardInterrupt propagate.
            logger.error("Could not connect to %s" % host)
            return False
        return self.conn

    def find_torrent(self, hash):
        """Return the torrent with the given hash, or False if unknown."""
        try:
            return self.conn.get_torrent(hash)
        except KeyError:
            # BUG FIX: the hash argument was missing, so the message
            # printed a literal "%s" instead of the torrent hash.
            logger.error("torrent %s does not exist" % hash)
            return False

    def get_torrent(self, torrent):
        """Refresh *torrent* from the server and return an info dict."""
        torrent = self.conn.get_torrent(torrent.hashString)
        torrent_files = []
        torrent_directory = os.path.normpath(torrent.downloadDir)
        # .values() works on both Python 2 and 3; the original used the
        # Python-2-only .itervalues().
        for f in torrent.files().values():
            if not os.path.normpath(f["name"]).startswith(torrent_directory):
                file_path = os.path.join(torrent_directory, f["name"].lstrip("/"))
            else:
                file_path = f["name"]
            torrent_files.append(file_path)
        torrent_info = {
            "hash": torrent.hashString,
            "name": torrent.name,
            "folder": torrent.downloadDir,
            "completed": torrent.progress == 100,
            "label": "None", ## labels not supported in transmission - for when it's in transmission
            "files": torrent_files,
            "upload_total": torrent.uploadedEver,
            "download_total": torrent.downloadedEver,
            "ratio": torrent.ratio,
            "total_filesize": torrent.sizeWhenDone,
            "time_started": torrent.date_started,
        }
        logger.debug(torrent_info)
        # torrent_info is always a non-empty dict at this point, so the
        # original's "... if torrent_info else False" was unreachable.
        return torrent_info

    def start_torrent(self, torrent):
        """Resume the given torrent."""
        return torrent.start()

    def stop_torrent(self, torrent):
        """Pause the given torrent."""
        return torrent.stop()

    def load_torrent(self, filepath):
        """Add a torrent (magnet URI or .torrent path), start it, and
        return its info dict."""
        # Fall back to the watch folder when no download dir is configured.
        if any(
            [
                mylar.CONFIG.TRANSMISSION_DIRECTORY is None,
                mylar.CONFIG.TRANSMISSION_DIRECTORY == "",
                mylar.CONFIG.TRANSMISSION_DIRECTORY == "None",
            ]
        ):
            down_dir = mylar.CONFIG.CHECK_FOLDER
        else:
            down_dir = mylar.CONFIG.TRANSMISSION_DIRECTORY
        if filepath.startswith("magnet"):
            torrent = self.conn.add_torrent("%s" % filepath, download_dir=down_dir)
        else:
            torrent = self.conn.add_torrent(
                "file://%s" % filepath, download_dir=down_dir
            )
        torrent.start()
        return self.get_torrent(torrent)

    def delete_torrent(self, torrent):
        """Remove the torrent and its data; return the deleted paths."""
        deleted = []
        files = torrent.files()
        for file_item in files.values():
            file_path = os.path.join(torrent.downloadDir, file_item["name"])
            deleted.append(file_path)
        if len(files) > 1:
            # Multi-file torrent: also record the directories that held it.
            torrent_path = os.path.join(torrent.downloadDir, torrent.name)
            for path, _, _ in os.walk(torrent_path, topdown=False):
                deleted.append(path)
        if self.conn.remove_torrent(torrent.hashString, delete_data=True):
            return deleted
        else:
            logger.error("Unable to delete %s" % torrent.name)
            return []
|
extractor | cbs | from __future__ import unicode_literals
from ..utils import (
ExtractorError,
find_xpath_attr,
int_or_none,
update_url_query,
xpath_element,
xpath_text,
)
from .theplatform import ThePlatformFeedIE
class CBSBaseIE(ThePlatformFeedIE):
    def _parse_smil_subtitles(self, smil, namespace=None, subtitles_lang="en"):
        """Collect closed-caption URLs advertised as <param> nodes in the SMIL
        document and return them as a subtitles dict keyed by language."""
        subtitles = {}
        caption_params = (
            ("sMPTE-TTCCURL", "tt"),
            ("ClosedCaptionURL", "ttml"),
            ("webVTTCaptionURL", "vtt"),
        )
        for param_name, ext in caption_params:
            param_node = find_xpath_attr(
                smil, self._xpath_ns(".//param", namespace), "name", param_name
            )
            if param_node is None:
                continue
            cc_url = param_node.get("value")
            if not cc_url:
                continue
            subtitles.setdefault(subtitles_lang, []).append(
                {
                    "ext": ext,
                    "url": cc_url,
                }
            )
        return subtitles
class CBSIE(CBSBaseIE):
    """Extractor for CBS, Paramount+ and Colbert Late Show video pages.

    Also accepts bare ``cbs:<id>`` URLs used internally by other extractors.
    """

    _VALID_URL = r"(?:cbs:|https?://(?:www\.)?(?:(?:cbs|paramountplus)\.com/shows/[^/]+/video|colbertlateshow\.com/(?:video|podcasts))/)(?P<id>[\w-]+)"
    _TESTS = [
        {
            "url": "http://www.cbs.com/shows/garth-brooks/video/_u7W953k6la293J7EPTd9oHkSPs6Xn6_/connect-chat-feat-garth-brooks/",
            "info_dict": {
                "id": "_u7W953k6la293J7EPTd9oHkSPs6Xn6_",
                "ext": "mp4",
                "title": "Connect Chat feat. Garth Brooks",
                "description": "Connect with country music singer Garth Brooks, as he chats with fans on Wednesday November 27, 2013. Be sure to tune in to Garth Brooks: Live from Las Vegas, Friday November 29, at 9/8c on CBS!",
                "duration": 1495,
                "timestamp": 1385585425,
                "upload_date": "20131127",
                "uploader": "CBSI-NEW",
            },
            "params": {
                # m3u8 download
                "skip_download": True,
            },
            "_skip": "Blocked outside the US",
        },
        {
            "url": "http://colbertlateshow.com/video/8GmB0oY0McANFvp2aEffk9jZZZ2YyXxy/the-colbeard/",
            "only_matching": True,
        },
        {
            "url": "http://www.colbertlateshow.com/podcasts/dYSwjqPs_X1tvbV_P2FcPWRa_qT6akTC/in-the-bad-room-with-stephen/",
            "only_matching": True,
        },
        {
            "url": "https://www.paramountplus.com/shows/all-rise/video/QmR1WhNkh1a_IrdHZrbcRklm176X_rVc/all-rise-space/",
            "only_matching": True,
        },
    ]

    def _extract_video_info(self, content_id, site="cbs", mpx_acc=2198311517):
        """Query the CBS 'thunder' player service and assemble the info dict.

        :param content_id: CBS content GUID taken from the page URL
        :param site: partner name passed to the player service
        :param mpx_acc: thePlatform (mpx) account number used to build the
            media path
        """
        items_data = self._download_xml(
            "http://can.cbs.com/thunder/player/videoPlayerService.php",
            content_id,
            query={"partner": site, "contentId": content_id},
        )
        video_data = xpath_element(items_data, ".//item")
        title = xpath_text(video_data, "videoTitle", "title", True)
        tp_path = "dJ5BDC/media/guid/%d/%s" % (mpx_acc, content_id)
        tp_release_url = "http://link.theplatform.com/s/" + tp_path
        asset_types = []
        subtitles = {}
        formats = []
        # Last SMIL download error; re-raised only if no asset type yielded
        # any formats at all.
        last_e = None
        for item in items_data.findall(".//item"):
            asset_type = xpath_text(item, "assetType")
            # Skip empty/duplicate asset types and DRM variants.
            if (
                not asset_type
                or asset_type in asset_types
                or "HLS_FPS" in asset_type
                or "DASH_CENC" in asset_type
            ):
                continue
            asset_types.append(asset_type)
            query = {
                "mbr": "true",
                "assetTypes": asset_type,
            }
            if asset_type.startswith("HLS") or asset_type in ("OnceURL", "StreamPack"):
                query["formats"] = "MPEG4,M3U"
            elif asset_type in ("RTMP", "WIFI", "3G"):
                query["formats"] = "MPEG4,FLV"
            try:
                tp_formats, tp_subtitles = self._extract_theplatform_smil(
                    update_url_query(tp_release_url, query),
                    content_id,
                    "Downloading %s SMIL data" % asset_type,
                )
            except ExtractorError as e:
                last_e = e
                continue
            formats.extend(tp_formats)
            subtitles = self._merge_subtitles(subtitles, tp_subtitles)
        if last_e and not formats:
            raise last_e
        self._sort_formats(formats)
        info = self._extract_theplatform_metadata(tp_path, content_id)
        info.update(
            {
                "id": content_id,
                "title": title,
                "series": xpath_text(video_data, "seriesTitle"),
                "season_number": int_or_none(xpath_text(video_data, "seasonNumber")),
                "episode_number": int_or_none(xpath_text(video_data, "episodeNumber")),
                # videoLength is in milliseconds; the 1000 scales to seconds.
                "duration": int_or_none(xpath_text(video_data, "videoLength"), 1000),
                "thumbnail": xpath_text(video_data, "previewImageURL"),
                "formats": formats,
                "subtitles": subtitles,
            }
        )
        return info

    def _real_extract(self, url):
        content_id = self._match_id(url)
        return self._extract_video_info(content_id)
|
ocrolib | toplevel | from __future__ import print_function
import functools
import linecache
import os
import sys
import warnings
from types import NoneType
import numpy as np
# FIXME from ... import wrap
### printing
def strc(arg, n=10):
    """Compact version of `str`."""
    if isinstance(arg, float):
        return "%.3g" % arg
    if type(arg) == list:
        preview = ",".join(strc(item) for item in arg[:3])
        return "[%s|%d]" % (preview, len(arg))
    if type(arg) == np.ndarray:
        summary = (id(arg), arg.shape, str(arg.dtype), np.amin(arg), np.amax(arg))
        return "<ndarray-%x %s %s [%s,%s]>" % summary
    return str(arg).replace("\n", " ")
### deprecation warnings
def deprecated(f):
    """Decorator: emit a DeprecationWarning each time *f* is called.

    Uses ``f.__code__`` (available on Python 2.6+ and 3) instead of the
    Python-2-only ``f.func_code``, so the warning can point at f's
    definition site on any interpreter.
    """
    @functools.wraps(f)
    def wrapper(*args, **kw):
        warnings.warn_explicit(
            "calling deprecated function %s" % f.__name__,
            category=DeprecationWarning,
            filename=f.__code__.co_filename,
            lineno=f.__code__.co_firstlineno + 1,
        )
        return f(*args, **kw)
    return wrapper
def failfunc(f):
    """Decorator: replace *f* with a stub that always raises."""
    @functools.wraps(f)
    def _fail(*args, **kw):
        raise Exception("don't call %s anymore" % f)
    return _fail


# Alias kept for older call sites.
obsolete = failfunc
### debugging / tracing
# Current nesting depth of trace1-wrapped calls (controls print indent).
_trace1_depth = 0


def trace1(f):
    """Print arguments/return values for the decorated function before each call.

    Fixed for Python 3 compatibility: ``f.__name__``/``f.__code__`` replace
    the Python-2-only ``func_name``/``func_code``, and the zip/items
    concatenation is materialized with list() because zip returns an
    iterator on Python 3.
    """
    name = f.__name__
    argnames = f.__code__.co_varnames[: f.__code__.co_argcount]

    @functools.wraps(f)
    def wrapper(*args, **kw):
        try:
            global _trace1_depth
            _trace1_depth += 1
            print(" " * _trace1_depth, "ENTER", name, ":", end=" ")
            for k, v in list(zip(argnames, args)) + list(kw.items()):
                print("%s=%s" % (k, strc(v)), end=" ")
            print()
            result = f(*args, **kw)
            print(" " * _trace1_depth, "LEAVE", name, ":", strc(result))
            return result
        except Exception as e:
            print(" " * _trace1_depth, "ERROR", name, ":", e)
            raise
        finally:
            _trace1_depth -= 1
    return wrapper
def tracing(f):
    """Enable line tracing just within a function call.

    Each executed line of *f* (and anything it calls) is printed as
    ``file(lineno): source``.
    """
    def globaltrace(frame, why, arg):
        # Install the line-level tracer for every new call frame.
        if why == "call":
            return localtrace
        return None

    def localtrace(frame, why, arg):
        if why == "line":
            fname = frame.f_code.co_filename
            lineno = frame.f_lineno
            base = os.path.basename(fname)
            print("%s(%s): %s" % (base, lineno, linecache.getline(fname, lineno)))
        return localtrace

    # BUG FIX: the original decorated with `@wrap(f)`, an undefined name
    # (see the FIXME import at the top of the module), which raised
    # NameError as soon as `tracing` was applied; functools.wraps is the
    # intended behaviour.
    @functools.wraps(f)
    def wrapper(*args, **kw):
        sys.settrace(globaltrace)
        try:
            return f(*args, **kw)
        finally:
            # Always uninstall the tracer, even when f raises (the
            # original left tracing enabled on exceptions).
            sys.settrace(None)
    return wrapper
def method(cls):
    """Decorator: add the decorated function to *cls* as a method.

    The decorated name is rebound to None in the defining scope, so the
    function is only reachable through the class.

    BUG FIX: the original assigned into ``cls.__dict__`` (a read-only
    mappingproxy for new-style classes) via the Python-2-only ``new``
    module; ``setattr`` does the same job on both Python 2 and 3.
    """
    def _wrap(f):
        setattr(cls, f.__name__, f)
        return None
    return _wrap
def unchanged(f):
    """This decorator doesn't add any behavior; it returns *f* untouched.

    Useful as a drop-in replacement for `disabled`/`replacedby` when a
    decorator slot must be filled but no change is wanted.
    """
    return f
def disabled(value=None):
    """Decorator factory: make the decorated function a no-op.

    Calls to the decorated function do nothing and return *value*
    (None by default).
    """
    def wrapper(f):
        @functools.wraps(f)
        def _noop(*args, **kw):
            return value
        return _noop
    return wrapper
def replacedby(g):
    """Decorator factory: forward all calls to *g* instead of the
    decorated function (which keeps its own name/docstring)."""
    def wrapper(f):
        @functools.wraps(f)
        def forward(*args, **kw):
            return g(*args, **kw)
        return forward
    return wrapper
### type and range checks for arguments and return values
class CheckError(Exception):
    """Raised when a value fails a type/argument check.

    Keyword arguments ``fun`` and ``var`` identify the offending function
    and argument; all positional arguments are joined (via strc) into the
    failure description.
    """

    def __init__(self, *args, **kw):
        self.fun = kw.get("fun", "?")
        self.var = kw.get("var", "?")
        self.description = " ".join(strc(x) for x in args)

    def __str__(self):
        return "\nCheckError for argument %s of function %s\n%s" % (
            self.var,
            self.fun,
            self.description,
        )
class CheckWarning(CheckError):
    """Check failure that can occur occasionally during normal operation.

    Same shape as CheckError, but the message notes that the condition is
    not necessarily a bug.
    """

    def __init__(self, *args, **kw):
        self.fun = kw.get("fun", "?")
        self.var = kw.get("var", "?")
        self.description = " ".join(strc(x) for x in args)

    def __str__(self):
        return (
            "\nCheckWarning for argument %s of function %s\n%s"
            "(This can happen occasionally during normal operations and isn't necessarily a bug or problem.)\n"
            % (self.var, self.fun, self.description)
        )
def checktype(value, type_):
    """Check value against the type spec. If everything
    is OK, this just returns the value itself.
    If the types don't check out, an exception is thrown."""
    # True skips any check
    if type_ is True:
        return value
    # types are checked using isinstance
    if type(type_) == type:
        if not isinstance(value, type_):
            raise CheckError(
                "isinstance failed",
                value,
                "of type",
                type(value),
                "is not of type",
                type_,
            )
        return value
    # for a list, check that all elements of a collection have a type
    # of some list element, allowing declarations like [str] or [str,unicode]
    # no recursive checks right now
    if type(type_) == list:
        if not np.iterable(value):
            raise CheckError("expected iterable", value)
        for x in value:
            # `any` replaces the Python-2-only bare `reduce(max, ...)`;
            # the semantics (at least one matching type) are identical.
            if not any(isinstance(x, t) for t in type_):
                raise CheckError(
                    "element", x, "of type", type(x), "fails to be of type", type_
                )
        return value
    # for sets, check membership of the type in the set
    if type(type_) == set:
        for t in type_:
            if isinstance(value, t):
                return value
        # BUG FIX: the original passed var=var here, but no `var` exists
        # in this scope (the FIXME noted it), so reaching this branch
        # raised NameError instead of the intended CheckError.
        raise CheckError("set membership failed", value, type_)
    # for tuples, check that all conditions are satisfied
    if type(type_) == tuple:
        for t in type_:
            # BUG FIX: the original called checktype(value, type_) here,
            # recursing on the whole tuple forever; each element spec
            # must be checked instead.
            checktype(value, t)
        return value
    # callables are just called and should either use assertions or
    # explicitly raise CheckError
    if callable(type_):
        type_(value)
        return value
    # otherwise, we don't understand the type spec
    raise Exception("unknown type spec: %s" % type_)
def checks(*types, **ktypes):
    """Check argument and return types against type specs at runtime.

    Positional specs in *types* line up with the function's positional
    arguments; keyword specs in **ktypes match keyword arguments.  The
    special ktypes key "_" declares the return-value spec.
    """
    def argument_check_decorator(f):
        @functools.wraps(f)
        def argument_checks(*args, **kw):
            # Python-3 fixes: f.__name__/f.__code__ replace the Python-2-only
            # func_name/func_code; list(zip(...)) because zip is an iterator.
            name = f.__name__
            argnames = f.__code__.co_varnames[: f.__code__.co_argcount]
            kw3 = [(var, value, ktypes.get(var, True)) for var, value in kw.items()]
            for var, value, type_ in list(zip(argnames, args, types)) + kw3:
                try:
                    checktype(value, type_)
                except AssertionError as e:
                    # e.args carries the assertion message (the Python-2-only
                    # e.message duplicated args[0] and breaks on Python 3).
                    raise CheckError(*e.args, var=var, fun=f)
                except CheckError as e:
                    e.fun = f
                    e.var = var
                    raise e
                except:
                    print("unknown exception while checking function:", name)
                    raise
            result = f(*args, **kw)
            # BUG FIX: the return spec lives in the decorator's ktypes
            # ("_" key), not in the call's keyword arguments (kw), where
            # it could never legally appear.
            checktype(result, ktypes.get("_", True))
            return result
        return argument_checks
    return argument_check_decorator
def makeargcheck(message, warning=0):
    """Convert a boolean predicate into an argcheck.

    The returned decorator wraps a predicate f(arg); the wrapper raises
    CheckWarning (warning=1) or CheckError with *message* when the
    predicate is falsy, and returns None otherwise.
    """
    def decorator(f):
        # functools.wraps was missing in the original, so every generated
        # checker showed up as "wrapper" under introspection.
        @functools.wraps(f)
        def wrapper(arg):
            if not f(arg):
                if warning:
                    raise CheckWarning(
                        strc(arg) + " of type " + str(type(arg)) + ": " + str(message)
                    )
                else:
                    raise CheckError(
                        strc(arg) + " of type " + str(type(arg)) + ": " + str(message)
                    )
        return wrapper
    return decorator
### Here are a whole bunch of type check predicates.
def ALL(*checks):
    """Combined argcheck: the value must satisfy every given check."""
    def CHK_(x):
        for single_check in checks:
            single_check(x)
    return CHK_
def ANY(*checks):
    """Combined argcheck: the value must satisfy at least one given check."""
    def CHK_(x):
        for candidate in checks:
            try:
                candidate(x)
            except:
                continue
            return
        raise CheckError(x, ": failed all checks:", [strc(c) for c in checks])
    return CHK_
@makeargcheck("value should be type bool or 0/1")
def BOOL(x):
    """Predicate: x is a bool, or an int equal to 0 or 1.

    (Error-message typo fixed: "book" -> "bool".)
    """
    return isinstance(x, bool) or (isinstance(x, int) and x in [0, 1])


@makeargcheck("value should be an int or a float")
def NUMBER(a):
    """Predicate: a is an int or a float."""
    # isinstance accepts a tuple of types; equivalent to the original
    # pair of isinstance calls joined with `or`.
    return isinstance(a, (int, float))


def RANGE(lo, hi):
    """Argcheck factory: value must lie in the closed interval [lo, hi]."""
    @makeargcheck("value out of range [%g,%g]" % (lo, hi))
    def RANGE_(x):
        # Chained comparison replaces `x >= lo and x <= hi`.
        return lo <= x <= hi
    return RANGE_


def ARANK(n):
    """Argcheck factory: array must have rank (ndim) n."""
    @makeargcheck("array must have rank %d" % n)
    def ARANK_(a):
        # Objects without .ndim (non-arrays) fail the check.
        if not hasattr(a, "ndim"):
            return 0
        return a.ndim == n
    return ARANK_


def ARANGE(lo, hi):
    """Argcheck factory: all array values must lie within [lo, hi]."""
    @makeargcheck("array values must be within [%g,%g]" % (lo, hi))
    def ARANGE_(a):
        return np.amin(a) >= lo and np.amax(a) <= hi
    return ARANGE_


@makeargcheck("array elements must be non-negative")
def ANONNEG(a):
    """Predicate: the smallest array element is >= 0."""
    return np.amin(a) >= 0
# Floating-point dtypes accepted by AFLOAT.  The extended-precision types
# (float96 on 32-bit builds, float128 on 64-bit builds) exist only on some
# platforms, so they are probed and added opportunistically.
float_dtypes = [np.dtype("float32"), np.dtype("float64")]
try:
    float_dtypes += [np.dtype("float96")]
except TypeError:
    # BUG FIX (narrowed): numpy raises TypeError for unknown dtype names;
    # the original bare `except:` also swallowed KeyboardInterrupt etc.
    pass
try:
    float_dtypes += [np.dtype("float128")]
except TypeError:
    pass
@makeargcheck("array must contain floating point values")
def AFLOAT(a):
    """Predicate: the array's dtype is one of the recognized float dtypes."""
    return a.dtype in float_dtypes
# Integer dtypes accepted by AINT (and used to recognize label images).
int_dtypes = [
    np.dtype(type_name)
    for type_name in ("uint8", "int32", "int64", "uint32", "uint64")
]
@makeargcheck("array must contain integer values")
def AINT(a):
    """Predicate: the array's dtype is one of the recognized integer dtypes."""
    return a.dtype in int_dtypes
@makeargcheck("expected a byte (uint8) array")
def ABYTE(a):
    """Predicate: the array's dtype is uint8 ('B')."""
    return a.dtype == np.dtype("B")
@makeargcheck("expect tuple of int")
def inttuple(a):
    """Predicate: a is a single int, or a tuple/list of ints."""
    if isinstance(a, int):
        return 1
    # NOTE(review): `tuple(a) or list(a)` *converts* `a` and tests the
    # result's truthiness rather than checking its type, so an empty
    # sequence is rejected and a non-iterable raises TypeError here; an
    # isinstance check was probably intended - confirm before changing.
    if not (tuple(a) or list(a)):
        return 0
    for x in a:
        if not isinstance(x, int):
            return 0
    return 1
@makeargcheck("expect tuple of nonnegative int")
def uinttuple(a):
    """Predicate: a is a single int, or a tuple/list of ints >= 0."""
    if isinstance(a, int):
        return 1
    # NOTE(review): same suspicious truthiness test as in inttuple.
    if not (tuple(a) or list(a)):
        return 0
    for x in a:
        if not isinstance(x, int):
            return 0
        if x < 0:
            return 0
    return 1
@makeargcheck("expect pair of int")
def uintpair(a):
    """Predicate: a is a non-empty length-2 sequence of nonnegative values.

    NOTE(review): despite the message, element int-ness is not verified,
    only the sign of the two entries.
    """
    if not tuple(a):
        return 0
    if not len(a) == 2:
        return 0
    if a[0] < 0:
        return 0
    if a[1] < 0:
        return 0
    return 1
@makeargcheck("expect a rectangle as a pair of slices")
def RECTANGLE(a):
    """Predicate: a is a non-empty sequence whose first two items are slices."""
    if not tuple(a):
        return 0
    if not isinstance(a[0], slice):
        return 0
    if not isinstance(a[1], slice):
        return 0
    return 1
### specific kinds of arrays
# Rank-only checks.
ARRAY1 = ARANK(1)
ARRAY2 = ARANK(2)
ARRAY3 = ARANK(3)
# Integer arrays of a given rank.
AINT1 = ALL(ARANK(1), AINT)
AINT2 = ALL(ARANK(2), AINT)
AINT3 = ALL(ARANK(3), AINT)
# Floating-point arrays of a given rank.
AFLOAT1 = ALL(ARANK(1), AFLOAT)
AFLOAT2 = ALL(ARANK(2), AFLOAT)
AFLOAT3 = ALL(ARANK(3), AFLOAT)
@makeargcheck("expected a boolean array or an array of 0/1")
def ABINARY(a):
    """Predicate: a 2D bool array, or an integer array containing only
    0/1 values (or only 0/255 for uint8 images)."""
    if a.ndim == 2 and a.dtype == np.dtype(bool):
        return 1
    if not a.dtype in int_dtypes:
        return 0
    import scipy.ndimage.measurements
    # Use the array itself as the label image and sum the constant 1 over
    # pixels labeled 0 and 1: this counts how many pixels hold each of
    # those two values.
    zeros, ones = scipy.ndimage.measurements.sum(1, a, [0, 1])
    if zeros + ones == a.size:
        return 1
    if a.dtype == np.dtype("B"):
        # Byte images may encode binary as 0/255 instead of 0/1.
        zeros, ones = scipy.ndimage.measurements.sum(1, a, [0, 255])
        if zeros + ones == a.size:
            return 1
    return 0
# Binary masks of specific ranks.
ABINARY1 = ALL(ABINARY, ARRAY1)
ABINARY2 = ALL(ABINARY, ARRAY2)
ABINARY3 = ALL(ABINARY, ARRAY3)
def CHANNELS(n):
    """Argcheck factory: the last axis (color channels) must have length n."""
    @makeargcheck("expected %d channels" % n)
    def CHANNELS_(a):
        return a.shape[-1] == n
    return CHANNELS_
# Common image flavors.
GRAYSCALE = AFLOAT2
GRAYSCALE1 = ALL(AFLOAT2, ARANGE(0, 1))  # grayscale normalized to [0,1]
BYTEIMAGE = ALL(ARANK(2), ABYTE)
RGB = ALL(ARANK(3), ABYTE, CHANNELS(3))
RGBA = ALL(ARANK(3), ABYTE, CHANNELS(4))
### image arrays with more complicated image properties
@makeargcheck("expect a light image (median>mean)", warning=1)
def LIGHT(a):
    """Heuristic: image is mostly light (median >= mean); warning-only."""
    return np.median(a) >= np.mean(a)
@makeargcheck("expect a dark image (median<mean)", warning=1)
def DARK(a):
    """Heuristic: image is mostly dark (median <= mean); warning-only."""
    return np.median(a) <= np.mean(a)
@makeargcheck("expect a page image (larger than 600x600)", warning=1)
def PAGE(a):
    """Heuristic: 2D page image at least 600x600 pixels; warning-only."""
    return a.ndim == 2 and a.shape[0] >= 600 and a.shape[1] >= 600
@makeargcheck(
    "expected a line image (taller than 8 pixels and wider than tall)", warning=1
)
def LINE(a, var=None):
    """Heuristic: 2D text-line image taller than 8 pixels; warning-only.

    NOTE(review): the wider-than-tall part of the message is not enforced
    (the comparison is commented out below).
    """
    return a.ndim == 2 and a.shape[0] > 8  # and a.shape[1]>a.shape[0]
# Combined page/line checks.
BINPAGE = ALL(PAGE, ABINARY2)
LIGHTPAGE = ALL(PAGE, LIGHT)
DARKPAGE = ALL(PAGE, DARK)
LIGHTLINE = ALL(LINE, LIGHT)
DARKLINE = ALL(LINE, DARK)
@makeargcheck("expected a small grayscale patch with values between 0 and 1")
def PATCH(a):
    """Argcheck: normalized grayscale image no larger than 256x256."""
    GRAYSCALE1(a)
    return a.shape[0] <= 256 and a.shape[1] <= 256
### segmentation-related checks
###
### Segmentations come in two flavors: with a white background (for writing to disk
### so that one can see something in file browsers), and with a black background
### (for easy processing). Light segmentations should only exist on disk.
@makeargcheck("expected a segmentation image")
def SEGMENTATION(a):
    """Predicate: 2D integer array of packed segment labels."""
    return isinstance(a, np.ndarray) and a.ndim == 2 and a.dtype in int_dtypes
@makeargcheck("expected a segmentation with white background")
def WHITESEG(a):
    """Predicate: white background, i.e. the largest label is 0xFFFFFF."""
    return np.amax(a) == 0xFFFFFF
@makeargcheck("expected a segmentation with black background")
def BLACKSEG(a):
    """Predicate: no white background pixel (all labels below 0xFFFFFF)."""
    return np.amax(a) < 0xFFFFFF
@makeargcheck("all non-zero pixels in a page segmentation must have a column value >0")
def PAGEEXTRA(a):
    """Predicate: every non-zero label carries a non-zero 0xFF0000 field.

    Labels appear to be packed bitfields where 0xFF0000 masks the column
    part — TODO confirm the packing convention against callers.
    """
    u = np.unique(a)
    u = u[u != 0]  # drop the background label
    u = u[(u & 0xFF0000) == 0]  # keep labels whose column field is zero
    return len(u) == 0  # none may remain
# Composite segmentation checks for the two flavors described above.
LIGHTSEG = ALL(SEGMENTATION, WHITESEG)
DARKSEG = ALL(SEGMENTATION, BLACKSEG)
PAGESEG = ALL(SEGMENTATION, BLACKSEG, PAGE, PAGEEXTRA)
LINESEG = ALL(SEGMENTATION, BLACKSEG, LINE)
LIGHTPAGESEG = ALL(SEGMENTATION, WHITESEG, PAGE)
LIGHTLINESEG = ALL(SEGMENTATION, WHITESEG, LINE)
### special types for pattern recognition
def TDATASET(a):
    """Argcheck: a dataset must yield an ndarray when subscripted."""
    first = a[0]
    if type(first) is not np.ndarray:
        raise CheckError("dataset fails to yield ndarray on subscripting")
def DATASET_SIZE(lo=3, hi=int(1e9)):
    """Argcheck factory: the number of samples must lie in [lo, hi]."""
    @makeargcheck("data set size should be between %s and %s" % (lo, hi))
    def DSSIZE_(a):
        return lo <= len(a) <= hi
    return DSSIZE_


def DATASET_VRANK(n):
    """Argcheck factory: each sample must have rank n (n < 0 disables)."""
    @makeargcheck("data set vectors should have a rank of %d" % n)
    def DSVRANK_(a):
        return n < 0 or a[0].ndim == n
    return DSVRANK_


def DATASET_VSIZE(lo, hi):
    """Argcheck factory: the sample vector size must lie in [lo, hi]."""
    @makeargcheck("data vector size should be between %d and %d" % (lo, hi))
    def DSVSIZE_(a):
        return lo <= a[0].size <= hi
    return DSVSIZE_


def DATASET_VRANGE(lo, hi):
    """Argcheck factory: sample values must lie in [lo, hi].

    Only the first sample is inspected, as a quick sanity check.
    """
    @makeargcheck("data set values should be in the range of %g to %g" % (lo, hi))
    def DSVRANGE_(a):
        return lo <= np.amin(a[0]) and np.amax(a[0]) <= hi
    return DSVRANGE_
def DATASET(
    size0=3,
    size1=int(1e9),
    vsize0=2,
    vsize1=100000,
    vrank=-1,
    vrange0=-300,
    vrange1=300,
    fixedshape=0,
):
    # Composite argument check for pattern-recognition datasets: the dataset
    # must yield ndarrays on subscripting, contain between size0 and size1
    # samples, whose vectors have rank `vrank` (negative = any), between
    # vsize0 and vsize1 elements, and values within [vrange0, vrange1].
    # NOTE(review): `fixedshape` is accepted but unused -- no corresponding
    # check is composed below; confirm whether it should enforce a fixed
    # vector shape or be deprecated.
    return ALL(
        TDATASET,
        DATASET_SIZE(size0, size1),
        DATASET_VRANK(vrank),
        DATASET_VSIZE(vsize0, vsize1),
        DATASET_VRANGE(vrange0, vrange1),
    )
|
meshes | mesh_buckling_plate_tria6 | def create_nodes(femmesh):
# nodes
femmesh.addNode(0.0, 0.0, 0.0, 1)
femmesh.addNode(0.0, 6000.0, 0.0, 2)
femmesh.addNode(8000.0, 0.0, 0.0, 3)
femmesh.addNode(8000.0, 6000.0, 0.0, 4)
femmesh.addNode(0.0, 300.0, 0.0, 5)
femmesh.addNode(0.0, 600.0, 0.0, 6)
femmesh.addNode(0.0, 900.0, 0.0, 7)
femmesh.addNode(0.0, 1200.0, 0.0, 8)
femmesh.addNode(0.0, 1500.0, 0.0, 9)
femmesh.addNode(0.0, 1800.0, 0.0, 10)
femmesh.addNode(0.0, 2100.0, 0.0, 11)
femmesh.addNode(0.0, 2400.0, 0.0, 12)
femmesh.addNode(0.0, 2700.0, 0.0, 13)
femmesh.addNode(0.0, 3000.0, 0.0, 14)
femmesh.addNode(0.0, 3300.0, 0.0, 15)
femmesh.addNode(0.0, 3600.0, 0.0, 16)
femmesh.addNode(0.0, 3900.0, 0.0, 17)
femmesh.addNode(0.0, 4200.0, 0.0, 18)
femmesh.addNode(0.0, 4500.0, 0.0, 19)
femmesh.addNode(0.0, 4800.0, 0.0, 20)
femmesh.addNode(0.0, 5100.0, 0.0, 21)
femmesh.addNode(0.0, 5400.0, 0.0, 22)
femmesh.addNode(0.0, 5700.0, 0.0, 23)
femmesh.addNode(0.0, 150.0, 0.0, 24)
femmesh.addNode(0.0, 450.0, 0.0, 25)
femmesh.addNode(0.0, 750.0, 0.0, 26)
femmesh.addNode(0.0, 1050.0, 0.0, 27)
femmesh.addNode(0.0, 1350.0, 0.0, 28)
femmesh.addNode(0.0, 1650.0, 0.0, 29)
femmesh.addNode(0.0, 1950.0, 0.0, 30)
femmesh.addNode(0.0, 2250.0, 0.0, 31)
femmesh.addNode(0.0, 2550.0, 0.0, 32)
femmesh.addNode(0.0, 2850.0, 0.0, 33)
femmesh.addNode(0.0, 3150.0, 0.0, 34)
femmesh.addNode(0.0, 3450.0, 0.0, 35)
femmesh.addNode(0.0, 3750.0, 0.0, 36)
femmesh.addNode(0.0, 4050.0, 0.0, 37)
femmesh.addNode(0.0, 4350.0, 0.0, 38)
femmesh.addNode(0.0, 4650.0, 0.0, 39)
femmesh.addNode(0.0, 4950.0, 0.0, 40)
femmesh.addNode(0.0, 5250.0, 0.0, 41)
femmesh.addNode(0.0, 5550.0, 0.0, 42)
femmesh.addNode(0.0, 5850.0, 0.0, 43)
femmesh.addNode(296.29629629629585, 0.0, 0.0, 44)
femmesh.addNode(592.5925925925918, 0.0, 0.0, 45)
femmesh.addNode(888.8888888888874, 0.0, 0.0, 46)
femmesh.addNode(1185.1851851851832, 0.0, 0.0, 47)
femmesh.addNode(1481.4814814814792, 0.0, 0.0, 48)
femmesh.addNode(1777.7777777777753, 0.0, 0.0, 49)
femmesh.addNode(2074.0740740740716, 0.0, 0.0, 50)
femmesh.addNode(2370.370370370368, 0.0, 0.0, 51)
femmesh.addNode(2666.666666666663, 0.0, 0.0, 52)
femmesh.addNode(2962.962962962958, 0.0, 0.0, 53)
femmesh.addNode(3259.259259259253, 0.0, 0.0, 54)
femmesh.addNode(3555.555555555548, 0.0, 0.0, 55)
femmesh.addNode(3851.851851851843, 0.0, 0.0, 56)
femmesh.addNode(4148.148148148138, 0.0, 0.0, 57)
femmesh.addNode(4444.4444444444325, 0.0, 0.0, 58)
femmesh.addNode(4740.740740740728, 0.0, 0.0, 59)
femmesh.addNode(5037.037037037026, 0.0, 0.0, 60)
femmesh.addNode(5333.333333333322, 0.0, 0.0, 61)
femmesh.addNode(5629.6296296296205, 0.0, 0.0, 62)
femmesh.addNode(5925.925925925918, 0.0, 0.0, 63)
femmesh.addNode(6222.222222222214, 0.0, 0.0, 64)
femmesh.addNode(6518.518518518513, 0.0, 0.0, 65)
femmesh.addNode(6814.81481481481, 0.0, 0.0, 66)
femmesh.addNode(7111.111111111108, 0.0, 0.0, 67)
femmesh.addNode(7407.407407407406, 0.0, 0.0, 68)
femmesh.addNode(7703.703703703703, 0.0, 0.0, 69)
femmesh.addNode(148.14814814814792, 0.0, 0.0, 70)
femmesh.addNode(444.44444444444383, 0.0, 0.0, 71)
femmesh.addNode(740.7407407407396, 0.0, 0.0, 72)
femmesh.addNode(1037.0370370370354, 0.0, 0.0, 73)
femmesh.addNode(1333.3333333333312, 0.0, 0.0, 74)
femmesh.addNode(1629.6296296296273, 0.0, 0.0, 75)
femmesh.addNode(1925.9259259259236, 0.0, 0.0, 76)
femmesh.addNode(2222.22222222222, 0.0, 0.0, 77)
femmesh.addNode(2518.5185185185155, 0.0, 0.0, 78)
femmesh.addNode(2814.8148148148102, 0.0, 0.0, 79)
femmesh.addNode(3111.111111111106, 0.0, 0.0, 80)
femmesh.addNode(3407.4074074074006, 0.0, 0.0, 81)
femmesh.addNode(3703.7037037036953, 0.0, 0.0, 82)
femmesh.addNode(3999.9999999999905, 0.0, 0.0, 83)
femmesh.addNode(4296.296296296286, 0.0, 0.0, 84)
femmesh.addNode(4592.59259259258, 0.0, 0.0, 85)
femmesh.addNode(4888.888888888877, 0.0, 0.0, 86)
femmesh.addNode(5185.185185185173, 0.0, 0.0, 87)
femmesh.addNode(5481.481481481472, 0.0, 0.0, 88)
femmesh.addNode(5777.777777777769, 0.0, 0.0, 89)
femmesh.addNode(6074.074074074066, 0.0, 0.0, 90)
femmesh.addNode(6370.370370370364, 0.0, 0.0, 91)
femmesh.addNode(6666.6666666666615, 0.0, 0.0, 92)
femmesh.addNode(6962.962962962959, 0.0, 0.0, 93)
femmesh.addNode(7259.259259259257, 0.0, 0.0, 94)
femmesh.addNode(7555.555555555554, 0.0, 0.0, 95)
femmesh.addNode(7851.851851851851, 0.0, 0.0, 96)
femmesh.addNode(8000.0, 300.0, 0.0, 97)
femmesh.addNode(8000.0, 600.0, 0.0, 98)
femmesh.addNode(8000.0, 900.0, 0.0, 99)
femmesh.addNode(8000.0, 1200.0, 0.0, 100)
femmesh.addNode(8000.0, 1500.0, 0.0, 101)
femmesh.addNode(8000.0, 1800.0, 0.0, 102)
femmesh.addNode(8000.0, 2100.0, 0.0, 103)
femmesh.addNode(8000.0, 2400.0, 0.0, 104)
femmesh.addNode(8000.0, 2700.0, 0.0, 105)
femmesh.addNode(8000.0, 3000.0, 0.0, 106)
femmesh.addNode(8000.0, 3300.0, 0.0, 107)
femmesh.addNode(8000.0, 3600.0, 0.0, 108)
femmesh.addNode(8000.0, 3900.0, 0.0, 109)
femmesh.addNode(8000.0, 4200.0, 0.0, 110)
femmesh.addNode(8000.0, 4500.0, 0.0, 111)
femmesh.addNode(8000.0, 4800.0, 0.0, 112)
femmesh.addNode(8000.0, 5100.0, 0.0, 113)
femmesh.addNode(8000.0, 5400.0, 0.0, 114)
femmesh.addNode(8000.0, 5700.0, 0.0, 115)
femmesh.addNode(8000.0, 150.0, 0.0, 116)
femmesh.addNode(8000.0, 450.0, 0.0, 117)
femmesh.addNode(8000.0, 750.0, 0.0, 118)
femmesh.addNode(8000.0, 1050.0, 0.0, 119)
femmesh.addNode(8000.0, 1350.0, 0.0, 120)
femmesh.addNode(8000.0, 1650.0, 0.0, 121)
femmesh.addNode(8000.0, 1950.0, 0.0, 122)
femmesh.addNode(8000.0, 2250.0, 0.0, 123)
femmesh.addNode(8000.0, 2550.0, 0.0, 124)
femmesh.addNode(8000.0, 2850.0, 0.0, 125)
femmesh.addNode(8000.0, 3150.0, 0.0, 126)
femmesh.addNode(8000.0, 3450.0, 0.0, 127)
femmesh.addNode(8000.0, 3750.0, 0.0, 128)
femmesh.addNode(8000.0, 4050.0, 0.0, 129)
femmesh.addNode(8000.0, 4350.0, 0.0, 130)
femmesh.addNode(8000.0, 4650.0, 0.0, 131)
femmesh.addNode(8000.0, 4950.0, 0.0, 132)
femmesh.addNode(8000.0, 5250.0, 0.0, 133)
femmesh.addNode(8000.0, 5550.0, 0.0, 134)
femmesh.addNode(8000.0, 5850.0, 0.0, 135)
femmesh.addNode(296.29629629629585, 6000.0, 0.0, 136)
femmesh.addNode(592.5925925925918, 6000.0, 0.0, 137)
femmesh.addNode(888.8888888888874, 6000.0, 0.0, 138)
femmesh.addNode(1185.1851851851832, 6000.0, 0.0, 139)
femmesh.addNode(1481.4814814814792, 6000.0, 0.0, 140)
femmesh.addNode(1777.7777777777753, 6000.0, 0.0, 141)
femmesh.addNode(2074.0740740740716, 6000.0, 0.0, 142)
femmesh.addNode(2370.370370370368, 6000.0, 0.0, 143)
femmesh.addNode(2666.666666666663, 6000.0, 0.0, 144)
femmesh.addNode(2962.962962962958, 6000.0, 0.0, 145)
femmesh.addNode(3259.259259259253, 6000.0, 0.0, 146)
femmesh.addNode(3555.555555555548, 6000.0, 0.0, 147)
femmesh.addNode(3851.851851851843, 6000.0, 0.0, 148)
femmesh.addNode(4148.148148148138, 6000.0, 0.0, 149)
femmesh.addNode(4444.4444444444325, 6000.0, 0.0, 150)
femmesh.addNode(4740.740740740728, 6000.0, 0.0, 151)
femmesh.addNode(5037.037037037026, 6000.0, 0.0, 152)
femmesh.addNode(5333.333333333322, 6000.0, 0.0, 153)
femmesh.addNode(5629.6296296296205, 6000.0, 0.0, 154)
femmesh.addNode(5925.925925925918, 6000.0, 0.0, 155)
femmesh.addNode(6222.222222222214, 6000.0, 0.0, 156)
femmesh.addNode(6518.518518518513, 6000.0, 0.0, 157)
femmesh.addNode(6814.81481481481, 6000.0, 0.0, 158)
femmesh.addNode(7111.111111111108, 6000.0, 0.0, 159)
femmesh.addNode(7407.407407407406, 6000.0, 0.0, 160)
femmesh.addNode(7703.703703703703, 6000.0, 0.0, 161)
femmesh.addNode(148.14814814814792, 6000.0, 0.0, 162)
femmesh.addNode(444.44444444444383, 6000.0, 0.0, 163)
femmesh.addNode(740.7407407407396, 6000.0, 0.0, 164)
femmesh.addNode(1037.0370370370354, 6000.0, 0.0, 165)
femmesh.addNode(1333.3333333333312, 6000.0, 0.0, 166)
femmesh.addNode(1629.6296296296273, 6000.0, 0.0, 167)
femmesh.addNode(1925.9259259259236, 6000.0, 0.0, 168)
femmesh.addNode(2222.22222222222, 6000.0, 0.0, 169)
femmesh.addNode(2518.5185185185155, 6000.0, 0.0, 170)
femmesh.addNode(2814.8148148148102, 6000.0, 0.0, 171)
femmesh.addNode(3111.111111111106, 6000.0, 0.0, 172)
femmesh.addNode(3407.4074074074006, 6000.0, 0.0, 173)
femmesh.addNode(3703.7037037036953, 6000.0, 0.0, 174)
femmesh.addNode(3999.9999999999905, 6000.0, 0.0, 175)
femmesh.addNode(4296.296296296286, 6000.0, 0.0, 176)
femmesh.addNode(4592.59259259258, 6000.0, 0.0, 177)
femmesh.addNode(4888.888888888877, 6000.0, 0.0, 178)
femmesh.addNode(5185.185185185173, 6000.0, 0.0, 179)
femmesh.addNode(5481.481481481472, 6000.0, 0.0, 180)
femmesh.addNode(5777.777777777769, 6000.0, 0.0, 181)
femmesh.addNode(6074.074074074066, 6000.0, 0.0, 182)
femmesh.addNode(6370.370370370364, 6000.0, 0.0, 183)
femmesh.addNode(6666.6666666666615, 6000.0, 0.0, 184)
femmesh.addNode(6962.962962962959, 6000.0, 0.0, 185)
femmesh.addNode(7259.259259259257, 6000.0, 0.0, 186)
femmesh.addNode(7555.555555555554, 6000.0, 0.0, 187)
femmesh.addNode(7851.851851851851, 6000.0, 0.0, 188)
femmesh.addNode(3397.675066189013, 2995.934030581119, 0.0, 189)
femmesh.addNode(5657.1164021163995, 3656.594160297862, 0.0, 190)
femmesh.addNode(4950.416162456272, 1895.5621403740552, 0.0, 191)
femmesh.addNode(1893.6104054596278, 4113.981242140242, 0.0, 192)
femmesh.addNode(1872.4714864952516, 1842.7996085860893, 0.0, 193)
femmesh.addNode(4185.972932127833, 4396.738427421472, 0.0, 194)
femmesh.addNode(6447.307132798734, 2329.735043709901, 0.0, 195)
femmesh.addNode(3402.516938748461, 1517.956170752087, 0.0, 196)
femmesh.addNode(6629.034405789423, 4643.278384787789, 0.0, 197)
femmesh.addNode(3033.3166107475263, 4655.661389078079, 0.0, 198)
femmesh.addNode(1287.3985866605758, 2987.824421819152, 0.0, 199)
femmesh.addNode(5955.974285979601, 1196.7804284982203, 0.0, 200)
femmesh.addNode(4608.739785878135, 3045.546187687468, 0.0, 201)
femmesh.addNode(5340.334238522032, 4806.896193286155, 0.0, 202)
femmesh.addNode(6811.315625530508, 3387.1905777588804, 0.0, 203)
femmesh.addNode(1101.9837017424315, 4896.897669975468, 0.0, 204)
femmesh.addNode(1101.9837017424318, 1103.1023300245324, 0.0, 205)
femmesh.addNode(2400.271622137972, 2857.386398815501, 0.0, 206)
femmesh.addNode(4284.361837816873, 1031.6656017058608, 0.0, 207)
femmesh.addNode(2478.7599581374348, 1032.410098352988, 0.0, 208)
femmesh.addNode(6957.046136624422, 1033.6408541603678, 0.0, 209)
femmesh.addNode(4036.751618466364, 2276.157754556386, 0.0, 210)
femmesh.addNode(2813.7600959689394, 3763.3398184475423, 0.0, 211)
femmesh.addNode(5526.041899263087, 2675.1490601747937, 0.0, 212)
femmesh.addNode(2087.3567941456886, 5051.849438915983, 0.0, 213)
femmesh.addNode(922.4074414497819, 2101.1567771400555, 0.0, 214)
femmesh.addNode(974.0422185325958, 3912.8247971981245, 0.0, 215)
femmesh.addNode(2743.586624399766, 2071.4546368160372, 0.0, 216)
femmesh.addNode(3765.2432557370757, 5152.552577516728, 0.0, 217)
femmesh.addNode(5179.825211944617, 842.7358197532764, 0.0, 218)
femmesh.addNode(7161.146696923641, 1860.8838419302124, 0.0, 219)
femmesh.addNode(3650.614686079948, 3793.899049835917, 0.0, 220)
femmesh.addNode(4622.163182662431, 5198.554136222392, 0.0, 221)
femmesh.addNode(4849.157741854054, 3859.795231713585, 0.0, 222)
femmesh.addNode(7183.217668730613, 2689.168961044971, 0.0, 223)
femmesh.addNode(6016.951150744347, 5178.092072492946, 0.0, 224)
femmesh.addNode(7180.890501881602, 5191.352542912435, 0.0, 225)
femmesh.addNode(7237.323529050707, 4159.19359802075, 0.0, 226)
femmesh.addNode(3606.4227449015343, 787.3721600011913, 0.0, 227)
femmesh.addNode(5713.982860463556, 1938.8090157441297, 0.0, 228)
femmesh.addNode(1756.7081232879061, 784.9829985050347, 0.0, 229)
femmesh.addNode(5928.931994200185, 4306.400483322682, 0.0, 230)
femmesh.addNode(6157.2654571960275, 3100.1994077631202, 0.0, 231)
femmesh.addNode(1851.6605441211916, 3411.1415779437475, 0.0, 232)
femmesh.addNode(6490.357652058749, 1651.7558837325553, 0.0, 233)
femmesh.addNode(6361.0453067015915, 691.7471420781542, 0.0, 234)
femmesh.addNode(3387.3446151833773, 2292.28606491495, 0.0, 235)
femmesh.addNode(2725.6609598761743, 5286.145622664251, 0.0, 236)
femmesh.addNode(657.1616804008489, 2682.4095085782874, 0.0, 237)
femmesh.addNode(1769.0646642165354, 2531.7018154860525, 0.0, 238)
femmesh.addNode(4241.048725984901, 3562.2042509829134, 0.0, 239)
femmesh.addNode(6447.836877657186, 4060.838068266804, 0.0, 240)
femmesh.addNode(2997.2658727270814, 645.8394329708688, 0.0, 241)
femmesh.addNode(3569.404821855382, 4426.604571366504, 0.0, 242)
femmesh.addNode(717.2819816319766, 631.4118928754859, 0.0, 243)
femmesh.addNode(683.9644443642508, 5338.739943732482, 0.0, 244)
femmesh.addNode(4040.8704915388403, 1605.40586038152, 0.0, 245)
femmesh.addNode(2480.2860811929954, 4327.634132798396, 0.0, 246)
femmesh.addNode(1550.8369065130175, 5323.670460339219, 0.0, 247)
femmesh.addNode(638.2111157671299, 4456.2851347090345, 0.0, 248)
femmesh.addNode(645.0144875487056, 1529.3931506962488, 0.0, 249)
femmesh.addNode(606.0899831585815, 3274.4917946223304, 0.0, 250)
femmesh.addNode(5746.248746872634, 602.7107460703565, 0.0, 251)
femmesh.addNode(4024.1819322631945, 2882.217777079039, 0.0, 252)
femmesh.addNode(4934.4309702782775, 2519.1809619784544, 0.0, 253)
femmesh.addNode(5214.473879473443, 3217.022143244332, 0.0, 254)
femmesh.addNode(5344.474372306417, 1433.0227093810447, 0.0, 255)
femmesh.addNode(5185.185185185174, 5402.558251627225, 0.0, 256)
femmesh.addNode(4838.704000595254, 4432.085273325963, 0.0, 257)
femmesh.addNode(6591.41004163284, 5367.3326714702025, 0.0, 258)
femmesh.addNode(7411.25451639115, 596.1961450493792, 0.0, 259)
femmesh.addNode(5392.529206098521, 4254.269772712942, 0.0, 260)
femmesh.addNode(7432.413552786258, 3248.9659932976074, 0.0, 261)
femmesh.addNode(1619.6939316495213, 1300.3719347779015, 0.0, 262)
femmesh.addNode(2835.3175846871354, 3148.619933239689, 0.0, 263)
femmesh.addNode(1646.535354557444, 4659.675491871235, 0.0, 264)
femmesh.addNode(2825.114231337695, 1500.5142187870686, 0.0, 265)
femmesh.addNode(7431.546705667136, 4661.610494463315, 0.0, 266)
femmesh.addNode(3263.769879015891, 5439.490826087185, 0.0, 267)
femmesh.addNode(4684.368677470502, 606.1672674234318, 0.0, 268)
femmesh.addNode(7445.043104270031, 1338.3109649162725, 0.0, 269)
femmesh.addNode(1266.6159128197107, 529.0026539507099, 0.0, 270)
femmesh.addNode(2894.3063699430318, 2599.204848452612, 0.0, 271)
femmesh.addNode(4775.466605021726, 1425.2175854203888, 0.0, 272)
femmesh.addNode(6649.093544781434, 2830.957997230098, 0.0, 273)
femmesh.addNode(2255.730762865147, 2320.1476915303238, 0.0, 274)
femmesh.addNode(4128.998776673298, 508.32959442572815, 0.0, 275)
femmesh.addNode(2268.3197941928315, 532.6087740901199, 0.0, 276)
femmesh.addNode(1494.5084166412269, 3795.1972419759486, 0.0, 277)
femmesh.addNode(2270.2438945572353, 1541.2905631898257, 0.0, 278)
femmesh.addNode(3102.2598007284423, 4208.58580621284, 0.0, 279)
femmesh.addNode(2272.4869687423907, 3775.809362934254, 0.0, 280)
femmesh.addNode(1249.1086797286632, 1616.2422721486678, 0.0, 281)
femmesh.addNode(1194.5487003459718, 4404.719080046412, 0.0, 282)
femmesh.addNode(7481.424544110604, 2279.598230715955, 0.0, 283)
femmesh.addNode(4221.873252660906, 5485.354618892098, 0.0, 284)
femmesh.addNode(6837.50177198781, 500.8695378626019, 0.0, 285)
femmesh.addNode(4435.206690695379, 1952.623231449892, 0.0, 286)
femmesh.addNode(4435.863231127135, 2541.7665395151253, 0.0, 287)
femmesh.addNode(3222.60025912073, 3430.8441977764146, 0.0, 288)
femmesh.addNode(3794.309440521653, 3323.2397059701725, 0.0, 289)
femmesh.addNode(5935.35863180151, 2368.6014943950568, 0.0, 290)
femmesh.addNode(6159.350257910878, 3605.0654382216726, 0.0, 291)
femmesh.addNode(1453.3196818484107, 2142.5323902878413, 0.0, 292)
femmesh.addNode(3658.41793414081, 1925.187396680512, 0.0, 293)
femmesh.addNode(2222.222222222219, 5526.419323570788, 0.0, 294)
femmesh.addNode(4203.249322996915, 4911.825239320628, 0.0, 295)
femmesh.addNode(477.56674021983457, 2245.739760834411, 0.0, 296)
femmesh.addNode(5679.630489410519, 5513.93397676138, 0.0, 297)
femmesh.addNode(7534.407410028579, 3738.5928525453073, 0.0, 298)
femmesh.addNode(5302.105821445276, 2238.951353101928, 0.0, 299)
femmesh.addNode(455.60537461227864, 4015.3740286760267, 0.0, 300)
femmesh.addNode(2521.8661694161806, 4807.703973058636, 0.0, 301)
femmesh.addNode(6500.5780450773245, 1143.7331668716852, 0.0, 302)
femmesh.addNode(3113.505831612044, 1113.9279912480858, 0.0, 303)
femmesh.addNode(6938.8514360835, 2309.129525240316, 0.0, 304)
femmesh.addNode(1172.437325575929, 2529.0884365994834, 0.0, 305)
femmesh.addNode(1095.0464929086543, 5519.79915794031, 0.0, 306)
femmesh.addNode(7540.017932967519, 5538.335041248212, 0.0, 307)
femmesh.addNode(481.7853096084257, 1103.6339161833605, 0.0, 308)
femmesh.addNode(482.2266217807585, 4893.646426325952, 0.0, 309)
femmesh.addNode(1178.9305753167218, 3455.084853708434, 0.0, 310)
femmesh.addNode(5656.077683368319, 3143.1241199906663, 0.0, 311)
femmesh.addNode(3768.0099646627423, 1201.4317309018002, 0.0, 312)
femmesh.addNode(3176.975472281174, 1904.2763839869097, 0.0, 313)
femmesh.addNode(5803.882652287688, 4783.805453238971, 0.0, 314)
femmesh.addNode(2114.1878090878636, 4571.661957778536, 0.0, 315)
femmesh.addNode(6180.499666359125, 1976.8703310582544, 0.0, 316)
femmesh.addNode(2404.7221638079945, 3311.809044588913, 0.0, 317)
femmesh.addNode(3703.189057704747, 5588.803232125313, 0.0, 318)
femmesh.addNode(2035.7445392001036, 1119.4562251458779, 0.0, 319)
femmesh.addNode(6926.1946899518225, 1474.0564103457014, 0.0, 320)
femmesh.addNode(4029.5338651416737, 3996.367012511001, 0.0, 321)
femmesh.addNode(1893.8877094741165, 2966.2822019009022, 0.0, 322)
femmesh.addNode(3428.3194594898437, 4880.707091435496, 0.0, 323)
femmesh.addNode(5354.509432766604, 439.2063291726035, 0.0, 324)
femmesh.addNode(7583.649470251285, 2862.166033957777, 0.0, 325)
femmesh.addNode(3367.759405746351, 420.0748062916648, 0.0, 326)
femmesh.addNode(4786.929259037574, 5570.325435420024, 0.0, 327)
femmesh.addNode(4954.97425049191, 4917.102596761554, 0.0, 328)
femmesh.addNode(6238.340888026186, 4809.087427725724, 0.0, 329)
femmesh.addNode(7024.433674904425, 4804.772199991581, 0.0, 330)
femmesh.addNode(4478.581235074521, 4069.9117306609655, 0.0, 331)
femmesh.addNode(5569.431810033599, 993.7579210576936, 0.0, 332)
femmesh.addNode(4802.148254473836, 3435.796687607469, 0.0, 333)
femmesh.addNode(5228.302516957269, 3645.750156453162, 0.0, 334)
femmesh.addNode(3657.8148639814453, 2643.520783890189, 0.0, 335)
femmesh.addNode(7019.076693738983, 5576.332053786248, 0.0, 336)
femmesh.addNode(7573.6082130647155, 1763.2539353761488, 0.0, 337)
femmesh.addNode(6240.677755204676, 5576.951209655838, 0.0, 338)
femmesh.addNode(6860.343772307708, 4301.31606715292, 0.0, 339)
femmesh.addNode(6243.91143599323, 2687.122440338326, 0.0, 340)
femmesh.addNode(7634.874461019663, 5055.900701239916, 0.0, 341)
femmesh.addNode(2658.8071340610913, 395.0225543628404, 0.0, 342)
femmesh.addNode(7549.03268297424, 934.7115216601442, 0.0, 343)
femmesh.addNode(6902.215957905839, 3845.0291642762386, 0.0, 344)
femmesh.addNode(7025.59003468577, 3082.21047851184, 0.0, 345)
femmesh.addNode(7584.06159843794, 4253.165203468226, 0.0, 346)
femmesh.addNode(1569.839297525874, 303.8313638308509, 0.0, 347)
femmesh.addNode(4469.621538372686, 1512.7283176410292, 0.0, 348)
femmesh.addNode(6095.902011993467, 377.2371543174669, 0.0, 349)
femmesh.addNode(2403.6969246794406, 1872.7602557566709, 0.0, 350)
femmesh.addNode(5948.375649812166, 3996.9838710873087, 0.0, 351)
femmesh.addNode(4781.183338415543, 941.2715323463972, 0.0, 352)
femmesh.addNode(407.1231268370476, 3024.859353790048, 0.0, 353)
femmesh.addNode(5896.032305517452, 1607.6587417561202, 0.0, 354)
femmesh.addNode(3800.7427921477174, 376.73372396162324, 0.0, 355)
femmesh.addNode(4245.013758272589, 3192.4890176916206, 0.0, 356)
femmesh.addNode(6350.501198385864, 4314.729623053648, 0.0, 357)
femmesh.addNode(5528.242804228755, 5074.10907722706, 0.0, 358)
femmesh.addNode(393.49683907692514, 1857.1094669769416, 0.0, 359)
femmesh.addNode(3837.3847068674945, 4725.522234842084, 0.0, 360)
femmesh.addNode(6650.048741944859, 1920.4048544080372, 0.0, 361)
femmesh.addNode(5336.467319484916, 1825.0684300956189, 0.0, 362)
femmesh.addNode(1837.5229945063875, 5643.078707336442, 0.0, 363)
femmesh.addNode(4945.651206791982, 2881.6961694004817, 0.0, 364)
femmesh.addNode(6696.11805675985, 5078.574081412251, 0.0, 365)
femmesh.addNode(3008.441213646821, 5119.911067458521, 0.0, 366)
femmesh.addNode(884.1306548849702, 3053.8376993440547, 0.0, 367)
femmesh.addNode(4562.386541644964, 4741.481920990653, 0.0, 368)
femmesh.addNode(4651.092496390126, 2254.2313235399834, 0.0, 369)
femmesh.addNode(1472.4365299676333, 3340.810088616796, 0.0, 370)
femmesh.addNode(1471.8549770258608, 1000.9516164765802, 0.0, 371)
femmesh.addNode(5599.34672594549, 4548.619957372794, 0.0, 372)
femmesh.addNode(6448.819526273548, 3351.4327801206664, 0.0, 373)
femmesh.addNode(3256.7405344520134, 3859.598342226095, 0.0, 374)
femmesh.addNode(392.86172127684944, 355.64193876170407, 0.0, 375)
femmesh.addNode(375.15126669697383, 5625.79938908758, 0.0, 376)
femmesh.addNode(1442.1012987574402, 4141.322487978434, 0.0, 377)
femmesh.addNode(2957.7309948022894, 5628.918535121977, 0.0, 378)
femmesh.addNode(4009.445808752669, 900.9777206491683, 0.0, 379)
femmesh.addNode(7197.327269328491, 3572.6995068346623, 0.0, 380)
femmesh.addNode(894.6059541322886, 358.7338188301992, 0.0, 381)
femmesh.addNode(430.6916645551602, 3560.1693795597657, 0.0, 382)
femmesh.addNode(1711.5670273114922, 5008.297488116714, 0.0, 383)
femmesh.addNode(5895.74601206341, 2805.958293007776, 0.0, 384)
femmesh.addNode(6100.17962473483, 882.1151603930263, 0.0, 385)
femmesh.addNode(4995.051996249644, 426.75811586475294, 0.0, 386)
femmesh.addNode(1564.262259208933, 1699.0540211769903, 0.0, 387)
femmesh.addNode(3193.136252750478, 2737.80932266654, 0.0, 388)
femmesh.addNode(2539.8199062579647, 2547.1529707588747, 0.0, 389)
femmesh.addNode(6550.698578808808, 348.8627944392093, 0.0, 390)
femmesh.addNode(1817.283909345593, 3770.740176561226, 0.0, 391)
femmesh.addNode(2555.966991266524, 5658.54834676069, 0.0, 392)
femmesh.addNode(7090.020341851579, 4365.621854931105, 0.0, 393)
femmesh.addNode(336.3574764539135, 2575.450172238827, 0.0, 394)
femmesh.addNode(5186.238809766994, 4487.186813822886, 0.0, 395)
femmesh.addNode(7648.548186063908, 350.7937439880927, 0.0, 396)
femmesh.addNode(3461.5562949275245, 1069.2217280332845, 0.0, 397)
femmesh.addNode(1997.485473037803, 1563.2079501336295, 0.0, 398)
femmesh.addNode(1992.4129851047553, 342.158041424327, 0.0, 399)
femmesh.addNode(971.1205318554096, 776.876012694342, 0.0, 400)
femmesh.addNode(2975.6516658450573, 2285.1446050674276, 0.0, 401)
femmesh.addNode(5025.513618133736, 4181.708582771429, 0.0, 402)
femmesh.addNode(5151.743808818835, 1146.1563419902773, 0.0, 403)
femmesh.addNode(2548.898661746062, 4003.292929161928, 0.0, 404)
femmesh.addNode(2836.6926337823834, 4418.8050771149265, 0.0, 405)
femmesh.addNode(1920.5400397275678, 2218.9470149814993, 0.0, 406)
femmesh.addNode(2813.2330483403935, 940.0172732983631, 0.0, 407)
femmesh.addNode(6263.116396852106, 1381.025081358192, 0.0, 408)
femmesh.addNode(4431.5335477157805, 387.14720536155346, 0.0, 409)
femmesh.addNode(1580.0475343627381, 2838.3953290897625, 0.0, 410)
femmesh.addNode(6676.235021893778, 842.8275912399433, 0.0, 411)
femmesh.addNode(707.9748958240762, 5682.972567815318, 0.0, 412)
femmesh.addNode(3733.71870076103, 4107.870463918293, 0.0, 413)
femmesh.addNode(229.60103168917135, 756.8154402733644, 0.0, 414)
femmesh.addNode(228.0033973830223, 5242.459925168885, 0.0, 415)
femmesh.addNode(7145.901703750549, 366.6169532222625, 0.0, 416)
femmesh.addNode(2089.740237344564, 2622.54767610804, 0.0, 417)
femmesh.addNode(956.5211885818039, 5227.599518081492, 0.0, 418)
femmesh.addNode(4455.838457668975, 5657.750829458527, 0.0, 419)
femmesh.addNode(810.4576724031075, 1221.4131645302532, 0.0, 420)
femmesh.addNode(800.4168705293669, 4768.602180867976, 0.0, 421)
femmesh.addNode(327.64594779543205, 4306.874959379801, 0.0, 422)
femmesh.addNode(2424.140768923782, 5128.309569458421, 0.0, 423)
femmesh.addNode(1371.9487665668998, 5663.517383947134, 0.0, 424)
femmesh.addNode(324.65023660724717, 1392.3454233802788, 0.0, 425)
femmesh.addNode(6448.10060698228, 5132.244259251715, 0.0, 426)
femmesh.addNode(3665.660531632448, 2283.744291028161, 0.0, 427)
femmesh.addNode(2687.556746104989, 2861.467062185074, 0.0, 428)
femmesh.addNode(6587.483319713963, 3683.957244081274, 0.0, 429)
femmesh.addNode(5892.420190147987, 3374.2837938799858, 0.0, 430)
femmesh.addNode(2230.4151123596644, 4122.5127535582005, 0.0, 431)
femmesh.addNode(7680.876160864512, 2585.1649045001727, 0.0, 432)
femmesh.addNode(893.6016602732036, 1773.9528382411743, 0.0, 433)
femmesh.addNode(4584.014275220912, 3642.1518833407285, 0.0, 434)
femmesh.addNode(881.7373010169938, 4243.37390096147, 0.0, 435)
femmesh.addNode(7699.657480336969, 3467.671338869537, 0.0, 436)
femmesh.addNode(852.0663634056174, 2421.419154449371, 0.0, 437)
femmesh.addNode(4113.615129922027, 1289.4104734229475, 0.0, 438)
femmesh.addNode(3262.4424934747362, 4504.062929891488, 0.0, 439)
femmesh.addNode(5050.927585973604, 1584.1480393129732, 0.0, 440)
femmesh.addNode(3917.733719156471, 3621.6969973860146, 0.0, 441)
femmesh.addNode(1383.4039454847152, 5079.02150318774, 0.0, 442)
femmesh.addNode(3317.3662212002037, 774.4172335199089, 0.0, 443)
femmesh.addNode(5434.926525016685, 5674.7073384339465, 0.0, 444)
femmesh.addNode(3052.1910643655883, 319.0914604585107, 0.0, 445)
femmesh.addNode(717.6300240610055, 3664.156515438797, 0.0, 446)
femmesh.addNode(5587.988128890475, 2362.0924844129895, 0.0, 447)
femmesh.addNode(5689.034938019035, 1412.6161576564828, 0.0, 448)
femmesh.addNode(4115.567226082192, 1935.4399814346266, 0.0, 449)
femmesh.addNode(3089.4436960444414, 3015.8059012370027, 0.0, 450)
femmesh.addNode(3698.3683458223036, 1617.2321178200973, 0.0, 451)
femmesh.addNode(2502.1369544602003, 748.8123829910405, 0.0, 452)
femmesh.addNode(4922.2452503380755, 5211.4954417112995, 0.0, 453)
femmesh.addNode(2691.8483469669936, 3429.4788468746488, 0.0, 454)
femmesh.addNode(5685.819187696872, 4129.6507590219635, 0.0, 455)
femmesh.addNode(5822.554885478489, 306.5856549170968, 0.0, 456)
femmesh.addNode(7216.115596016455, 916.1361542746558, 0.0, 457)
femmesh.addNode(5217.3150338915375, 2702.2844991908223, 0.0, 458)
femmesh.addNode(3859.6204530607442, 4456.558606204545, 0.0, 459)
femmesh.addNode(6704.385602542148, 2515.634826215184, 0.0, 460)
femmesh.addNode(2672.6263318282963, 1765.1713241838895, 0.0, 461)
femmesh.addNode(4349.282176041125, 2843.4643472435573, 0.0, 462)
femmesh.addNode(297.8202976980327, 4630.770403569758, 0.0, 463)
femmesh.addNode(4120.395380973169, 2592.3235710140902, 0.0, 464)
femmesh.addNode(3451.7958869359236, 5717.139812959343, 0.0, 465)
femmesh.addNode(2218.2128307108846, 866.6565808601297, 0.0, 466)
femmesh.addNode(1879.800308930349, 5316.652962324044, 0.0, 467)
femmesh.addNode(3709.310532254126, 2968.601960842235, 0.0, 468)
femmesh.addNode(4743.776177906142, 1689.1917263705027, 0.0, 469)
femmesh.addNode(4394.639457933925, 723.988332836765, 0.0, 470)
femmesh.addNode(4699.038258206253, 2744.4658764707215, 0.0, 471)
femmesh.addNode(5937.460290603522, 5679.968274980756, 0.0, 472)
femmesh.addNode(1339.0324241513529, 1305.8487643076937, 0.0, 473)
femmesh.addNode(1336.431241509912, 4652.000840803289, 0.0, 474)
femmesh.addNode(6661.097094512917, 5718.050012997552, 0.0, 475)
femmesh.addNode(2547.0274326814665, 1344.085721719676, 0.0, 476)
femmesh.addNode(5448.932300429864, 3404.4103026806215, 0.0, 477)
femmesh.addNode(4513.582743502173, 4338.697098684659, 0.0, 478)
femmesh.addNode(6709.73178466819, 3102.4978256473055, 0.0, 479)
femmesh.addNode(4988.469116267259, 2197.8663096630016, 0.0, 480)
femmesh.addNode(3418.55180209943, 4146.383187281093, 0.0, 481)
femmesh.addNode(2101.828981491012, 3192.8293730652504, 0.0, 482)
femmesh.addNode(5435.641017808661, 2943.286147410158, 0.0, 483)
femmesh.addNode(3441.520417883232, 5142.192781204675, 0.0, 484)
femmesh.addNode(312.84027258935737, 3309.6956057010893, 0.0, 485)
femmesh.addNode(4073.705568853905, 5182.175314071145, 0.0, 486)
femmesh.addNode(5210.5884757921585, 5091.138447121744, 0.0, 487)
femmesh.addNode(5457.949657229582, 3873.653051222626, 0.0, 488)
femmesh.addNode(5070.700005727941, 5692.775772146069, 0.0, 489)
femmesh.addNode(2834.195678482428, 4093.3002651651827, 0.0, 490)
femmesh.addNode(4343.835626129535, 2223.7145272434877, 0.0, 491)
femmesh.addNode(3157.5252273551405, 1641.7187855472714, 0.0, 492)
femmesh.addNode(7739.687107200998, 4029.0189475851225, 0.0, 493)
femmesh.addNode(1193.9820984945802, 1964.372301363749, 0.0, 494)
femmesh.addNode(7726.2735900286625, 687.5326027625042, 0.0, 495)
femmesh.addNode(6934.996323144335, 5322.1498244889735, 0.0, 496)
femmesh.addNode(3965.492306109945, 5719.508456470199, 0.0, 497)
femmesh.addNode(1833.531211265304, 4452.57587935261, 0.0, 498)
femmesh.addNode(7255.955382892531, 5739.957018893448, 0.0, 499)
femmesh.addNode(4544.442112474293, 3339.8755158051863, 0.0, 500)
femmesh.addNode(3509.0173831860507, 3288.0629411238574, 0.0, 501)
femmesh.addNode(1333.333333333331, 266.6921674423133, 0.0, 502)
femmesh.addNode(1246.455792552338, 3943.1168160681245, 0.0, 503)
femmesh.addNode(7671.82243665608, 2045.08978978048, 0.0, 504)
femmesh.addNode(664.0054257521647, 2002.0003762572062, 0.0, 505)
femmesh.addNode(2725.080338578547, 4990.947615037001, 0.0, 506)
femmesh.addNode(1452.921675895289, 2477.833873082014, 0.0, 507)
femmesh.addNode(7152.573814365585, 1264.814182573502, 0.0, 508)
femmesh.addNode(5468.0739198056435, 720.8423450484322, 0.0, 509)
femmesh.addNode(7725.392372819453, 3131.7530697448874, 0.0, 510)
femmesh.addNode(7205.080473574019, 2153.0752992404005, 0.0, 511)
femmesh.addNode(7738.506645821522, 4647.832318697929, 0.0, 512)
femmesh.addNode(967.7867487459522, 2749.220623124383, 0.0, 513)
femmesh.addNode(6939.3590862421825, 1738.8584485341664, 0.0, 514)
femmesh.addNode(6178.773825732844, 4159.694920788544, 0.0, 515)
femmesh.addNode(7303.794820638505, 2970.0961289267307, 0.0, 516)
femmesh.addNode(7226.520629430566, 1562.4250294684975, 0.0, 517)
femmesh.addNode(7741.820450739236, 1350.0, 0.0, 518)
femmesh.addNode(722.7259334596312, 931.9750850103734, 0.0, 519)
femmesh.addNode(1929.5639363908322, 4786.057610646385, 0.0, 520)
femmesh.addNode(4030.9218236587863, 268.0181913625381, 0.0, 521)
femmesh.addNode(940.9452676929355, 1493.047796935849, 0.0, 522)
femmesh.addNode(923.8259286131885, 4509.587879703922, 0.0, 523)
femmesh.addNode(4972.926175193251, 735.0482181329983, 0.0, 524)
femmesh.addNode(7314.152871169299, 4942.321760659757, 0.0, 525)
femmesh.addNode(7730.628307770583, 5312.084618080309, 0.0, 526)
femmesh.addNode(6243.042679488634, 1104.8267878014224, 0.0, 527)
femmesh.addNode(2166.34800818854, 3486.033443101096, 0.0, 528)
femmesh.addNode(1791.182019072847, 1065.6994832561645, 0.0, 529)
femmesh.addNode(4693.71601286955, 283.2512769942201, 0.0, 530)
femmesh.addNode(5585.704144994118, 1681.203136378545, 0.0, 531)
femmesh.addNode(710.161986014143, 5065.236506469547, 0.0, 532)
femmesh.addNode(4580.758362874801, 1179.728519598056, 0.0, 533)
femmesh.addNode(1491.5272685921811, 709.500652737893, 0.0, 534)
femmesh.addNode(624.6271074576289, 298.27695501945294, 0.0, 535)
femmesh.addNode(2967.06909414469, 3537.0088950923446, 0.0, 536)
femmesh.addNode(2405.7328284113396, 282.856722823379, 0.0, 537)
femmesh.addNode(2738.0546482353043, 2352.4236232081976, 0.0, 538)
femmesh.addNode(6401.2394796078315, 2940.815854871483, 0.0, 539)
femmesh.addNode(7108.19263962835, 635.5586686593102, 0.0, 540)
femmesh.addNode(6656.390813550942, 1380.3033377931897, 0.0, 541)
femmesh.addNode(6043.831321753771, 4633.234533455912, 0.0, 542)
femmesh.addNode(4721.16668715434, 4125.139738225244, 0.0, 543)
femmesh.addNode(1526.317693000979, 4425.931537245603, 0.0, 544)
femmesh.addNode(2856.9031223791794, 1235.5922818643587, 0.0, 545)
femmesh.addNode(1667.0413113868217, 3107.9483938783933, 0.0, 546)
femmesh.addNode(5155.803715908436, 3910.80238661925, 0.0, 547)
femmesh.addNode(5946.577768389446, 2091.170848821226, 0.0, 548)
femmesh.addNode(1194.4453099462119, 2248.1095928705754, 0.0, 549)
femmesh.addNode(6922.751848183759, 2819.9762673987525, 0.0, 550)
femmesh.addNode(6190.490338145421, 1678.9200868066084, 0.0, 551)
femmesh.addNode(3247.687431535846, 3209.5160076575958, 0.0, 552)
femmesh.addNode(7408.591570458754, 2538.3703459313115, 0.0, 553)
femmesh.addNode(2282.47082712219, 1254.746220629342, 0.0, 554)
femmesh.addNode(2132.2091900271516, 1772.438504133486, 0.0, 555)
femmesh.addNode(3424.681865436226, 1774.1140487341595, 0.0, 556)
femmesh.addNode(3418.7164283924403, 2054.450819765342, 0.0, 557)
femmesh.addNode(1179.4576460939484, 809.9370254008327, 0.0, 558)
femmesh.addNode(1262.722629987954, 5317.710122060681, 0.0, 559)
femmesh.addNode(4079.318698927749, 4649.051081831825, 0.0, 560)
femmesh.addNode(2924.7874036597086, 1810.0803017983594, 0.0, 561)
femmesh.addNode(7240.0300717518385, 3828.794555472594, 0.0, 562)
femmesh.addNode(3883.2729462580755, 5413.1992046940395, 0.0, 563)
femmesh.addNode(6189.678396669246, 2220.9032263668005, 0.0, 564)
femmesh.addNode(2244.822649395099, 4819.977361531794, 0.0, 565)
femmesh.addNode(2003.210320148662, 633.8443419719574, 0.0, 566)
femmesh.addNode(2489.490123847494, 2176.802779443699, 0.0, 567)
femmesh.addNode(2539.198818666904, 3702.1182730220376, 0.0, 568)
femmesh.addNode(4356.704072065866, 5197.059743681803, 0.0, 569)
femmesh.addNode(2161.34454034768, 2036.3116425755577, 0.0, 570)
femmesh.addNode(3865.8393816515136, 690.9368418386389, 0.0, 571)
femmesh.addNode(4042.0363531421444, 3375.2634430541025, 0.0, 572)
femmesh.addNode(1627.7142948634164, 3558.2248615468197, 0.0, 573)
femmesh.addNode(7407.388700150634, 1974.850934552782, 0.0, 574)
femmesh.addNode(3423.267554715098, 3643.457894388013, 0.0, 575)
femmesh.addNode(2372.4231375619115, 4573.673129222867, 0.0, 576)
femmesh.addNode(1210.8277639475282, 3706.9485653999477, 0.0, 577)
femmesh.addNode(7169.356634383594, 2425.7496526266914, 0.0, 578)
femmesh.addNode(6918.328467523301, 249.49598358478482, 0.0, 579)
femmesh.addNode(7730.874457753487, 5730.516153976924, 0.0, 580)
femmesh.addNode(4911.0716099769215, 3189.011146540938, 0.0, 581)
femmesh.addNode(4704.962751209002, 1976.7445416105706, 0.0, 582)
femmesh.addNode(2057.0711132894726, 3910.336144619501, 0.0, 583)
femmesh.addNode(5465.5101026011025, 5343.53019910437, 0.0, 584)
femmesh.addNode(7454.411974715306, 5284.166041789147, 0.0, 585)
femmesh.addNode(3854.479091618668, 2476.4846987096553, 0.0, 586)
femmesh.addNode(3975.898105570808, 3140.0515925059863, 0.0, 587)
femmesh.addNode(5774.264181489211, 5270.221993499352, 0.0, 588)
femmesh.addNode(231.8249691868472, 1637.5237907057171, 0.0, 589)
femmesh.addNode(1277.2291045783618, 2749.6798332821254, 0.0, 590)
femmesh.addNode(5473.693448996911, 1186.840294404773, 0.0, 591)
femmesh.addNode(5371.833565525066, 2475.462790725797, 0.0, 592)
femmesh.addNode(4323.602233314767, 3865.083675137802, 0.0, 593)
femmesh.addNode(912.8628139788123, 3383.249088628385, 0.0, 594)
femmesh.addNode(243.4295842170963, 2078.3985342679584, 0.0, 595)
femmesh.addNode(6935.259448158041, 2031.4302735176093, 0.0, 596)
femmesh.addNode(211.9984552794599, 4048.6488513738286, 0.0, 597)
femmesh.addNode(2772.462276984934, 4721.206809711335, 0.0, 598)
femmesh.addNode(6305.238886322673, 234.95886392138928, 0.0, 599)
femmesh.addNode(7230.891468223379, 5442.048753853078, 0.0, 600)
femmesh.addNode(5512.6394728493515, 238.04834278204174, 0.0, 601)
femmesh.addNode(1619.6910797101407, 5773.6124170727535, 0.0, 602)
femmesh.addNode(2479.9209008880734, 5403.183891491337, 0.0, 603)
femmesh.addNode(2070.3160628156097, 5738.861794816952, 0.0, 604)
femmesh.addNode(6375.264650330476, 1865.9917935034364, 0.0, 605)
femmesh.addNode(1031.9562844128895, 5769.006763532598, 0.0, 606)
femmesh.addNode(6265.808023974815, 3844.300408214667, 0.0, 607)
femmesh.addNode(1654.9993175669306, 2020.2530673917213, 0.0, 608)
femmesh.addNode(7153.171435619634, 3329.9593243366235, 0.0, 609)
femmesh.addNode(4286.223632984385, 1712.4692927440046, 0.0, 610)
femmesh.addNode(4451.349298609171, 4958.376562059673, 0.0, 611)
femmesh.addNode(5186.910026854137, 212.06636231126453, 0.0, 612)
femmesh.addNode(2578.7527701937665, 3123.69197619792, 0.0, 613)
femmesh.addNode(5776.858575101864, 2584.14832653534, 0.0, 614)
femmesh.addNode(7368.050177751716, 4374.351092159811, 0.0, 615)
femmesh.addNode(233.86688181995729, 2853.4080758781265, 0.0, 616)
femmesh.addNode(6486.393862312616, 4870.847189242479, 0.0, 617)
femmesh.addNode(6886.138995403915, 4585.223518498947, 0.0, 618)
femmesh.addNode(5922.477242685285, 3703.5763447693994, 0.0, 619)
femmesh.addNode(6591.503893510523, 4372.741484900332, 0.0, 620)
femmesh.addNode(231.81166885210675, 4957.778546792837, 0.0, 621)
femmesh.addNode(228.62380945363049, 1045.374636617549, 0.0, 622)
femmesh.addNode(3004.3184583377865, 5372.715182623278, 0.0, 623)
femmesh.addNode(6385.766002081878, 5769.657386335872, 0.0, 624)
femmesh.addNode(7794.75039951331, 3747.0566277999933, 0.0, 625)
femmesh.addNode(6019.939674630335, 4943.543019367761, 0.0, 626)
femmesh.addNode(3195.0653130500623, 1344.3985367274709, 0.0, 627)
femmesh.addNode(2811.743180673703, 219.47728697613394, 0.0, 628)
femmesh.addNode(689.2485148549873, 3973.481065898407, 0.0, 629)
femmesh.addNode(3880.650039729889, 1817.7921036490995, 0.0, 630)
femmesh.addNode(3179.1037965952632, 5757.109834833701, 0.0, 631)
femmesh.addNode(7766.824628326239, 2281.970584999322, 0.0, 632)
femmesh.addNode(1677.2320473992283, 3980.5486554044087, 0.0, 633)
femmesh.addNode(5794.791988511394, 884.1724024931256, 0.0, 634)
femmesh.addNode(7412.781274043341, 254.58465586025423, 0.0, 635)
femmesh.addNode(6193.890942005577, 3352.485485715128, 0.0, 636)
femmesh.addNode(4546.243647203619, 5417.071463756736, 0.0, 637)
femmesh.addNode(2191.081358123883, 5305.309956253436, 0.0, 638)
femmesh.addNode(3191.396700192288, 4950.424148223933, 0.0, 639)
femmesh.addNode(4225.297363344471, 5759.923903915871, 0.0, 640)
femmesh.addNode(4976.344215678057, 3629.4972670024436, 0.0, 641)
femmesh.addNode(5698.037661607894, 5757.736765869845, 0.0, 642)
femmesh.addNode(3010.238704575181, 3911.659865723999, 0.0, 643)
femmesh.addNode(3871.8296759758955, 2057.9153649043133, 0.0, 644)
femmesh.addNode(6745.70221134085, 1132.0551650706996, 0.0, 645)
femmesh.addNode(3560.9078855313464, 255.15464434189056, 0.0, 646)
femmesh.addNode(4348.544851981782, 4621.517329235005, 0.0, 647)
femmesh.addNode(6983.712637984722, 2551.931846505183, 0.0, 648)
femmesh.addNode(4251.394224701829, 4145.614599615976, 0.0, 649)
femmesh.addNode(7772.002527403345, 1617.5349953137304, 0.0, 650)
femmesh.addNode(2071.134898911248, 4330.571340232713, 0.0, 651)
femmesh.addNode(2325.067791136765, 5754.209202323643, 0.0, 652)
femmesh.addNode(5981.228864201084, 5426.7734342243875, 0.0, 653)
femmesh.addNode(5027.050753095909, 4690.037583475891, 0.0, 654)
femmesh.addNode(5480.797298615219, 2029.3986783417329, 0.0, 655)
femmesh.addNode(604.704157760315, 2455.1729768673063, 0.0, 656)
femmesh.addNode(5171.252289758761, 2012.6167387243493, 0.0, 657)
femmesh.addNode(981.0448133667329, 3636.6217674739146, 0.0, 658)
femmesh.addNode(244.66777119757057, 2341.087110344873, 0.0, 659)
femmesh.addNode(7451.203050102608, 4039.958737455424, 0.0, 660)
femmesh.addNode(6673.463202445251, 2201.3903891004625, 0.0, 661)
femmesh.addNode(3946.366417118861, 4953.074340636045, 0.0, 662)
femmesh.addNode(3543.2782155726627, 5410.287922706808, 0.0, 663)
femmesh.addNode(1179.8135616646825, 3181.667416999124, 0.0, 664)
femmesh.addNode(6728.681911293427, 4045.2437934759328, 0.0, 665)
femmesh.addNode(6474.687920164094, 2634.2296694011075, 0.0, 666)
femmesh.addNode(7419.009338945211, 3508.08206400413, 0.0, 667)
femmesh.addNode(6352.970682877889, 4582.690816278861, 0.0, 668)
femmesh.addNode(655.60704275733, 2941.6673827429368, 0.0, 669)
femmesh.addNode(4680.106239000447, 2514.911175376071, 0.0, 670)
femmesh.addNode(3558.0060578462108, 1321.6578218526356, 0.0, 671)
femmesh.addNode(6376.216135579232, 933.0499696768462, 0.0, 672)
femmesh.addNode(5734.3476279080605, 3872.091637279832, 0.0, 673)
femmesh.addNode(5574.303485888935, 4807.420813122383, 0.0, 674)
femmesh.addNode(5931.860873417766, 1843.5555111010833, 0.0, 675)
femmesh.addNode(4545.062334902329, 896.5642507821021, 0.0, 676)
femmesh.addNode(3660.8866986857915, 4918.04538572049, 0.0, 677)
femmesh.addNode(3780.864048487133, 960.3213210298263, 0.0, 678)
femmesh.addNode(6023.7012758737255, 603.990712191235, 0.0, 679)
femmesh.addNode(5895.2378155235065, 3031.985357691641, 0.0, 680)
femmesh.addNode(7394.22059512344, 1118.5235472479005, 0.0, 681)
femmesh.addNode(3004.0438635732626, 2043.6105321832192, 0.0, 682)
femmesh.addNode(3081.8548921882334, 857.365185031774, 0.0, 683)
femmesh.addNode(585.5351361249942, 4211.928404950215, 0.0, 684)
femmesh.addNode(5658.014432307285, 2893.28801742469, 0.0, 685)
femmesh.addNode(1451.6451835070357, 4855.3673533763595, 0.0, 686)
femmesh.addNode(2765.27892389935, 5779.359189014944, 0.0, 687)
femmesh.addNode(870.2902456962894, 5486.655743343572, 0.0, 688)
femmesh.addNode(7787.762076240306, 4887.109949102407, 0.0, 689)
femmesh.addNode(3554.47391121278, 4661.594333353671, 0.0, 690)
femmesh.addNode(4700.793478538623, 4956.50528912313, 0.0, 691)
femmesh.addNode(1972.9669301010545, 887.890174710281, 0.0, 692)
femmesh.addNode(5162.830901481482, 2426.749182932, 0.0, 693)
femmesh.addNode(1659.094893589362, 2295.7353299982683, 0.0, 694)
femmesh.addNode(1097.5513471651684, 223.17522248963803, 0.0, 695)
femmesh.addNode(554.668726443822, 4687.32603636818, 0.0, 696)
femmesh.addNode(6584.053137574225, 584.065680234474, 0.0, 697)
femmesh.addNode(3873.411382506447, 1411.0293244320474, 0.0, 698)
femmesh.addNode(1876.735979275271, 1328.6614350324999, 0.0, 699)
femmesh.addNode(558.0370263214184, 1316.7004362305465, 0.0, 700)
femmesh.addNode(6997.373829246505, 4082.724871629095, 0.0, 701)
femmesh.addNode(5096.878796164898, 3420.2479505881606, 0.0, 702)
femmesh.addNode(1396.8835161473053, 3571.253122249589, 0.0, 703)
femmesh.addNode(4663.348458325404, 5787.573269939614, 0.0, 704)
femmesh.addNode(260.7028909063048, 3773.3507731078653, 0.0, 705)
femmesh.addNode(7010.473210279063, 5038.647948387555, 0.0, 706)
femmesh.addNode(3703.7037037036944, 5814.396894736165, 0.0, 707)
femmesh.addNode(6027.150950410699, 2622.9477279269677, 0.0, 708)
femmesh.addNode(2740.1183798746188, 619.9689263583819, 0.0, 709)
femmesh.addNode(5189.499896025458, 619.3837773149462, 0.0, 710)
femmesh.addNode(3586.506405196444, 550.7815683258195, 0.0, 711)
femmesh.addNode(2194.987623854444, 219.35507233726327, 0.0, 712)
femmesh.addNode(6952.846593223185, 5788.207510598596, 0.0, 713)
femmesh.addNode(5786.263991544878, 5009.5320714914105, 0.0, 714)
femmesh.addNode(4012.0480351586225, 4220.629821934257, 0.0, 715)
femmesh.addNode(5819.292309685397, 4524.975633009085, 0.0, 716)
femmesh.addNode(7782.561366478809, 1044.9664362804583, 0.0, 717)
femmesh.addNode(6279.857377354824, 5322.001033101209, 0.0, 718)
femmesh.addNode(2319.604310150899, 3074.782696813805, 0.0, 719)
femmesh.addNode(6788.765367574882, 4836.8905537201, 0.0, 720)
femmesh.addNode(712.3837086741332, 2232.1795042719523, 0.0, 721)
femmesh.addNode(1431.098810327134, 1892.9905610510473, 0.0, 722)
femmesh.addNode(7808.11448364904, 2852.967755939065, 0.0, 723)
femmesh.addNode(2525.828125024906, 1582.8425930603191, 0.0, 724)
femmesh.addNode(3190.4539047908393, 2172.5939373401834, 0.0, 725)
femmesh.addNode(6940.69566802581, 793.5401535989213, 0.0, 726)
femmesh.addNode(5703.462130202242, 1148.527541717765, 0.0, 727)
femmesh.addNode(3463.051013788995, 2775.062693898545, 0.0, 728)
femmesh.addNode(7796.726177457187, 4352.626640444091, 0.0, 729)
femmesh.addNode(5580.805291554545, 461.4786835981061, 0.0, 730)
femmesh.addNode(611.4875208222093, 1775.0876496125954, 0.0, 731)
femmesh.addNode(7518.014942230175, 5796.939408216338, 0.0, 732)
femmesh.addNode(1129.6654880248216, 4143.8728391465065, 0.0, 733)
femmesh.addNode(6142.219295343434, 5792.135433631225, 0.0, 734)
femmesh.addNode(3424.4997768010785, 2531.3697290869395, 0.0, 735)
femmesh.addNode(6241.095268228463, 2467.351569343273, 0.0, 736)
femmesh.addNode(4599.9205343073145, 3899.9090404954377, 0.0, 737)
femmesh.addNode(2380.953591693878, 3539.28978149762, 0.0, 738)
femmesh.addNode(6419.376965091281, 2085.882606357815, 0.0, 739)
femmesh.addNode(2144.1595705334644, 2867.683809341155, 0.0, 740)
femmesh.addNode(1888.1145065450337, 2737.549388652982, 0.0, 741)
femmesh.addNode(196.00333475926118, 3110.4258175768596, 0.0, 742)
femmesh.addNode(6111.9031180580105, 2875.0585140836074, 0.0, 743)
femmesh.addNode(2318.226438471728, 2632.182570843897, 0.0, 744)
femmesh.addNode(221.57756791084597, 5777.43339505779, 0.0, 745)
femmesh.addNode(221.80552637723434, 223.009254943073, 0.0, 746)
femmesh.addNode(4872.3518380550795, 5817.242926532023, 0.0, 747)
femmesh.addNode(3865.2605688800986, 3853.129767883814, 0.0, 748)
femmesh.addNode(3179.991383469728, 2455.7952181232536, 0.0, 749)
femmesh.addNode(5186.634406051553, 2991.4265060837615, 0.0, 750)
femmesh.addNode(214.33040809660318, 468.9046623767502, 0.0, 751)
femmesh.addNode(211.07440020453453, 5527.9552281719825, 0.0, 752)
femmesh.addNode(3014.2383291309907, 3305.850992778784, 0.0, 753)
femmesh.addNode(7199.230778367561, 4653.833362292602, 0.0, 754)
femmesh.addNode(1163.8311758101122, 5114.632406477825, 0.0, 755)
femmesh.addNode(1722.0439197160224, 499.2582450775219, 0.0, 756)
femmesh.addNode(6981.923946641713, 3607.9383954600453, 0.0, 757)
femmesh.addNode(458.02324165381947, 2815.5588986456455, 0.0, 758)
femmesh.addNode(3256.3022642579417, 224.8132839930043, 0.0, 759)
femmesh.addNode(3873.23636021812, 2712.6297583070414, 0.0, 760)
femmesh.addNode(4910.216579634459, 176.56383757158628, 0.0, 761)
femmesh.addNode(511.22450260381254, 5791.305770680782, 0.0, 762)
femmesh.addNode(7803.473441573102, 5527.030202261261, 0.0, 763)
femmesh.addNode(4779.915568604455, 4707.623497640575, 0.0, 764)
femmesh.addNode(4963.043088722695, 5431.809496481827, 0.0, 765)
femmesh.addNode(4247.268001838055, 1481.020611457224, 0.0, 766)
femmesh.addNode(5732.940937632042, 2158.014504343027, 0.0, 767)
femmesh.addNode(4174.520085422308, 762.1480990967555, 0.0, 768)
femmesh.addNode(3181.8893338152034, 5204.946801119518, 0.0, 769)
femmesh.addNode(6702.473080041118, 192.8310175088782, 0.0, 770)
femmesh.addNode(2095.254919929917, 1373.7314904909426, 0.0, 771)
femmesh.addNode(2048.2934550589653, 3686.932386954009, 0.0, 772)
femmesh.addNode(3196.8815847467063, 583.075895625296, 0.0, 773)
femmesh.addNode(470.28870584653316, 841.195385207082, 0.0, 774)
femmesh.addNode(4243.033916902981, 201.146760489176, 0.0, 775)
femmesh.addNode(1485.0094162506439, 1490.1868733555805, 0.0, 776)
femmesh.addNode(6725.626167325743, 1606.3255781165349, 0.0, 777)
femmesh.addNode(1787.8099762209974, 221.79198964494955, 0.0, 778)
femmesh.addNode(6303.202265065661, 465.8759364089426, 0.0, 779)
femmesh.addNode(6074.150790107617, 169.70701065603228, 0.0, 780)
femmesh.addNode(5005.018839825389, 979.1746828735724, 0.0, 781)
femmesh.addNode(1878.37688714491, 3177.124580302714, 0.0, 782)
femmesh.addNode(6431.761654157427, 5550.798462712135, 0.0, 783)
femmesh.addNode(1674.5585311765158, 4222.87196042426, 0.0, 784)
femmesh.addNode(3677.021332473203, 3533.6134856093586, 0.0, 785)
femmesh.addNode(7375.331949431746, 1742.8570129200461, 0.0, 786)
femmesh.addNode(439.54862092186886, 167.10408822191343, 0.0, 787)
femmesh.addNode(1087.4940396624463, 4660.3685041514, 0.0, 788)
femmesh.addNode(2926.7997695741387, 2863.2622049550796, 0.0, 789)
femmesh.addNode(3115.6127071627748, 3690.984835609068, 0.0, 790)
femmesh.addNode(6353.329118540121, 3564.247526559453, 0.0, 791)
femmesh.addNode(1106.543838510997, 1324.2375590629008, 0.0, 792)
femmesh.addNode(6831.885349250451, 5554.414414668315, 0.0, 793)
femmesh.addNode(3055.595444319715, 4439.605808740129, 0.0, 794)
femmesh.addNode(5133.236929475503, 4876.293705161336, 0.0, 795)
femmesh.addNode(7829.372319094267, 213.18313173660414, 0.0, 796)
femmesh.addNode(2008.7689260384534, 2423.3360495264787, 0.0, 797)
femmesh.addNode(7781.828319235978, 1839.364710720715, 0.0, 798)
femmesh.addNode(5446.047976769356, 3624.313787580194, 0.0, 799)
femmesh.addNode(1021.1055622830836, 554.8561043735345, 0.0, 800)
femmesh.addNode(7404.78052672849, 2747.490464787852, 0.0, 801)
femmesh.addNode(7511.312554123875, 3053.2453064817505, 0.0, 802)
femmesh.addNode(2746.6816687168184, 5528.491177749865, 0.0, 803)
femmesh.addNode(1735.9430909862372, 1537.3803038437818, 0.0, 804)
femmesh.addNode(200.8469656101645, 3508.643151673744, 0.0, 805)
femmesh.addNode(7522.387812389939, 1562.3969896657823, 0.0, 806)
femmesh.addNode(906.0764241352223, 4972.645050447971, 0.0, 807)
femmesh.addNode(1629.7243782181613, 5561.465994080193, 0.0, 808)
femmesh.addNode(6025.673219847194, 1423.8319883999982, 0.0, 809)
femmesh.addNode(4519.551287107112, 1737.8592257885407, 0.0, 810)
femmesh.addNode(471.58414497416675, 5145.111607230162, 0.0, 811)
femmesh.addNode(2693.3241740935628, 4254.118094329861, 0.0, 812)
femmesh.addNode(440.9777702500636, 1638.164063171364, 0.0, 813)
femmesh.addNode(5367.27242342023, 959.3222350518702, 0.0, 814)
femmesh.addNode(2852.0251263404166, 439.87993222534715, 0.0, 815)
femmesh.addNode(5386.096481900847, 4590.106289408233, 0.0, 816)
femmesh.addNode(2031.9085609386927, 5499.020507644635, 0.0, 817)
femmesh.addNode(7842.282420583796, 450.0, 0.0, 818)
femmesh.addNode(3973.313452093168, 1117.3201454738632, 0.0, 819)
femmesh.addNode(7582.006414577216, 4470.19019899655, 0.0, 820)
femmesh.addNode(6240.219884391122, 5037.334269150257, 0.0, 821)
femmesh.addNode(7591.186715278582, 1171.1007534391983, 0.0, 822)
femmesh.addNode(4339.124974164889, 1298.9107047650234, 0.0, 823)
femmesh.addNode(2262.329564554463, 4387.186150036952, 0.0, 824)
femmesh.addNode(521.9574975008106, 3790.031597052506, 0.0, 825)
femmesh.addNode(7565.429274360342, 4831.81672557419, 0.0, 826)
femmesh.addNode(4885.18026527887, 1174.05818769299, 0.0, 827)
femmesh.addNode(3460.360750059373, 3927.545938102442, 0.0, 828)
femmesh.addNode(4234.21146417405, 2408.4905980822723, 0.0, 829)
femmesh.addNode(1469.8512380358118, 472.24024827065307, 0.0, 830)
femmesh.addNode(2604.522501327609, 204.45574222938612, 0.0, 831)
femmesh.addNode(2612.8424121719945, 4517.190202706002, 0.0, 832)
femmesh.addNode(4623.644876662934, 4547.9453351780485, 0.0, 833)
femmesh.addNode(441.71416923636394, 577.9613402823597, 0.0, 834)
femmesh.addNode(441.714169236365, 5422.03865971764, 0.0, 835)
femmesh.addNode(2946.1394280300233, 4887.630205901774, 0.0, 836)
femmesh.addNode(3338.164729539498, 4321.409123687981, 0.0, 837)
femmesh.addNode(1453.3901914270816, 3114.4717280219606, 0.0, 838)
femmesh.addNode(5156.128659672082, 1788.4144435794203, 0.0, 839)
femmesh.addNode(6159.626777669178, 4409.512074067423, 0.0, 840)
femmesh.addNode(924.8866777254106, 984.7829005826844, 0.0, 841)
femmesh.addNode(2673.9808903846188, 1138.0263438088466, 0.0, 842)
femmesh.addNode(4075.4358224955818, 3779.6963407803087, 0.0, 843)
femmesh.addNode(1094.9160650751094, 2940.5021817421703, 0.0, 844)
femmesh.addNode(6912.404148541939, 1258.3438929925874, 0.0, 845)
femmesh.addNode(5414.51326450606, 3172.6073243219735, 0.0, 846)
femmesh.addNode(3293.989835023435, 4730.489978396533, 0.0, 847)
femmesh.addNode(5280.020347319281, 1638.413925643536, 0.0, 848)
femmesh.addNode(4768.099784995892, 5362.461220031454, 0.0, 849)
femmesh.addNode(5720.468854239854, 4352.81773765507, 0.0, 850)
femmesh.addNode(5041.5585274798905, 1352.5205727595348, 0.0, 851)
femmesh.addNode(2496.445168419508, 514.1607306334574, 0.0, 852)
femmesh.addNode(6063.574737772505, 4078.3393959379264, 0.0, 853)
femmesh.addNode(6053.852909966514, 4233.047702055613, 0.0, 854)
femmesh.addNode(5938.653822006176, 4151.6921772049955, 0.0, 855)
femmesh.addNode(7382.5741394953475, 925.4238379674, 0.0, 856)
femmesh.addNode(7313.685056203803, 756.1661496620175, 0.0, 857)
femmesh.addNode(7480.143599682695, 765.4538333547616, 0.0, 858)
femmesh.addNode(5369.415640010457, 5082.623762174402, 0.0, 859)
femmesh.addNode(5275.461357157095, 4949.01732020395, 0.0, 860)
femmesh.addNode(5434.288521375393, 4940.502635256607, 0.0, 861)
femmesh.addNode(5162.590656561084, 3532.9990535206616, 0.0, 862)
femmesh.addNode(5272.905548297381, 3412.3291266343913, 0.0, 863)
femmesh.addNode(5338.617408693566, 3525.080229566892, 0.0, 864)
femmesh.addNode(1516.4846051290135, 2658.1146010858884, 0.0, 865)
femmesh.addNode(1610.9931700559123, 2504.7678442840333, 0.0, 866)
femmesh.addNode(1674.5560992896367, 2685.0485722879075, 0.0, 867)
femmesh.addNode(6760.523705099349, 3244.844201703093, 0.0, 868)
femmesh.addNode(6867.66090967698, 3092.3541520795725, 0.0, 869)
femmesh.addNode(6918.452830108139, 3234.7005281353604, 0.0, 870)
femmesh.addNode(5492.721603923168, 4569.3631233905135, 0.0, 871)
femmesh.addNode(5389.3128439996835, 4422.1880310605875, 0.0, 872)
femmesh.addNode(5495.937966022006, 4401.444865042868, 0.0, 873)
femmesh.addNode(7865.3141538852915, 5206.042309040155, 0.0, 874)
femmesh.addNode(7682.751384395123, 5183.992659660113, 0.0, 875)
femmesh.addNode(7817.437230509831, 5077.950350619958, 0.0, 876)
femmesh.addNode(5384.129929175004, 5837.353669216973, 0.0, 877)
femmesh.addNode(5532.278077323153, 5837.353669216973, 0.0, 878)
femmesh.addNode(6519.670385583855, 4216.789776583568, 0.0, 879)
femmesh.addNode(6471.002545948193, 4343.73555397699, 0.0, 880)
femmesh.addNode(6399.169038021525, 4187.783845660226, 0.0, 881)
femmesh.addNode(4394.727935373441, 3266.1822667484034, 0.0, 882)
femmesh.addNode(4392.745419229597, 3451.03988339405, 0.0, 883)
femmesh.addNode(4243.031242128745, 3377.346634337267, 0.0, 884)
femmesh.addNode(531.3650847971887, 2983.2633682664923, 0.0, 885)
femmesh.addNode(630.8485129579558, 3108.0795886826336, 0.0, 886)
femmesh.addNode(506.60655499781456, 3149.675574206189, 0.0, 887)
femmesh.addNode(3680.825338839532, 315.9441841517569, 0.0, 888)
femmesh.addNode(3706.3798686915948, 127.57732217094528, 0.0, 889)
femmesh.addNode(3826.2973219997803, 188.36686198081162, 0.0, 890)
femmesh.addNode(1953.9195286609986, 5690.9702510766965, 0.0, 891)
femmesh.addNode(1924.0469202966924, 5869.430897408476, 0.0, 892)
femmesh.addNode(1807.6503861420815, 5821.53935366822, 0.0, 893)
femmesh.addNode(7711.318268629984, 4971.5053251711615, 0.0, 894)
femmesh.addNode(7893.881038120153, 4993.5549745512035, 0.0, 895)
femmesh.addNode(1479.3631779004372, 1303.1103495427976, 0.0, 896)
femmesh.addNode(1405.4437005886068, 1153.400190392137, 0.0, 897)
femmesh.addNode(1545.7744543376912, 1150.6617756272408, 0.0, 898)
femmesh.addNode(5286.167645833921, 4538.6465516155595, 0.0, 899)
femmesh.addNode(5289.384007932757, 4370.728293267914, 0.0, 900)
femmesh.addNode(2398.0360097910707, 1562.0665781250723, 0.0, 901)
femmesh.addNode(2464.7625248521736, 1727.801424408495, 0.0, 902)
femmesh.addNode(2336.9704096183377, 1707.0254094732481, 0.0, 903)
femmesh.addNode(318.4632116470107, 1967.75400062245, 0.0, 904)
femmesh.addNode(121.71479210854815, 1939.1992671339792, 0.0, 905)
femmesh.addNode(196.74841953846257, 1828.554733488471, 0.0, 906)
femmesh.addNode(1220.5080629468923, 1204.475547166113, 0.0, 907)
femmesh.addNode(1286.9193393841463, 1052.0269732505562, 0.0, 908)
femmesh.addNode(4932.108809364495, 4306.896928048696, 0.0, 909)
femmesh.addNode(5105.876213950365, 4334.447698297157, 0.0, 910)
femmesh.addNode(5012.471405181124, 4459.636043574425, 0.0, 911)
femmesh.addNode(5457.318862205484, 4807.158503204269, 0.0, 912)
femmesh.addNode(5551.273145058845, 4940.764945174722, 0.0, 913)
femmesh.addNode(5807.375590948528, 4218.025621172323, 0.0, 914)
femmesh.addNode(5817.097418754519, 4063.3173150546363, 0.0, 915)
femmesh.addNode(6359.299678078662, 2660.6760548697166, 0.0, 916)
femmesh.addNode(6437.963699885962, 2787.5227621362956, 0.0, 917)
femmesh.addNode(6322.575457800531, 2813.9691476049047, 0.0, 918)
femmesh.addNode(5209.021412116128, 4217.989177742185, 0.0, 919)
femmesh.addNode(2389.656887052863, 4062.9028413600645, 0.0, 920)
femmesh.addNode(2251.4510405510273, 3949.161058246227, 0.0, 921)
femmesh.addNode(2410.692815244226, 3889.551146048091, 0.0, 922)
femmesh.addNode(2538.1616282538685, 1818.9657899702802, 0.0, 923)
femmesh.addNode(2708.1064781140312, 1918.3129804999635, 0.0, 924)
femmesh.addNode(2573.6417745396034, 1972.107446286354, 0.0, 925)
femmesh.addNode(4759.621391463934, 1557.2046558954457, 0.0, 926)
femmesh.addNode(4606.698858139414, 1600.960022005766, 0.0, 927)
femmesh.addNode(4622.544071697206, 1468.972951530709, 0.0, 928)
femmesh.addNode(6519.75532430756, 5249.788465360959, 0.0, 929)
femmesh.addNode(6572.109331871065, 5105.409170331983, 0.0, 930)
femmesh.addNode(6643.764049196345, 5222.953376441226, 0.0, 931)
femmesh.addNode(7568.764053209907, 641.8643739059416, 0.0, 932)
femmesh.addNode(7637.653136501451, 811.1220622113242, 0.0, 933)
femmesh.addNode(3215.436026406124, 944.1726123839974, 0.0, 934)
femmesh.addNode(3389.461258063864, 921.8194807765967, 0.0, 935)
femmesh.addNode(3287.531063269784, 1091.5748596406852, 0.0, 936)
femmesh.addNode(2380.615392629812, 1143.578159491165, 0.0, 937)
femmesh.addNode(2250.3418289165375, 1060.7014007447358, 0.0, 938)
femmesh.addNode(2348.4863944241597, 949.5333396065588, 0.0, 939)
femmesh.addNode(5566.48209331229, 5716.222052151896, 0.0, 940)
femmesh.addNode(5663.833645618757, 5878.868382934923, 0.0, 941)
femmesh.addNode(5202.016669530632, 5846.387886073035, 0.0, 942)
femmesh.addNode(5252.813265372313, 5683.741555290007, 0.0, 943)
femmesh.addNode(5670.676245288925, 3389.3470482803036, 0.0, 944)
femmesh.addNode(5552.504991899092, 3273.767211335644, 0.0, 945)
femmesh.addNode(5774.248936758153, 3258.703956935326, 0.0, 946)
femmesh.addNode(5553.024351273132, 3530.5022314892417, 0.0, 947)
femmesh.addNode(5774.768296132193, 3515.438977088924, 0.0, 948)
femmesh.addNode(946.9538136727726, 3509.9354280511498, 0.0, 949)
femmesh.addNode(849.3374187138693, 3650.389141456356, 0.0, 950)
femmesh.addNode(815.2464190199089, 3523.702802033591, 0.0, 951)
femmesh.addNode(3025.984217714621, 1289.9954092959147, 0.0, 952)
femmesh.addNode(3010.0897721938786, 1422.4563777572698, 0.0, 953)
femmesh.addNode(2841.008676858437, 1368.0532503257136, 0.0, 954)
femmesh.addNode(1763.735402031091, 1931.5263379889052, 0.0, 955)
femmesh.addNode(1609.6307883879317, 1859.6535442843558, 0.0, 956)
femmesh.addNode(1718.3668728520925, 1770.92681488154, 0.0, 957)
femmesh.addNode(7076.357659691194, 1518.2407199070994, 0.0, 958)
femmesh.addNode(7082.939857836374, 1650.641739001332, 0.0, 959)
femmesh.addNode(6932.7768880970025, 1606.457429439934, 0.0, 960)
femmesh.addNode(4384.058449932262, 5451.213041324418, 0.0, 961)
femmesh.addNode(4501.041052436297, 5537.411146607632, 0.0, 962)
femmesh.addNode(4338.85585516494, 5571.5527241753125, 0.0, 963)
femmesh.addNode(2159.1076831611467, 1187.1012228876098, 0.0, 964)
femmesh.addNode(2126.978684955494, 993.0564030030038, 0.0, 965)
femmesh.addNode(1634.6457288512383, 110.89599482247478, 0.0, 966)
femmesh.addNode(1678.8246368734358, 262.8116767379002, 0.0, 967)
femmesh.addNode(1525.6603895036765, 151.91568191542544, 0.0, 968)
femmesh.addNode(759.4763985686968, 3328.8704416253577, 0.0, 969)
femmesh.addNode(661.8600036097935, 3469.324155030564, 0.0, 970)
femmesh.addNode(1108.5707347990365, 5884.503381766299, 0.0, 971)
femmesh.addNode(1201.9525254898947, 5716.262073739866, 0.0, 972)
femmesh.addNode(1278.5669758760414, 5831.758691973567, 0.0, 973)
femmesh.addNode(4451.473859634742, 5307.065603719269, 0.0, 974)
femmesh.addNode(4289.288662363386, 5341.207181286951, 0.0, 975)
femmesh.addNode(4186.73205415216, 2862.8410621612984, 0.0, 976)
femmesh.addNode(4297.147967156858, 3017.976682467589, 0.0, 977)
femmesh.addNode(4134.597845267892, 3037.35339738533, 0.0, 978)
femmesh.addNode(4389.849428628335, 2382.7405333793067, 0.0, 979)
femmesh.addNode(4497.4640612598305, 2238.9729253917358, 0.0, 980)
femmesh.addNode(4543.4778637586305, 2397.9989315275543, 0.0, 981)
femmesh.addNode(7089.380735152702, 3206.0849014242317, 0.0, 982)
femmesh.addNode(6982.243530575071, 3358.5749510477517, 0.0, 983)
femmesh.addNode(4479.010980959631, 2944.5052674655126, 0.0, 984)
femmesh.addNode(4426.876772075362, 3119.017602689544, 0.0, 985)
femmesh.addNode(3713.275580915369, 5411.743563700424, 0.0, 986)
femmesh.addNode(3654.260735654869, 5281.420250111768, 0.0, 987)
femmesh.addNode(3824.2581009975756, 5282.875891105384, 0.0, 988)
femmesh.addNode(1795.6836681209206, 5162.475225220379, 0.0, 989)
femmesh.addNode(1715.3186077216833, 5320.1617113316315, 0.0, 990)
femmesh.addNode(1631.2019669122549, 5165.983974227966, 0.0, 991)
femmesh.addNode(5155.67633781917, 3318.635046916246, 0.0, 992)
femmesh.addNode(5331.703089951654, 3310.7162229624764, 0.0, 993)
femmesh.addNode(1451.5863154296026, 285.2617656365821, 0.0, 994)
femmesh.addNode(1407.4074074074051, 133.34608372115665, 0.0, 995)
femmesh.addNode(4819.780806328692, 2226.0488166014925, 0.0, 996)
femmesh.addNode(4961.450043272768, 2358.523635820728, 0.0, 997)
femmesh.addNode(4792.761733334202, 2386.706142759219, 0.0, 998)
femmesh.addNode(7193.8336631771035, 1711.654435699355, 0.0, 999)
femmesh.addNode(7050.252891582912, 1799.8711452321895, 0.0, 1000)
femmesh.addNode(769.8688488211501, 2997.7525410434955, 0.0, 1001)
femmesh.addNode(745.1103190217759, 3164.1647469831923, 0.0, 1002)
femmesh.addNode(6607.991909692246, 1629.0407309245452, 0.0, 1003)
femmesh.addNode(6687.837454635301, 1763.3652162622861, 0.0, 1004)
femmesh.addNode(6570.203197001804, 1786.0803690702962, 0.0, 1005)
femmesh.addNode(459.46512787396944, 3292.09370016171, 0.0, 1006)
femmesh.addNode(359.9816997132025, 3167.2774797455686, 0.0, 1007)
femmesh.addNode(4828.647426331876, 670.607742778215, 0.0, 1008)
femmesh.addNode(4877.054756804397, 838.1598752396977, 0.0, 1009)
femmesh.addNode(4732.776007943023, 773.7193998849145, 0.0, 1010)
femmesh.addNode(1428.63831947055, 2794.037581185944, 0.0, 1011)
femmesh.addNode(1365.0753902368253, 2613.75685318207, 0.0, 1012)
femmesh.addNode(7082.943895683857, 5382.099289171026, 0.0, 1013)
femmesh.addNode(7124.984080981181, 5509.190403819663, 0.0, 1014)
femmesh.addNode(6977.036508441659, 5449.240939137611, 0.0, 1015)
femmesh.addNode(6699.399472622235, 3535.573910920077, 0.0, 1016)
femmesh.addNode(6518.151422993756, 3517.6950121009704, 0.0, 1017)
femmesh.addNode(6630.067575902029, 3369.3116789397736, 0.0, 1018)
femmesh.addNode(2463.168680818446, 5829.274173380345, 0.0, 1019)
femmesh.addNode(2611.3168289665937, 5829.274173380345, 0.0, 1020)
femmesh.addNode(5893.829002835747, 3203.1345757858135, 0.0, 1021)
femmesh.addNode(6026.251636359767, 3066.092382727381, 0.0, 1022)
femmesh.addNode(6024.842823672007, 3237.241600821553, 0.0, 1023)
femmesh.addNode(5913.946589467609, 1725.6071264286018, 0.0, 1024)
femmesh.addNode(5758.782509205942, 1762.379323739814, 0.0, 1025)
femmesh.addNode(5740.868225255785, 1644.4309390673325, 0.0, 1026)
femmesh.addNode(4072.288656618182, 2737.2706740465646, 0.0, 1027)
femmesh.addNode(4234.838778507147, 2717.8939591288236, 0.0, 1028)
femmesh.addNode(2790.9315580240104, 2730.335955318843, 0.0, 1029)
femmesh.addNode(2613.688326181477, 2704.3100164719744, 0.0, 1030)
femmesh.addNode(2717.0631381004982, 2573.1789096057437, 0.0, 1031)
femmesh.addNode(4278.129306050152, 2567.045055264608, 0.0, 1032)
femmesh.addNode(4392.57270358413, 2692.6154433793413, 0.0, 1033)
femmesh.addNode(6175.578199600803, 3226.342446739124, 0.0, 1034)
femmesh.addNode(6043.155566076783, 3363.3846397975567, 0.0, 1035)
femmesh.addNode(2543.9141841214805, 2859.4267305002877, 0.0, 1036)
femmesh.addNode(2470.0457641979683, 2702.2696847871875, 0.0, 1037)
femmesh.addNode(4663.122836658937, 918.9178915642497, 0.0, 1038)
femmesh.addNode(4614.715506186416, 751.365759102767, 0.0, 1039)
femmesh.addNode(5053.868521382483, 5846.387886073035, 0.0, 1040)
femmesh.addNode(6588.259394475306, 4053.0409308713683, 0.0, 1041)
femmesh.addNode(6660.092902401975, 4208.992639188133, 0.0, 1042)
femmesh.addNode(6578.484429314133, 1262.0182523324374, 0.0, 1043)
femmesh.addNode(6459.753605201524, 1380.6642095756908, 0.0, 1044)
femmesh.addNode(6381.847220964715, 1262.3791241149386, 0.0, 1045)
femmesh.addNode(5556.499030169188, 4303.543755184006, 0.0, 1046)
femmesh.addNode(5659.907790092672, 4450.718847513932, 0.0, 1047)
femmesh.addNode(6573.374232804846, 1516.0296107628724, 0.0, 1048)
femmesh.addNode(6376.737024455428, 1516.3904825453737, 0.0, 1049)
femmesh.addNode(4413.086502824853, 555.5677690991592, 0.0, 1050)
femmesh.addNode(4261.819117303612, 616.1589636312466, 0.0, 1051)
femmesh.addNode(4280.266162194539, 447.7383998936408, 0.0, 1052)
femmesh.addNode(6974.170941434764, 2951.093372955296, 0.0, 1053)
femmesh.addNode(7113.273334411132, 2895.0361981627416, 0.0, 1054)
femmesh.addNode(7164.692427662138, 3026.1533037192853, 0.0, 1055)
femmesh.addNode(4539.504067702213, 665.0778001300985, 0.0, 1056)
femmesh.addNode(4557.951112593141, 496.6572363924926, 0.0, 1057)
femmesh.addNode(7052.984758457185, 2754.572614221862, 0.0, 1058)
femmesh.addNode(7243.506244684559, 2829.632544985851, 0.0, 1059)
femmesh.addNode(2616.53837412363, 2124.128708129868, 0.0, 1060)
femmesh.addNode(2446.5935242634673, 2024.781517600185, 0.0, 1061)
femmesh.addNode(759.6165307949587, 328.5053869248261, 0.0, 1062)
femmesh.addNode(756.7579981732581, 149.13847750972647, 0.0, 1063)
femmesh.addNode(891.7474215105881, 179.3669094150996, 0.0, 1064)
femmesh.addNode(5075.873002084907, 2610.7327305846384, 0.0, 1065)
femmesh.addNode(5081.48312034176, 2791.9903342956522, 0.0, 1066)
femmesh.addNode(4940.04108853513, 2700.4385656894683, 0.0, 1067)
femmesh.addNode(4525.189950623743, 1346.2284186195425, 0.0, 1068)
femmesh.addNode(4678.112483948264, 1302.4730525092223, 0.0, 1069)
femmesh.addNode(4822.3447324991175, 2813.0810229356016, 0.0, 1070)
femmesh.addNode(4816.734614242265, 2631.823419224588, 0.0, 1071)
femmesh.addNode(1426.7151240241897, 5831.758691973567, 0.0, 1072)
femmesh.addNode(1162.1070941853968, 4274.295959596459, 0.0, 1073)
femmesh.addNode(1285.883393391131, 4142.59766356247, 0.0, 1074)
femmesh.addNode(1318.324999551706, 4273.020784012423, 0.0, 1075)
femmesh.addNode(3541.157320391262, 2587.445256488564, 0.0, 1076)
femmesh.addNode(3545.0801542167633, 2407.5570100575505, 0.0, 1077)
femmesh.addNode(3661.737697806947, 2463.632537459175, 0.0, 1078)
femmesh.addNode(1881.547573828068, 4619.316744999498, 0.0, 1079)
femmesh.addNode(1788.049645474138, 4722.86655125881, 0.0, 1080)
femmesh.addNode(1740.033282911374, 4556.125685611923, 0.0, 1081)
femmesh.addNode(3695.929309040137, 4693.558284097877, 0.0, 1082)
femmesh.addNode(3707.047182136762, 4559.076469779107, 0.0, 1083)
femmesh.addNode(3848.5025799641194, 4591.040420523314, 0.0, 1084)
femmesh.addNode(468.0157067325813, 4543.527769139397, 0.0, 1085)
femmesh.addNode(312.73312274673236, 4468.8226814747795, 0.0, 1086)
femmesh.addNode(482.928531781281, 4381.580047044417, 0.0, 1087)
femmesh.addNode(3561.9393665340813, 4544.0994523600875, 0.0, 1088)
femmesh.addNode(3714.512637458063, 4441.581588785524, 0.0, 1089)
femmesh.addNode(2267.953057353296, 1822.5993799450785, 0.0, 1090)
femmesh.addNode(2201.2265422921937, 1656.864533661656, 0.0, 1091)
femmesh.addNode(6928.858800777107, 4192.020469391007, 0.0, 1092)
femmesh.addNode(7043.697085549042, 4224.173363280101, 0.0, 1093)
femmesh.addNode(6975.1820570796435, 4333.468961042013, 0.0, 1094)
femmesh.addNode(7117.348679148607, 4120.9592348249225, 0.0, 1095)
femmesh.addNode(7163.671935451143, 4262.407726475927, 0.0, 1096)
femmesh.addNode(4653.8890220421945, 2895.0060320790944, 0.0, 1097)
femmesh.addNode(4777.195496335058, 2963.6211785439746, 0.0, 1098)
femmesh.addNode(1179.372068490702, 3318.376135353779, 0.0, 1099)
femmesh.addNode(1326.125045816158, 3261.23875280796, 0.0, 1100)
femmesh.addNode(1325.6835526421776, 3397.947471162615, 0.0, 1101)
femmesh.addNode(7205.890985052491, 5316.700648382756, 0.0, 1102)
femmesh.addNode(7057.943412512968, 5256.751183700704, 0.0, 1103)
femmesh.addNode(1467.1204259988663, 5201.345981763479, 0.0, 1104)
femmesh.addNode(1547.4854863981036, 5043.659495652227, 0.0, 1105)
femmesh.addNode(3941.3868377553144, 134.00909568126906, 0.0, 1106)
femmesh.addNode(3915.8323079032516, 322.3759576620807, 0.0, 1107)
femmesh.addNode(574.1608443080829, 3612.1629474992815, 0.0, 1108)
femmesh.addNode(518.3908238568708, 3417.330587091048, 0.0, 1109)
femmesh.addNode(1543.0490639470322, 1956.6218142213843, 0.0, 1110)
femmesh.addNode(1497.6805347680336, 1796.0222911140188, 0.0, 1111)
femmesh.addNode(4279.9766975313905, 5054.442491501215, 0.0, 1112)
femmesh.addNode(4215.204820459885, 5189.617528876474, 0.0, 1113)
femmesh.addNode(4138.4774459254095, 5047.000276695886, 0.0, 1114)
femmesh.addNode(5822.921866940661, 1891.1822634226064, 0.0, 1115)
femmesh.addNode(5649.843502728837, 1810.0060760613374, 0.0, 1116)
femmesh.addNode(6816.241816425974, 2961.2370465230288, 0.0, 1117)
femmesh.addNode(3509.7811763868676, 1195.43977494296, 0.0, 1118)
femmesh.addNode(3376.5356854481365, 1333.0281792900532, 0.0, 1119)
femmesh.addNode(3328.3108039887934, 1206.8101323803776, 0.0, 1120)
femmesh.addNode(408.43271796666943, 5523.91902440261, 0.0, 1121)
femmesh.addNode(574.8445325302206, 5552.50561376648, 0.0, 1122)
femmesh.addNode(541.563081260525, 5654.385978451449, 0.0, 1123)
femmesh.addNode(4327.299310803042, 4935.10090069015, 0.0, 1124)
femmesh.addNode(4404.026685337518, 5077.718152870739, 0.0, 1125)
femmesh.addNode(1569.7389206772275, 3224.3792412475946, 0.0, 1126)
femmesh.addNode(1759.3509277540065, 3259.5449859110704, 0.0, 1127)
femmesh.addNode(1662.0485370444126, 3375.9758332802717, 0.0, 1128)
femmesh.addNode(4727.756861225607, 3264.4433311730622, 0.0, 1129)
femmesh.addNode(4856.6099322253785, 3312.4039170742035, 0.0, 1130)
femmesh.addNode(4673.295183474064, 3387.8361017063276, 0.0, 1131)
femmesh.addNode(4576.590949176214, 3192.710851746327, 0.0, 1132)
femmesh.addNode(6815.4489345996335, 3945.1364788760857, 0.0, 1133)
femmesh.addNode(6658.082615503696, 3864.600518778603, 0.0, 1134)
femmesh.addNode(6744.849638809901, 3764.493204178756, 0.0, 1135)
femmesh.addNode(2021.8758727393479, 4678.85978421246, 0.0, 1136)
femmesh.addNode(1973.8595101765836, 4512.118918565573, 0.0, 1137)
femmesh.addNode(670.9545445448027, 464.8444239474694, 0.0, 1138)
femmesh.addNode(805.9439678821326, 495.07285585284257, 0.0, 1139)
femmesh.addNode(5875.383137245497, 1040.4764154956729, 0.0, 1140)
femmesh.addNode(5947.485806623112, 883.1437814430759, 0.0, 1141)
femmesh.addNode(6028.076955357215, 1039.4477944456232, 0.0, 1142)
femmesh.addNode(5689.0930690883115, 4795.613133180677, 0.0, 1143)
femmesh.addNode(5586.825105917213, 4678.020385247588, 0.0, 1144)
femmesh.addNode(5701.614689116589, 4666.212705305882, 0.0, 1145)
femmesh.addNode(4567.450744666694, 2643.1162079929236, 0.0, 1146)
femmesh.addNode(4524.160217123689, 2793.965111857139, 0.0, 1147)
femmesh.addNode(957.8557582076861, 456.7949616018669, 0.0, 1148)
femmesh.addNode(869.1937719575301, 593.1339986245102, 0.0, 1149)
femmesh.addNode(6784.703633177838, 3645.9478197706594, 0.0, 1150)
femmesh.addNode(6942.069952273776, 3726.483779868142, 0.0, 1151)
femmesh.addNode(3154.285572331053, 1229.1632639877785, 0.0, 1152)
femmesh.addNode(168.17873822695674, 2487.7250861194134, 0.0, 1153)
femmesh.addNode(168.17873822695674, 2637.7250861194134, 0.0, 1154)
femmesh.addNode(6061.940450304278, 743.0529362921307, 0.0, 1155)
femmesh.addNode(6192.3732912876585, 647.8689271346946, 0.0, 1156)
femmesh.addNode(6230.612465718211, 786.9311512355903, 0.0, 1157)
femmesh.addNode(5703.144020968363, 4241.234248338516, 0.0, 1158)
femmesh.addNode(5539.174196897697, 4191.960265867452, 0.0, 1159)
femmesh.addNode(7344.263289576658, 4099.576167738087, 0.0, 1160)
femmesh.addNode(7409.626613927162, 4207.154914807617, 0.0, 1161)
femmesh.addNode(7302.686853401212, 4266.77234509028, 0.0, 1162)
femmesh.addNode(6785.922696482596, 2825.4671323144253, 0.0, 1163)
femmesh.addNode(6679.412664724812, 2966.727911438702, 0.0, 1164)
femmesh.addNode(7261.946192577225, 127.29232793012712, 0.0, 1165)
femmesh.addNode(7279.3414888969455, 310.60080454125836, 0.0, 1166)
femmesh.addNode(7128.506407430828, 183.30847661113125, 0.0, 1167)
femmesh.addNode(4079.9603001660425, 388.17389289413313, 0.0, 1168)
femmesh.addNode(3964.870784410508, 442.5316591936757, 0.0, 1169)
femmesh.addNode(4110.455931921699, 3166.2703050988034, 0.0, 1170)
femmesh.addNode(4000.0400189170014, 3011.1346847925124, 0.0, 1171)
femmesh.addNode(528.7511324145449, 1929.554921617074, 0.0, 1172)
femmesh.addNode(570.7860829859997, 2123.8700685458084, 0.0, 1173)
femmesh.addNode(435.53178964837986, 2051.4246139056763, 0.0, 1174)
femmesh.addNode(114.31190472681524, 1122.6873183087746, 0.0, 1175)
femmesh.addNode(276.63702303043885, 1218.860029998914, 0.0, 1176)
femmesh.addNode(162.32511830362358, 1296.1727116901393, 0.0, 1177)
femmesh.addNode(3997.419079162406, 599.6332181321835, 0.0, 1178)
femmesh.addNode(3833.2910868996155, 533.835282900131, 0.0, 1179)
femmesh.addNode(7891.280683239404, 972.4832181402292, 0.0, 1180)
femmesh.addNode(7754.417478253736, 866.2495195214813, 0.0, 1181)
femmesh.addNode(7863.136795014331, 793.7663013812521, 0.0, 1182)
femmesh.addNode(7517.632324270275, 4146.561970461825, 0.0, 1183)
femmesh.addNode(7476.055888094828, 4313.758147814018, 0.0, 1184)
femmesh.addNode(4759.905697927528, 3117.278667114203, 0.0, 1185)
femmesh.addNode(4807.268604639363, 2517.0460686772626, 0.0, 1186)
femmesh.addNode(4665.599367695287, 2384.571249458027, 0.0, 1187)
femmesh.addNode(3097.6803619001385, 985.6465881399299, 0.0, 1188)
femmesh.addNode(3199.6105566942188, 815.8912092758414, 0.0, 1189)
femmesh.addNode(5409.083910651663, 1309.9315018929087, 0.0, 1190)
femmesh.addNode(5581.364193507972, 1299.7282260306279, 0.0, 1191)
femmesh.addNode(5516.754655162726, 1422.8194335187636, 0.0, 1192)
femmesh.addNode(2372.6104433563205, 2248.4752354870116, 0.0, 1193)
femmesh.addNode(2514.655015052729, 2361.977875101287, 0.0, 1194)
femmesh.addNode(2397.7753345615556, 2433.6503311445995, 0.0, 1195)
femmesh.addNode(6264.637512059354, 4237.212271921096, 0.0, 1196)
femmesh.addNode(6313.305351695015, 4110.266494527674, 0.0, 1197)
femmesh.addNode(562.8393068003079, 5380.389301725061, 0.0, 1198)
femmesh.addNode(695.9696700941635, 5510.856255773901, 0.0, 1199)
femmesh.addNode(4562.910348888565, 1038.146385190079, 0.0, 1200)
femmesh.addNode(4680.970850645172, 1060.5000259722265, 0.0, 1201)
femmesh.addNode(3059.9680111623084, 2800.53576381081, 0.0, 1202)
femmesh.addNode(2910.5530697585855, 2731.233526703846, 0.0, 1203)
femmesh.addNode(3043.721311346755, 2668.507085559576, 0.0, 1204)
femmesh.addNode(6815.557189952093, 5200.361952950612, 0.0, 1205)
femmesh.addNode(6763.203182388588, 5344.741247979588, 0.0, 1206)
femmesh.addNode(3186.176620807394, 3775.2915889175815, 0.0, 1207)
femmesh.addNode(3269.4401309389364, 3667.2213649985406, 0.0, 1208)
femmesh.addNode(3340.0040445835557, 3751.528118307054, 0.0, 1209)
femmesh.addNode(7184.344705191021, 1090.475168424079, 0.0, 1210)
femmesh.addNode(7054.8099754950035, 1149.2275183669349, 0.0, 1211)
femmesh.addNode(7086.580866320439, 974.8885042175118, 0.0, 1212)
femmesh.addNode(2347.7190807535667, 5877.104601161822, 0.0, 1213)
femmesh.addNode(2440.5173912016444, 5706.378774542167, 0.0, 1214)
femmesh.addNode(162.32511830362358, 1446.1727116901393, 0.0, 1215)
femmesh.addNode(5588.577789599576, 1167.683918061269, 0.0, 1216)
femmesh.addNode(5696.2485341106385, 1280.5718496871239, 0.0, 1217)
femmesh.addNode(6579.2756554708685, 3226.9653028839857, 0.0, 1218)
femmesh.addNode(4141.542539563523, 3468.7338470185077, 0.0, 1219)
femmesh.addNode(4143.525055707367, 3283.8762303728618, 0.0, 1220)
femmesh.addNode(360.49816221846544, 2162.069147551185, 0.0, 1221)
femmesh.addNode(148.91014884901634, 4565.3852017848785, 0.0, 1222)
femmesh.addNode(163.82297389771603, 4403.437479689901, 0.0, 1223)
femmesh.addNode(6256.5712988329215, 2907.937184477545, 0.0, 1224)
femmesh.addNode(6177.907277025621, 2781.090477210967, 0.0, 1225)
femmesh.addNode(4389.521158412457, 2088.16887934669, 0.0, 1226)
femmesh.addNode(4543.149593542752, 2103.4272774949377, 0.0, 1227)
femmesh.addNode(1983.578551538019, 5184.251200620014, 0.0, 1228)
femmesh.addNode(1899.4619107285903, 5030.073463516348, 0.0, 1229)
femmesh.addNode(2208.5376516064134, 2178.229667052941, 0.0, 1230)
femmesh.addNode(2325.417332097587, 2106.557211009628, 0.0, 1231)
femmesh.addNode(2490.4484562988173, 890.6112406720142, 0.0, 1232)
femmesh.addNode(2657.6850014002966, 844.4148281447018, 0.0, 1233)
femmesh.addNode(2645.996503238914, 986.2136858256755, 0.0, 1234)
femmesh.addNode(4128.837644955021, 1074.492873589862, 0.0, 1235)
femmesh.addNode(3991.3796304229186, 1009.1489330615158, 0.0, 1236)
femmesh.addNode(4146.903823284771, 966.3216611775146, 0.0, 1237)
femmesh.addNode(3337.6461682757217, 4002.9907647535942, 0.0, 1238)
femmesh.addNode(3260.4058014139364, 4177.484496746967, 0.0, 1239)
femmesh.addNode(3179.5001675902276, 4034.0920742194676, 0.0, 1240)
femmesh.addNode(7544.643217867484, 5170.033371514532, 0.0, 1241)
femmesh.addNode(7384.282422942302, 5113.243901224452, 0.0, 1242)
femmesh.addNode(7474.513666094481, 4999.111230949837, 0.0, 1243)
femmesh.addNode(6512.6566961376675, 1893.1983239557367, 0.0, 1244)
femmesh.addNode(6432.811151194613, 1758.8738386179957, 0.0, 1245)
femmesh.addNode(7530.664730053624, 302.68919992417347, 0.0, 1246)
femmesh.addNode(7558.242488873522, 127.29232793012712, 0.0, 1247)
femmesh.addNode(7676.1259448838055, 175.39687199404634, 0.0, 1248)
femmesh.addNode(6561.890732472764, 2732.5938333156028, 0.0, 1249)
femmesh.addNode(6525.166512194633, 2885.886926050791, 0.0, 1250)
femmesh.addNode(2725.3706492273604, 5138.546618850626, 0.0, 1251)
femmesh.addNode(2866.7607761126837, 5055.429341247761, 0.0, 1252)
femmesh.addNode(2867.0510867614976, 5203.028345061386, 0.0, 1253)
femmesh.addNode(609.5996992139444, 5737.13916924805, 0.0, 1254)
femmesh.addNode(443.1878846503932, 5708.552579884181, 0.0, 1255)
femmesh.addNode(4229.701426105863, 2079.577254339057, 0.0, 1256)
femmesh.addNode(4190.29362229795, 2249.9361408999366, 0.0, 1257)
femmesh.addNode(4076.159422274278, 2105.798867995506, 0.0, 1258)
femmesh.addNode(4275.386958388785, 1944.0316064422593, 0.0, 1259)
femmesh.addNode(4569.080228656991, 141.62563849711006, 0.0, 1260)
femmesh.addNode(4562.624780292665, 335.1992411778868, 0.0, 1261)
femmesh.addNode(4437.9889960801065, 193.57360268077673, 0.0, 1262)
femmesh.addNode(6832.492626783963, 1672.5920133253508, 0.0, 1263)
femmesh.addNode(6794.703914093521, 1829.6316514711018, 0.0, 1264)
femmesh.addNode(3176.2952702026014, 1493.058661137371, 0.0, 1265)
femmesh.addNode(2991.3197293464177, 1571.11650216717, 0.0, 1266)
femmesh.addNode(163.82297389771603, 4253.437479689901, 0.0, 1267)
femmesh.addNode(4833.181801847207, 1057.6648600196936, 0.0, 1268)
femmesh.addNode(4732.969314076836, 1176.893353645523, 0.0, 1269)
femmesh.addNode(2282.5207325135602, 1954.5359491661143, 0.0, 1270)
femmesh.addNode(115.9124845934236, 1718.7618953528586, 0.0, 1271)
femmesh.addNode(312.6609041318862, 1747.3166288413295, 0.0, 1272)
femmesh.addNode(4780.179245449484, 3635.824575171586, 0.0, 1273)
femmesh.addNode(4912.750978766055, 3744.6462493580143, 0.0, 1274)
femmesh.addNode(4716.5860085374825, 3750.973557527157, 0.0, 1275)
femmesh.addNode(4889.246235075946, 3532.646977304956, 0.0, 1276)
femmesh.addNode(4693.081264847374, 3538.974285474099, 0.0, 1277)
femmesh.addNode(6059.801643933596, 490.61393325435097, 0.0, 1278)
femmesh.addNode(5923.128080676108, 455.2881835541659, 0.0, 1279)
femmesh.addNode(5959.2284487359775, 341.91140461728185, 0.0, 1280)
femmesh.addNode(3221.436837805577, 5598.300330460443, 0.0, 1281)
femmesh.addNode(3068.417395698776, 5693.014184977839, 0.0, 1282)
femmesh.addNode(3110.7504369090902, 5534.204680604581, 0.0, 1283)
femmesh.addNode(371.76596857225877, 3434.9324926304275, 0.0, 1284)
femmesh.addNode(7317.651238298455, 5237.759292350791, 0.0, 1285)
femmesh.addNode(7247.521686525451, 5066.837151786096, 0.0, 1286)
femmesh.addNode(7410.0943407253735, 127.29232793012712, 0.0, 1287)
femmesh.addNode(3243.5593811167273, 3005.8699659090607, 0.0, 1288)
femmesh.addNode(3141.2899743974594, 2876.8076119517714, 0.0, 1289)
femmesh.addNode(3295.4056594697454, 2866.8716766238294, 0.0, 1290)
femmesh.addNode(1698.734428743958, 5886.806208536376, 0.0, 1291)
femmesh.addNode(1728.6070371082642, 5708.345562204598, 0.0, 1292)
femmesh.addNode(5884.97501137318, 603.3507291307958, 0.0, 1293)
femmesh.addNode(5784.401816175561, 454.6482004937267, 0.0, 1294)
femmesh.addNode(2355.35059677633, 4225.0734431782985, 0.0, 1295)
femmesh.addNode(2514.5923714695286, 4165.463530980162, 0.0, 1296)
femmesh.addNode(3760.069811625558, 2380.114494868908, 0.0, 1297)
femmesh.addNode(3756.1469778000564, 2560.002741299922, 0.0, 1298)
femmesh.addNode(1188.0606402885796, 4043.4948276073155, 0.0, 1299)
femmesh.addNode(1344.278545654889, 4042.219652023279, 0.0, 1300)
femmesh.addNode(6435.633709493832, 5344.666852285705, 0.0, 1301)
femmesh.addNode(6363.9789921685515, 5227.122646176462, 0.0, 1302)
femmesh.addNode(1023.2558366759147, 2334.764373659973, 0.0, 1303)
femmesh.addNode(1183.4413177610704, 2388.5990147350294, 0.0, 1304)
femmesh.addNode(1012.2518444907732, 2475.253795524427, 0.0, 1305)
femmesh.addNode(5447.49013859961, 3514.3620451304078, 0.0, 1306)
femmesh.addNode(5337.175246863313, 3635.031972016678, 0.0, 1307)
femmesh.addNode(1058.426375697997, 2174.6331850053157, 0.0, 1308)
femmesh.addNode(887.2369024276996, 2261.2879657947133, 0.0, 1309)
femmesh.addNode(1276.8144426247336, 4876.1325116759135, 0.0, 1310)
femmesh.addNode(1417.5245644958754, 4967.19442828205, 0.0, 1311)
femmesh.addNode(1242.6938236135734, 4987.959586581604, 0.0, 1312)
femmesh.addNode(6511.585847895134, 5459.065567091169, 0.0, 1313)
femmesh.addNode(6355.809515756126, 5436.399747906672, 0.0, 1314)
femmesh.addNode(3441.8141523872355, 3785.501916245227, 0.0, 1315)
femmesh.addNode(3358.550642255693, 3893.5721401642686, 0.0, 1316)
femmesh.addNode(2072.1950684448407, 5869.430897408476, 0.0, 1317)
femmesh.addNode(3008.12173280929, 2939.534053096041, 0.0, 1318)
femmesh.addNode(996.1130470692466, 665.8660585339383, 0.0, 1319)
femmesh.addNode(844.2012567436931, 704.143952784914, 0.0, 1320)
femmesh.addNode(5709.319517815444, 4536.79779519094, 0.0, 1321)
femmesh.addNode(5811.587480986543, 4654.390543124027, 0.0, 1322)
femmesh.addNode(596.439921105476, 4571.805585538607, 0.0, 1323)
femmesh.addNode(426.2445120709274, 4659.048219968969, 0.0, 1324)
femmesh.addNode(1221.5453891116217, 1790.3072867562084, 0.0, 1325)
femmesh.addNode(1043.791879383892, 1869.1625698024618, 0.0, 1326)
femmesh.addNode(1071.3551700009334, 1695.097555194921, 0.0, 1327)
femmesh.addNode(3006.379835992304, 5246.3131250409, 0.0, 1328)
femmesh.addNode(2864.9897091069806, 5329.430402643765, 0.0, 1329)
femmesh.addNode(898.4967344318912, 3218.5433939862196, 0.0, 1330)
femmesh.addNode(1317.335698277427, 5490.613753003907, 0.0, 1331)
femmesh.addNode(1406.7797682504856, 5320.69029119995, 0.0, 1332)
femmesh.addNode(1461.3928365399588, 5493.593922143176, 0.0, 1333)
femmesh.addNode(1178.884561448304, 5418.754640000496, 0.0, 1334)
femmesh.addNode(1233.497629737777, 5591.658270943722, 0.0, 1335)
femmesh.addNode(1058.1947699721811, 2032.7645392519023, 0.0, 1336)
femmesh.addNode(908.0045508614928, 1937.5548076906148, 0.0, 1337)
femmesh.addNode(3461.894483050869, 780.8946967605501, 0.0, 1338)
femmesh.addNode(3533.9895199145294, 928.2969440172379, 0.0, 1339)
femmesh.addNode(1394.0382125084739, 4753.684097089825, 0.0, 1340)
femmesh.addNode(1219.2074716261718, 4774.449255389378, 0.0, 1341)
femmesh.addNode(3493.978311977406, 4286.493879323799, 0.0, 1342)
femmesh.addNode(3576.13525143023, 4127.126825599693, 0.0, 1343)
femmesh.addNode(3651.561761308206, 4267.237517642399, 0.0, 1344)
femmesh.addNode(1631.5184980493539, 1033.3255498663723, 0.0, 1345)
femmesh.addNode(1705.4379753611843, 1183.035709017033, 0.0, 1346)
femmesh.addNode(6710.968616617314, 987.4413781553214, 0.0, 1347)
femmesh.addNode(6623.140128209087, 1137.8941659711923, 0.0, 1348)
femmesh.addNode(6588.406533485551, 993.2803790558143, 0.0, 1349)
femmesh.addNode(4928.361408384451, 3035.3536579707097, 0.0, 1350)
femmesh.addNode(5935.426446248725, 3850.280107928354, 0.0, 1351)
femmesh.addNode(6094.14263333005, 3773.938376492033, 0.0, 1352)
femmesh.addNode(6107.091836893491, 3920.6421396509877, 0.0, 1353)
femmesh.addNode(4335.037347650592, 2475.128568798699, 0.0, 1354)
femmesh.addNode(4289.023545151793, 2316.10256266288, 0.0, 1355)
femmesh.addNode(4557.984735063791, 2528.338857445598, 0.0, 1356)
femmesh.addNode(3558.2317205434474, 127.57732217094528, 0.0, 1357)
femmesh.addNode(5183.692495808971, 4748.466888381023, 0.0, 1358)
femmesh.addNode(5106.6447814314515, 4588.612198649388, 0.0, 1359)
femmesh.addNode(5263.286524144512, 4647.04150355452, 0.0, 1360)
femmesh.addNode(1340.1037450278986, 1754.6164165998575, 0.0, 1361)
femmesh.addNode(1406.6854694687981, 1657.648146662829, 0.0, 1362)
femmesh.addNode(6279.25246840193, 3020.5076313173017, 0.0, 1363)
femmesh.addNode(6425.02950294069, 3146.1243174960746, 0.0, 1364)
femmesh.addNode(6303.042491734788, 3225.816093941893, 0.0, 1365)
femmesh.addNode(1650.1026750975852, 1618.217162510386, 0.0, 1366)
femmesh.addNode(1804.2072887407444, 1690.0899562149357, 0.0, 1367)
femmesh.addNode(3796.6695769108874, 4282.214535061419, 0.0, 1368)
femmesh.addNode(5496.876453414929, 5208.819638165714, 0.0, 1369)
femmesh.addNode(5338.0492891966305, 5217.334323113057, 0.0, 1370)
femmesh.addNode(4453.808254267839, 3753.6177792392655, 0.0, 1371)
femmesh.addNode(4282.325479649833, 3713.6439630603577, 0.0, 1372)
femmesh.addNode(4412.531500602907, 3602.1780671618208, 0.0, 1373)
femmesh.addNode(1370.4821045967824, 3869.1570290220366, 0.0, 1374)
femmesh.addNode(1468.3048576993335, 3968.2598649771912, 0.0, 1375)
femmesh.addNode(1063.501388660772, 5644.402960736454, 0.0, 1376)
femmesh.addNode(4564.228193847603, 3491.013699572957, 0.0, 1377)
femmesh.addNode(1434.6600230574693, 3456.0316054331925, 0.0, 1378)
femmesh.addNode(1287.9070457320136, 3513.1689879790115, 0.0, 1379)
femmesh.addNode(3609.1639577200885, 3128.3324509830463, 0.0, 1380)
femmesh.addNode(3453.3462246875315, 3141.998485852488, 0.0, 1381)
femmesh.addNode(3553.4927992215694, 2982.267995711677, 0.0, 1382)
femmesh.addNode(3651.663411853852, 3305.6513235470147, 0.0, 1383)
femmesh.addNode(3751.8099863878897, 3145.9208334062037, 0.0, 1384)
femmesh.addNode(6896.61978608611, 3497.5644866094626, 0.0, 1385)
femmesh.addNode(4678.027623799564, 2115.4879325752772, 0.0, 1386)
femmesh.addNode(4846.715933738131, 2087.305425636786, 0.0, 1387)
femmesh.addNode(7110.977009196776, 3718.3664754663196, 0.0, 1388)
femmesh.addNode(7071.123014828839, 3836.911859874416, 0.0, 1389)
femmesh.addNode(4570.084720952191, 1964.6838865302313, 0.0, 1390)
femmesh.addNode(4591.967404764113, 3771.0304619180833, 0.0, 1391)
femmesh.addNode(4461.761383811041, 3882.49635781662, 0.0, 1392)
femmesh.addNode(2835.4441561324056, 4256.052671140054, 0.0, 1393)
femmesh.addNode(2968.227739605435, 4150.943035689012, 0.0, 1394)
femmesh.addNode(2969.476217255413, 4313.695441663884, 0.0, 1395)
femmesh.addNode(6368.630721140412, 812.3985558775003, 0.0, 1396)
femmesh.addNode(6238.197880157031, 907.5825650349362, 0.0, 1397)
femmesh.addNode(1739.687419492304, 3484.6832197452836, 0.0, 1398)
femmesh.addNode(1550.0754124155249, 3449.517475081808, 0.0, 1399)
femmesh.addNode(3477.132905471398, 485.42818730874217, 0.0, 1400)
femmesh.addNode(3451.9363131983237, 662.5994009228642, 0.0, 1401)
femmesh.addNode(3342.5628134732774, 597.2460199057869, 0.0, 1402)
femmesh.addNode(121.71479210854815, 2089.1992671339794, 0.0, 1403)
femmesh.addNode(7898.3630887285935, 4276.313320222045, 0.0, 1404)
femmesh.addNode(7768.206642329093, 4190.822794014607, 0.0, 1405)
femmesh.addNode(7869.843553600499, 4114.509473792561, 0.0, 1406)
femmesh.addNode(3257.123902973455, 678.7465645726024, 0.0, 1407)
femmesh.addNode(3282.3204952465285, 501.5753509584804, 0.0, 1408)
femmesh.addNode(1773.9450711803765, 925.3412408805996, 0.0, 1409)
femmesh.addNode(1614.2815501568834, 892.9673074908075, 0.0, 1410)
femmesh.addNode(2599.227228426601, 1674.0069586221043, 0.0, 1411)
femmesh.addNode(5425.239431664051, 4063.9614119677835, 0.0, 1412)
femmesh.addNode(5571.884422463227, 4001.651905122295, 0.0, 1413)
femmesh.addNode(3623.233636638705, 5499.54557741606, 0.0, 1414)
femmesh.addNode(3793.2310019814113, 5501.001218409676, 0.0, 1415)
femmesh.addNode(6517.660098685575, 3872.397656174039, 0.0, 1416)
femmesh.addNode(7665.797024726524, 989.8389789703012, 0.0, 1417)
femmesh.addNode(1754.7623435742553, 5439.059478202118, 0.0, 1418)
femmesh.addNode(1590.2806423655893, 5442.568227209706, 0.0, 1419)
femmesh.addNode(7305.168095569948, 1017.3298507612782, 0.0, 1420)
femmesh.addNode(7273.397204744513, 1191.6688649107014, 0.0, 1421)
femmesh.addNode(1820.565481851162, 4897.177549381549, 0.0, 1422)
femmesh.addNode(1679.0511909344682, 4833.986489993975, 0.0, 1423)
femmesh.addNode(1140.7206739181902, 956.5196777126826, 0.0, 1424)
femmesh.addNode(1325.6563115599047, 905.4443209387064, 0.0, 1425)
femmesh.addNode(1481.691122809021, 855.2261346072366, 0.0, 1426)
femmesh.addNode(1624.1176959400436, 747.2418256214639, 0.0, 1427)
femmesh.addNode(6321.3552341395625, 3351.9591329178975, 0.0, 1428)
femmesh.addNode(2613.772386041399, 2264.6132013259485, 0.0, 1429)
femmesh.addNode(2638.9372772466345, 2449.7882969835364, 0.0, 1430)
femmesh.addNode(5637.369541506576, 1546.9096470175139, 0.0, 1431)
femmesh.addNode(5792.533621768243, 1510.1374497063016, 0.0, 1432)
femmesh.addNode(3316.45855903776, 5046.308464714304, 0.0, 1433)
femmesh.addNode(3309.858079841066, 4915.565619829715, 0.0, 1434)
femmesh.addNode(3434.9199386865375, 5011.449936320086, 0.0, 1435)
femmesh.addNode(3408.458202343758, 4582.828631622579, 0.0, 1436)
femmesh.addNode(3415.923657665059, 4465.333750628996, 0.0, 1437)
femmesh.addNode(5082.781363142034, 5004.12052194165, 0.0, 1438)
femmesh.addNode(5066.416863065117, 5151.316944416521, 0.0, 1439)
femmesh.addNode(4938.609750414993, 5064.299019236427, 0.0, 1440)
femmesh.addNode(3311.7048758492174, 5173.569791162096, 0.0, 1441)
femmesh.addNode(3186.6430170037456, 5077.685474671725, 0.0, 1442)
femmesh.addNode(2360.1748925855427, 807.7344819255851, 0.0, 1443)
femmesh.addNode(2243.266312451858, 699.6326774751249, 0.0, 1444)
femmesh.addNode(2385.228374326516, 640.7105785405802, 0.0, 1445)
femmesh.addNode(6438.397090328279, 1038.3915682742656, 0.0, 1446)
femmesh.addNode(6526.225578736505, 887.9387804583948, 0.0, 1447)
femmesh.addNode(4897.351881939873, 1636.669882841738, 0.0, 1448)
femmesh.addNode(4913.197095497665, 1504.682812366681, 0.0, 1449)
femmesh.addNode(4847.096170181207, 1792.376933372279, 0.0, 1450)
femmesh.addNode(5000.671874214939, 1739.8550898435142, 0.0, 1451)
femmesh.addNode(2166.0897217703937, 4935.913400223888, 0.0, 1452)
femmesh.addNode(2334.4817091594405, 4974.143465495107, 0.0, 1453)
femmesh.addNode(2255.7487815347354, 5090.0795041872025, 0.0, 1454)
femmesh.addNode(2383.3444094056395, 4813.840667295215, 0.0, 1455)
femmesh.addNode(2473.003469169981, 4968.006771258528, 0.0, 1456)
femmesh.addNode(7118.701950499171, 3955.7597135508445, 0.0, 1457)
femmesh.addNode(6949.794893576172, 3963.877017952667, 0.0, 1458)
femmesh.addNode(6163.451770469694, 534.9333243000888, 0.0, 1459)
femmesh.addNode(6332.123785883627, 578.8115392435484, 0.0, 1460)
femmesh.addNode(6040.913750298081, 3654.3208914955358, 0.0, 1461)
femmesh.addNode(6212.579140942847, 3724.68292321817, 0.0, 1462)
femmesh.addNode(3586.1807730215605, 2871.83232737039, 0.0, 1463)
femmesh.addNode(3560.43293888522, 2709.291738894367, 0.0, 1464)
femmesh.addNode(3683.5626981177857, 2806.061372366212, 0.0, 1465)
femmesh.addNode(2143.7431128245685, 4016.4244490888505, 0.0, 1466)
femmesh.addNode(2164.7790410159314, 3843.072753776877, 0.0, 1467)
femmesh.addNode(3405.922195992228, 2411.827897000945, 0.0, 1468)
femmesh.addNode(3526.5025734079127, 2288.0151779715557, 0.0, 1469)
femmesh.addNode(6813.568725362953, 2667.8055468069683, 0.0, 1470)
femmesh.addNode(6676.739573661791, 2673.296411722641, 0.0, 1471)
femmesh.addNode(5829.7182080909215, 1172.6539851079926, 0.0, 1472)
femmesh.addNode(5749.127059356819, 1016.3499721054452, 0.0, 1473)
femmesh.addNode(1866.7142820120202, 1550.2941269887056, 0.0, 1474)
femmesh.addNode(1934.9784797665275, 1703.0037793598594, 0.0, 1475)
femmesh.addNode(3056.2492526518117, 4060.122835968419, 0.0, 1476)
femmesh.addNode(3133.489619513597, 3885.6291039750467, 0.0, 1477)
femmesh.addNode(4043.4642910075977, 1203.3653094484052, 0.0, 1478)
femmesh.addNode(4198.988483869451, 1160.538037564404, 0.0, 1479)
femmesh.addNode(2008.4603652682604, 4918.953524781184, 0.0, 1480)
femmesh.addNode(3169.1064831417525, 3560.914516692741, 0.0, 1481)
femmesh.addNode(3322.933906917914, 3537.1510460822137, 0.0, 1482)
femmesh.addNode(4717.228376805138, 141.62563849711006, 0.0, 1483)
femmesh.addNode(5480.1999838948905, 4698.763551265309, 0.0, 1484)
femmesh.addNode(650.2837442083339, 5841.4862839076595, 0.0, 1485)
femmesh.addNode(798.4318923564817, 5841.4862839076595, 0.0, 1486)
femmesh.addNode(7687.410888046285, 519.1631733752985, 0.0, 1487)
femmesh.addNode(7529.901351227529, 473.4949445187359, 0.0, 1488)
femmesh.addNode(5236.785583998768, 4841.594949223745, 0.0, 1489)
femmesh.addNode(5080.143841285706, 4783.165644318613, 0.0, 1490)
femmesh.addNode(7863.136795014331, 643.7663013812521, 0.0, 1491)
femmesh.addNode(7717.289080728595, 5865.258076988463, 0.0, 1492)
femmesh.addNode(7865.437228876744, 5865.258076988463, 0.0, 1493)
femmesh.addNode(3424.2318731181076, 4696.042155875102, 0.0, 1494)
femmesh.addNode(3278.2161642490855, 4617.27645414401, 0.0, 1495)
femmesh.addNode(1484.2094958792095, 4283.627012612018, 0.0, 1496)
femmesh.addNode(1360.4331966734753, 4415.325308646008, 0.0, 1497)
femmesh.addNode(4689.042345170026, 444.70927220882595, 0.0, 1498)
femmesh.addNode(4844.384004559597, 355.00469642948656, 0.0, 1499)
femmesh.addNode(4839.710336860073, 516.4626916440924, 0.0, 1500)
femmesh.addNode(114.00169869151115, 5171.229962584443, 0.0, 1501)
femmesh.addNode(114.00169869151115, 5321.229962584443, 0.0, 1502)
femmesh.addNode(114.80051584458568, 678.4077201366822, 0.0, 1503)
femmesh.addNode(114.80051584458568, 828.4077201366822, 0.0, 1504)
femmesh.addNode(6626.585829424963, 270.84690597404375, 0.0, 1505)
femmesh.addNode(6769.987426014464, 346.85027768574, 0.0, 1506)
femmesh.addNode(6694.100175398309, 424.8661661509056, 0.0, 1507)
femmesh.addNode(5044.1055899837065, 4896.698150961445, 0.0, 1508)
femmesh.addNode(5171.91270263383, 4983.71607614154, 0.0, 1509)
femmesh.addNode(7292.792494202946, 3289.4626588171154, 0.0, 1510)
femmesh.addNode(7228.48312812907, 3150.027726631677, 0.0, 1511)
femmesh.addNode(7368.104186712382, 3109.531061112169, 0.0, 1512)
femmesh.addNode(7865.437228876744, 5715.258076988463, 0.0, 1513)
femmesh.addNode(355.20455953102805, 1074.5042764004547, 0.0, 1514)
femmesh.addNode(403.2177731078364, 1247.9896697818197, 0.0, 1515)
femmesh.addNode(6794.512841800568, 4173.279930314427, 0.0, 1516)
femmesh.addNode(6725.923832909115, 4337.028776026626, 0.0, 1517)
femmesh.addNode(4091.9829470874884, 831.5629098729619, 0.0, 1518)
femmesh.addNode(4229.44096161959, 896.9068504013081, 0.0, 1519)
femmesh.addNode(7578.902962802856, 3190.3595315212474, 0.0, 1520)
femmesh.addNode(7712.524926578211, 3299.7122043072122, 0.0, 1521)
femmesh.addNode(7566.035516561613, 3358.318666083572, 0.0, 1522)
femmesh.addNode(3071.033379779111, 5878.55491741685, 0.0, 1523)
femmesh.addNode(2960.3469788826237, 5814.459267560988, 0.0, 1524)
femmesh.addNode(2981.024726570038, 5500.816858872628, 0.0, 1525)
femmesh.addNode(3134.044168676839, 5406.103004355232, 0.0, 1526)
femmesh.addNode(5480.841458535873, 2809.2176037924755, 0.0, 1527)
femmesh.addNode(5326.4780258501, 2822.78532330049, 0.0, 1528)
femmesh.addNode(5371.678466577312, 2688.716779682808, 0.0, 1529)
femmesh.addNode(7089.625607985102, 3590.318951147354, 0.0, 1530)
femmesh.addNode(7218.678670540165, 3700.747031153628, 0.0, 1531)
femmesh.addNode(6171.611152111733, 993.4709740972244, 0.0, 1532)
femmesh.addNode(6099.508482734118, 1150.8036081498212, 0.0, 1533)
femmesh.addNode(7849.828740168485, 3533.8356694347685, 0.0, 1534)
femmesh.addNode(7849.828740168485, 3383.8356694347685, 0.0, 1535)
femmesh.addNode(1585.8702320202276, 3887.8729486901784, 0.0, 1536)
femmesh.addNode(1559.6666730783343, 4060.9355716914215, 0.0, 1537)
femmesh.addNode(2864.120943431154, 5889.679594507472, 0.0, 1538)
femmesh.addNode(2861.5049593508197, 5704.13886206846, 0.0, 1539)
femmesh.addNode(993.2201180270279, 111.58761124481902, 0.0, 1540)
femmesh.addNode(996.0786506487285, 290.9545206599186, 0.0, 1541)
femmesh.addNode(4983.989085721447, 580.9031669988756, 0.0, 1542)
femmesh.addNode(122.33388559878529, 2370.5435551724368, 0.0, 1543)
femmesh.addNode(290.512623825742, 2458.26864129185, 0.0, 1544)
femmesh.addNode(1782.7938769993864, 110.89599482247478, 0.0, 1545)
femmesh.addNode(1316.601876545882, 3148.0695725105425, 0.0, 1546)
femmesh.addNode(1462.9133606973573, 3227.6409083193785, 0.0, 1547)
femmesh.addNode(1070.1120371609406, 2639.154529861933, 0.0, 1548)
femmesh.addNode(909.9265560757848, 2585.319888786877, 0.0, 1549)
femmesh.addNode(812.4742145734006, 2715.815065851335, 0.0, 1550)
femmesh.addNode(754.6140219032332, 2551.914331513829, 0.0, 1551)
femmesh.addNode(7840.438080432255, 2642.582452250086, 0.0, 1552)
femmesh.addNode(7840.438080432255, 2492.582452250086, 0.0, 1553)
femmesh.addNode(4779.935343874797, 4278.6125057756035, 0.0, 1554)
femmesh.addNode(4873.3401526440375, 4153.424160498336, 0.0, 1555)
femmesh.addNode(608.6098500251103, 149.13847750972647, 0.0, 1556)
femmesh.addNode(5048.853008014237, 3090.21882631235, 0.0, 1557)
femmesh.addNode(5200.554142762498, 3104.2243246640464, 0.0, 1558)
femmesh.addNode(5062.772744725182, 3203.0166448926348, 0.0, 1559)
femmesh.addNode(811.696895751641, 2845.44400293366, 0.0, 1560)
femmesh.addNode(925.9587018154612, 2901.529161234219, 0.0, 1561)
femmesh.addNode(6738.821444457219, 4478.982501699639, 0.0, 1562)
femmesh.addNode(6873.241383855811, 4443.269792825933, 0.0, 1563)
femmesh.addNode(3542.1884800124444, 2169.097555396752, 0.0, 1564)
femmesh.addNode(3538.567181266625, 1989.819108222927, 0.0, 1565)
femmesh.addNode(3662.039232886629, 2104.4658438543365, 0.0, 1566)
femmesh.addNode(656.3843615790895, 2812.038445660612, 0.0, 1567)
femmesh.addNode(4827.689456832637, 1936.153340992313, 0.0, 1568)
femmesh.addNode(4969.442639361765, 2046.7142250185284, 0.0, 1569)
femmesh.addNode(3403.0305217879086, 2173.3684423401464, 0.0, 1570)
femmesh.addNode(2633.154758149378, 2992.579519191497, 0.0, 1571)
femmesh.addNode(2489.512196165869, 2990.5391875067107, 0.0, 1572)
femmesh.addNode(5363.2153602114395, 4698.501241347194, 0.0, 1573)
femmesh.addNode(4275.897087489348, 4766.671284277816, 0.0, 1574)
femmesh.addNode(4399.947075295477, 4789.946945647339, 0.0, 1575)
femmesh.addNode(2574.610553751165, 5059.628592247711, 0.0, 1576)
femmesh.addNode(2574.900864399978, 5207.227596061336, 0.0, 1577)
femmesh.addNode(579.4980754341702, 604.6866165789229, 0.0, 1578)
femmesh.addNode(533.1706383469964, 438.11914765090637, 0.0, 1579)
femmesh.addNode(2623.473253997364, 4899.325794047818, 0.0, 1580)
femmesh.addNode(1143.860737551397, 541.9293791621222, 0.0, 1581)
femmesh.addNode(1059.328454724126, 389.0156634315863, 0.0, 1582)
femmesh.addNode(1182.0836299924395, 376.088938220174, 0.0, 1583)
femmesh.addNode(3453.7847756974397, 4374.006847527243, 0.0, 1584)
femmesh.addNode(3378.3582658194637, 4233.896155484537, 0.0, 1585)
femmesh.addNode(1581.6061054092638, 4931.832420746537, 0.0, 1586)
femmesh.addNode(1549.0902690322398, 4757.521422623797, 0.0, 1587)
femmesh.addNode(1733.6236863622744, 5602.272350708317, 0.0, 1588)
femmesh.addNode(1858.6616517183684, 5479.865834830243, 0.0, 1589)
femmesh.addNode(3492.3993167279473, 5276.240351955741, 0.0, 1590)
femmesh.addNode(3603.381836810154, 5147.372679360702, 0.0, 1591)
femmesh.addNode(7869.843553600499, 3964.509473792561, 0.0, 1592)
femmesh.addNode(3765.525612099783, 2678.0752710986153, 0.0, 1593)
femmesh.addNode(3791.273446236123, 2840.6158595746383, 0.0, 1594)
femmesh.addNode(3596.464575048989, 669.0768641635054, 0.0, 1595)
femmesh.addNode(3408.605074894644, 239.98396416744743, 0.0, 1596)
femmesh.addNode(3405.9289099067446, 112.40664199650215, 0.0, 1597)
femmesh.addNode(1890.1114806628764, 281.97501553463826, 0.0, 1598)
femmesh.addNode(1930.9420251475344, 110.89599482247478, 0.0, 1599)
femmesh.addNode(2033.2435295894134, 171.0790207121635, 0.0, 1600)
femmesh.addNode(7412.0178952172455, 425.3904004548167, 0.0, 1601)
femmesh.addNode(6988.079668627747, 4475.422686715026, 0.0, 1602)
femmesh.addNode(7690.393887947564, 4302.895921956158, 0.0, 1603)
femmesh.addNode(7661.874352819469, 4141.092075526674, 0.0, 1604)
femmesh.addNode(4455.465696813373, 4681.499625112829, 0.0, 1605)
femmesh.addNode(4506.867920127068, 4849.929241525163, 0.0, 1606)
femmesh.addNode(3328.0936332697365, 2756.4360082825424, 0.0, 1607)
femmesh.addNode(3430.363039989004, 2885.498362239832, 0.0, 1608)
femmesh.addNode(3109.63261361045, 112.40664199650215, 0.0, 1609)
femmesh.addNode(3154.246664311765, 271.9523722257575, 0.0, 1610)
femmesh.addNode(3007.577013664273, 159.54573022925535, 0.0, 1611)
femmesh.addNode(3257.7807617585977, 112.40664199650215, 0.0, 1612)
femmesh.addNode(2823.977887225684, 3928.3200418063625, 0.0, 1613)
femmesh.addNode(2691.547170114245, 4048.2965971635554, 0.0, 1614)
femmesh.addNode(2681.3293788575006, 3883.316373804735, 0.0, 1615)
femmesh.addNode(5770.520367692014, 743.441574281741, 0.0, 1616)
femmesh.addNode(5631.432954158519, 802.5073737707789, 0.0, 1617)
femmesh.addNode(5607.161333339139, 661.7765455593944, 0.0, 1618)
femmesh.addNode(7494.214998489888, 2804.8282493728148, 0.0, 1619)
femmesh.addNode(7542.828343796501, 2666.327684644012, 0.0, 1620)
femmesh.addNode(7632.262815557899, 2723.665469228975, 0.0, 1621)
femmesh.addNode(5272.00466439603, 529.2950532437749, 0.0, 1622)
femmesh.addNode(5092.275946137551, 523.0709465898495, 0.0, 1623)
femmesh.addNode(5174.780714508124, 432.9822225186782, 0.0, 1624)
femmesh.addNode(2359.24903030485, 2744.784484829699, 0.0, 1625)
femmesh.addNode(2429.0231723648467, 2589.667770801386, 0.0, 1626)
femmesh.addNode(6844.049120263435, 2533.7833363601835, 0.0, 1627)
femmesh.addNode(6953.2322430842405, 2685.9540569519677, 0.0, 1628)
femmesh.addNode(5090.658667021085, 4046.2554846953394, 0.0, 1629)
femmesh.addNode(5274.166461003479, 4082.5360796660957, 0.0, 1630)
femmesh.addNode(1312.5404544108571, 1928.6814312073982, 0.0, 1631)
femmesh.addNode(4186.016346788139, 354.7381774574521, 0.0, 1632)
femmesh.addNode(4337.283732309381, 294.14698292536474, 0.0, 1633)
femmesh.addNode(6169.200301701011, 4284.603497427983, 0.0, 1634)
femmesh.addNode(6044.279385934682, 4357.956278695052, 0.0, 1635)
femmesh.addNode(793.2064336009732, 2051.578576698631, 0.0, 1636)
femmesh.addNode(778.8035430126841, 1887.9766072491902, 0.0, 1637)
femmesh.addNode(5090.98101155189, 319.41223908800873, 0.0, 1638)
femmesh.addNode(5270.709729810371, 325.636345741934, 0.0, 1639)
femmesh.addNode(5830.429676805801, 5470.353705492884, 0.0, 1640)
femmesh.addNode(5959.344577402303, 5553.370854602572, 0.0, 1641)
femmesh.addNode(5808.545390007021, 5596.951125871068, 0.0, 1642)
femmesh.addNode(1586.4265237792115, 4542.803514558419, 0.0, 1643)
femmesh.addNode(1679.9244521331416, 4439.253708299107, 0.0, 1644)
femmesh.addNode(3715.708720176329, 1366.3435731423415, 0.0, 1645)
femmesh.addNode(3785.8898641643755, 1514.1307211260723, 0.0, 1646)
femmesh.addNode(3628.1872018342574, 1469.4449698363665, 0.0, 1647)
femmesh.addNode(3051.8410125877162, 3160.8284470078934, 0.0, 1648)
femmesh.addNode(2924.777956909063, 3227.235463009237, 0.0, 1649)
femmesh.addNode(2962.380640365788, 3082.2129172383457, 0.0, 1650)
femmesh.addNode(831.6453666937916, 3943.1529315482658, 0.0, 1651)
femmesh.addNode(703.4392694579965, 3818.818790668602, 0.0, 1652)
femmesh.addNode(845.8361212968007, 3788.490656318461, 0.0, 1653)
femmesh.addNode(6356.822450816, 3952.5692382407356, 0.0, 1654)
femmesh.addNode(6426.64567184439, 3764.1288261479704, 0.0, 1655)
femmesh.addNode(3037.14887670638, 2527.5000332879326, 0.0, 1656)
femmesh.addNode(3186.563818110103, 2596.802270394897, 0.0, 1657)
femmesh.addNode(7278.57811007085, 481.40654913582085, 0.0, 1658)
femmesh.addNode(4218.683578414832, 4271.176513518723, 0.0, 1659)
femmesh.addNode(4382.488484102001, 4242.155849150317, 0.0, 1660)
femmesh.addNode(4349.777837815003, 4367.717763053066, 0.0, 1661)
femmesh.addNode(4364.987729888175, 4107.763165138471, 0.0, 1662)
femmesh.addNode(4496.081989288346, 4204.3044146728125, 0.0, 1663)
femmesh.addNode(3315.449841765593, 5737.124823896522, 0.0, 1664)
femmesh.addNode(3357.7828829759073, 5578.315319523264, 0.0, 1665)
femmesh.addNode(2816.180509089168, 2475.814235830405, 0.0, 1666)
femmesh.addNode(7766.538011398985, 106.59156586830207, 0.0, 1667)
femmesh.addNode(7738.960252579088, 281.9884378623484, 0.0, 1668)
femmesh.addNode(6427.9687325657405, 291.9108291802993, 0.0, 1669)
femmesh.addNode(6411.878702420593, 117.47943196069464, 0.0, 1670)
femmesh.addNode(6534.60854866366, 174.43139721960466, 0.0, 1671)
femmesh.addNode(7127.04717168945, 501.08781094078637, 0.0, 1672)
femmesh.addNode(6972.84720580808, 568.2141032609561, 0.0, 1673)
femmesh.addNode(6991.70173786918, 433.7432455424322, 0.0, 1674)
femmesh.addNode(1367.0590479896537, 1553.2145727521242, 0.0, 1675)
femmesh.addNode(1524.6358377297884, 1594.6204472662853, 0.0, 1676)
femmesh.addNode(5391.4515600302475, 2134.1750157218303, 0.0, 1677)
femmesh.addNode(5534.392713752847, 2195.7455813773613, 0.0, 1678)
femmesh.addNode(5445.046975167876, 2300.5219187574585, 0.0, 1679)
femmesh.addNode(148.91014884901634, 4715.3852017848785, 0.0, 1680)
femmesh.addNode(7259.72357800975, 615.8774068543447, 0.0, 1681)
femmesh.addNode(7229.035259801647, 4369.986473545458, 0.0, 1682)
femmesh.addNode(6851.374173982636, 1082.8480096155336, 0.0, 1683)
femmesh.addNode(6816.6405792590995, 938.2342227001556, 0.0, 1684)
femmesh.addNode(5003.97520307091, 3304.6295485645496, 0.0, 1685)
femmesh.addNode(1215.4423402492498, 244.93369496597566, 0.0, 1686)
femmesh.addNode(1299.9746230765209, 397.8474106965116, 0.0, 1687)
femmesh.addNode(7406.686048593622, 2642.9304053595815, 0.0, 1688)
femmesh.addNode(7544.733865661633, 2561.767625215742, 0.0, 1689)
femmesh.addNode(1949.9769995900783, 3549.036982448878, 0.0, 1690)
femmesh.addNode(1932.7886822022792, 3728.836281757617, 0.0, 1691)
femmesh.addNode(1834.4722267333923, 3590.9408772524866, 0.0, 1692)
femmesh.addNode(2985.204476995612, 1174.7601365562223, 0.0, 1693)
femmesh.addNode(2635.3005585803803, 3276.5854115362845, 0.0, 1694)
femmesh.addNode(2707.0351774404508, 3136.1559547188044, 0.0, 1695)
femmesh.addNode(2763.5829658270645, 3289.049390057169, 0.0, 1696)
femmesh.addNode(5682.111899272497, 938.9651617754096, 0.0, 1697)
femmesh.addNode(5518.752864919621, 857.3001330530628, 0.0, 1698)
femmesh.addNode(5002.480728881244, 3885.2988091664174, 0.0, 1699)
femmesh.addNode(4937.335679993895, 4020.751907242507, 0.0, 1700)
femmesh.addNode(2491.7374670008803, 3217.7505103934163, 0.0, 1701)
femmesh.addNode(2548.285255387494, 3370.643945731781, 0.0, 1702)
femmesh.addNode(7325.390589247099, 2352.6739416713235, 0.0, 1703)
femmesh.addNode(7187.218553978806, 2289.412475933546, 0.0, 1704)
femmesh.addNode(7343.252508842312, 2216.3367649781776, 0.0, 1705)
femmesh.addNode(7862.696186409727, 3215.8765348724437, 0.0, 1706)
femmesh.addNode(3555.4877180696603, 3860.7224939691796, 0.0, 1707)
femmesh.addNode(3536.9411203975233, 3718.6784721119648, 0.0, 1708)
femmesh.addNode(4785.1622145041965, 3992.4674849694147, 0.0, 1709)
femmesh.addNode(4469.850896418127, 810.2762918094336, 0.0, 1710)
femmesh.addNode(5465.089258650267, 1557.112922879795, 0.0, 1711)
femmesh.addNode(1224.8332150771453, 2639.3841349408044, 0.0, 1712)
femmesh.addNode(1122.507926662157, 2749.4502282032545, 0.0, 1713)
femmesh.addNode(6810.400773782209, 221.16350054683153, 0.0, 1714)
femmesh.addNode(6877.915119755555, 375.1827607236934, 0.0, 1715)
femmesh.addNode(3413.5994020923436, 1646.0351097431233, 0.0, 1716)
femmesh.addNode(3291.103546395683, 1707.9164171407156, 0.0, 1717)
femmesh.addNode(3280.021083051801, 1579.837478149679, 0.0, 1718)
femmesh.addNode(1109.621909284879, 5272.654820071086, 0.0, 1719)
femmesh.addNode(1025.783840745229, 5373.699338010901, 0.0, 1720)
femmesh.addNode(5877.746522845147, 5348.49771386187, 0.0, 1721)
femmesh.addNode(5726.947335449865, 5392.077985130366, 0.0, 1722)
femmesh.addNode(3300.8286688587, 1839.1952163605347, 0.0, 1723)
femmesh.addNode(3167.2503498181572, 1772.9975847670905, 0.0, 1724)
femmesh.addNode(7067.547691130674, 3468.9488598983344, 0.0, 1725)
femmesh.addNode(5325.347643893138, 5373.0442253657975, 0.0, 1726)
femmesh.addNode(5197.886830488666, 5246.848349374484, 0.0, 1727)
femmesh.addNode(3130.962880333418, 3257.6835002181897, 0.0, 1728)
femmesh.addNode(3168.5655637901436, 3112.6609544472994, 0.0, 1729)
femmesh.addNode(5127.942595456558, 5547.667011886648, 0.0, 1730)
femmesh.addNode(5310.05585510093, 5538.632795030586, 0.0, 1731)
femmesh.addNode(875.7014700480215, 1357.230480733051, 0.0, 1732)
femmesh.addNode(792.9798776208206, 1511.220473816049, 0.0, 1733)
femmesh.addNode(727.7360799759065, 1375.403157613251, 0.0, 1734)
femmesh.addNode(2853.043338048992, 3367.6649198267164, 0.0, 1735)
femmesh.addNode(846.9232326575204, 854.4255488523577, 0.0, 1736)
femmesh.addNode(720.0039575458038, 781.6934889429297, 0.0, 1737)
femmesh.addNode(7238.676800401273, 3993.994076746672, 0.0, 1738)
femmesh.addNode(1186.0725848267357, 2845.091007512148, 0.0, 1739)
femmesh.addNode(1031.3514069105308, 2844.8614024332765, 0.0, 1740)
femmesh.addNode(2064.8473315324773, 1667.8232271335578, 0.0, 1741)
femmesh.addNode(2133.8646837975193, 1552.2492566617275, 0.0, 1742)
femmesh.addNode(1772.709099265866, 3142.5364870905537, 0.0, 1743)
femmesh.addNode(1865.0187156330508, 3294.133079123231, 0.0, 1744)
femmesh.addNode(2002.3403382612016, 1807.6190563597877, 0.0, 1745)
femmesh.addNode(781.0185221901592, 4482.936507206478, 0.0, 1746)
femmesh.addNode(862.1213995712776, 4639.095030285949, 0.0, 1747)
femmesh.addNode(719.3139931482483, 4612.443657788505, 0.0, 1748)
femmesh.addNode(2286.9786006684376, 2476.16513118711, 0.0, 1749)
femmesh.addNode(5053.715217761624, 5307.026846669262, 0.0, 1750)
femmesh.addNode(5193.016449893154, 1392.7716410702897, 0.0, 1751)
femmesh.addNode(5096.651168149363, 1249.338457374906, 0.0, 1752)
femmesh.addNode(5248.109090562626, 1289.589525685661, 0.0, 1753)
femmesh.addNode(5273.548817682424, 901.0290274025733, 0.0, 1754)
femmesh.addNode(5259.508116119532, 1052.7392885210738, 0.0, 1755)
femmesh.addNode(5165.784510381725, 994.4460808717768, 0.0, 1756)
femmesh.addNode(3561.5251056292645, 1695.6730832771284, 0.0, 1757)
femmesh.addNode(3550.442642285382, 1567.5941442860922, 0.0, 1758)
femmesh.addNode(3541.549899788518, 1849.6507227073357, 0.0, 1759)
femmesh.addNode(3678.3931399815565, 1771.2097572503046, 0.0, 1760)
femmesh.addNode(5775.6577494459125, 3087.5547388411537, 0.0, 1761)
femmesh.addNode(755.289428271755, 4916.919343668762, 0.0, 1762)
femmesh.addNode(596.1943038974507, 4979.4414663977495, 0.0, 1763)
femmesh.addNode(641.3217461550627, 4831.124303596964, 0.0, 1764)
femmesh.addNode(7897.375199756655, 3673.5283138999966, 0.0, 1765)
femmesh.addNode(7747.203939925139, 3607.363983334765, 0.0, 1766)
femmesh.addNode(2093.7003044795997, 280.75655688079513, 0.0, 1767)
femmesh.addNode(2231.653709023638, 375.98192321369163, 0.0, 1768)
femmesh.addNode(2130.3663896487933, 437.3834077572235, 0.0, 1769)
femmesh.addNode(2647.164223200557, 4764.455391384985, 0.0, 1770)
femmesh.addNode(2748.7713077817407, 4856.077212374168, 0.0, 1771)
femmesh.addNode(602.2556215340285, 1017.804500596867, 0.0, 1772)
femmesh.addNode(766.5918029313693, 1076.6941247703132, 0.0, 1773)
femmesh.addNode(646.1214910057665, 1162.5235403568067, 0.0, 1774)
femmesh.addNode(6253.07953817037, 1242.9259345798073, 0.0, 1775)
femmesh.addNode(6109.545341415853, 1288.9027549282061, 0.0, 1776)
femmesh.addNode(4617.374715328257, 4231.918418454952, 0.0, 1777)
femmesh.addNode(4676.143372048713, 4385.39118600531, 0.0, 1778)
femmesh.addNode(3480.261498297336, 1419.8069963023613, 0.0, 1779)
femmesh.addNode(3298.7911258992617, 1431.177353739779, 0.0, 1780)
femmesh.addNode(6371.810362282979, 1124.2799773365537, 0.0, 1781)
femmesh.addNode(5533.2507218046685, 1855.3009073601388, 0.0, 1782)
femmesh.addNode(5408.632309050067, 1927.2335542186759, 0.0, 1783)
femmesh.addNode(5461.085732239517, 1753.1357832370818, 0.0, 1784)
femmesh.addNode(789.1325707601827, 5584.814155579445, 0.0, 1785)
femmesh.addNode(951.1232650545894, 5627.831253438085, 0.0, 1786)
femmesh.addNode(869.9655901184829, 5725.989665673958, 0.0, 1787)
femmesh.addNode(917.2734639830696, 1633.5003175885117, 0.0, 1788)
femmesh.addNode(769.3080739109546, 1651.6729944687115, 0.0, 1789)
femmesh.addNode(1722.4991021045048, 3664.4825190540228, 0.0, 1790)
femmesh.addNode(5597.390079539387, 1984.1038470429312, 0.0, 1791)
femmesh.addNode(1480.6892533139965, 590.8704505042731, 0.0, 1792)
femmesh.addNode(1595.947578875917, 485.7492466740875, 0.0, 1793)
femmesh.addNode(1606.7855941541018, 604.3794489077075, 0.0, 1794)
femmesh.addNode(5874.240405702203, 153.2928274585484, 0.0, 1795)
femmesh.addNode(5726.092257554055, 153.2928274585484, 0.0, 1796)
femmesh.addNode(1561.1113557523217, 3676.711051761384, 0.0, 1797)
femmesh.addNode(1655.89616299341, 3782.968709268587, 0.0, 1798)
femmesh.addNode(5596.148642568822, 3872.872344251229, 0.0, 1799)
femmesh.addNode(5710.083407802466, 4000.8711981508977, 0.0, 1800)
femmesh.addNode(6948.870902325116, 913.5905038796445, 0.0, 1801)
femmesh.addNode(7078.405632021133, 854.8381539367886, 0.0, 1802)
femmesh.addNode(2621.1276671674095, 684.3906546747112, 0.0, 1803)
femmesh.addNode(2776.675714107506, 779.9930998283725, 0.0, 1804)
femmesh.addNode(2856.853157040181, 2318.7841141378126, 0.0, 1805)
femmesh.addNode(2934.979017894045, 2442.17472676002, 0.0, 1806)
femmesh.addNode(2040.942290037624, 2127.6293287785284, 0.0, 1807)
femmesh.addNode(2088.1354012963575, 2269.5473532559117, 0.0, 1808)
femmesh.addNode(7032.115085636925, 308.0564684035237, 0.0, 1809)
femmesh.addNode(7014.719789317204, 124.74799179239241, 0.0, 1810)
femmesh.addNode(3077.8215246573927, 2370.469911595341, 0.0, 1811)
femmesh.addNode(7288.974102421174, 2482.059999279001, 0.0, 1812)
femmesh.addNode(7445.008057284679, 2408.9842883236333, 0.0, 1813)
femmesh.addNode(7183.533247001819, 5869.978509446724, 0.0, 1814)
femmesh.addNode(7331.681395149968, 5869.978509446724, 0.0, 1815)
femmesh.addNode(902.7816148150912, 4376.480890332696, 0.0, 1816)
femmesh.addNode(759.9742083920619, 4349.829517835253, 0.0, 1817)
femmesh.addNode(2449.1785401723328, 3099.237336505863, 0.0, 1818)
femmesh.addNode(2359.9379661444354, 2966.084547814653, 0.0, 1819)
femmesh.addNode(7865.3141538852915, 5356.042309040155, 0.0, 1820)
femmesh.addNode(3355.527573097588, 5858.569906479672, 0.0, 1821)
femmesh.addNode(3503.675721245736, 5858.569906479672, 0.0, 1822)
femmesh.addNode(3693.6433966943337, 873.8467405155088, 0.0, 1823)
femmesh.addNode(3621.2101717073288, 1014.7715245315554, 0.0, 1824)
femmesh.addNode(3774.4370065749376, 1080.8765259658132, 0.0, 1825)
femmesh.addNode(3614.7831297951334, 1135.3267294675425, 0.0, 1826)
femmesh.addNode(7308.168304136851, 3540.3907854193963, 0.0, 1827)
femmesh.addNode(7329.519705348524, 3668.438309738362, 0.0, 1828)
femmesh.addNode(2351.071561555146, 5464.8016075310625, 0.0, 1829)
femmesh.addNode(2517.9439460772987, 5530.8661191260135, 0.0, 1830)
femmesh.addNode(2389.0946067443715, 5592.483835165739, 0.0, 1831)
femmesh.addNode(7476.708374486895, 3623.3374582747188, 0.0, 1832)
femmesh.addNode(7387.218740890208, 3783.6937040089506, 0.0, 1833)
femmesh.addNode(264.8159832750697, 4794.274475181297, 0.0, 1834)
femmesh.addNode(115.90583442605337, 4878.8892733964185, 0.0, 1835)
femmesh.addNode(5066.142806421767, 2936.5613377421214, 0.0, 1836)
femmesh.addNode(6610.269149649973, 4508.00993484406, 0.0, 1837)
femmesh.addNode(6757.586700596668, 4614.250951643367, 0.0, 1838)
femmesh.addNode(4334.870903894452, 5879.9619519579355, 0.0, 1839)
femmesh.addNode(4340.567910506723, 5708.837366687199, 0.0, 1840)
femmesh.addNode(4450.141451056704, 5828.875414729264, 0.0, 1841)
femmesh.addNode(3078.9276225240787, 4324.095807476484, 0.0, 1842)
femmesh.addNode(2946.1440390510493, 4429.205442927528, 0.0, 1843)
femmesh.addNode(4074.807870057888, 4932.449789978336, 0.0, 1844)
femmesh.addNode(4012.842558023305, 4801.062711233935, 0.0, 1845)
femmesh.addNode(4141.284010962332, 4780.438160576226, 0.0, 1846)
femmesh.addNode(1747.2579783724107, 3875.644415982817, 0.0, 1847)
femmesh.addNode(2660.622957582937, 5718.953767887817, 0.0, 1848)
femmesh.addNode(2715.9727952830062, 5889.679594507472, 0.0, 1849)
femmesh.addNode(7904.05724182452, 2776.4838779695324, 0.0, 1850)
femmesh.addNode(7744.4953222567765, 2719.066330219619, 0.0, 1851)
femmesh.addNode(7031.978852167146, 5894.103755299298, 0.0, 1852)
femmesh.addNode(7104.400988057858, 5764.082264746022, 0.0, 1853)
femmesh.addNode(1937.1775113175327, 3840.538160590363, 0.0, 1854)
femmesh.addNode(1975.34075937455, 4012.1586933798717, 0.0, 1855)
femmesh.addNode(1855.4471574026104, 3942.360709350734, 0.0, 1856)
femmesh.addNode(7471.863053455067, 3151.1056498896787, 0.0, 1857)
femmesh.addNode(7618.352463471664, 3092.499188113319, 0.0, 1858)
femmesh.addNode(4661.4783306005265, 5077.529712672761, 0.0, 1859)
femmesh.addNode(4811.519364438349, 5084.000365417214, 0.0, 1860)
femmesh.addNode(4772.204216500253, 5205.024788966846, 0.0, 1861)
femmesh.addNode(2798.7068677440025, 1787.6258129911243, 0.0, 1862)
femmesh.addNode(2834.1870140297374, 1940.7674693071983, 0.0, 1863)
femmesh.addNode(4827.883864515266, 4936.8039429423425, 0.0, 1864)
femmesh.addNode(6144.39480834965, 1402.428534879095, 0.0, 1865)
femmesh.addNode(5990.823752913397, 1310.3062084491094, 0.0, 1866)
femmesh.addNode(4136.9778702808835, 234.58247592585707, 0.0, 1867)
femmesh.addNode(6610.495799279815, 96.4155087544391, 0.0, 1868)
femmesh.addNode(2990.6537116378404, 3421.4299439355646, 0.0, 1869)
femmesh.addNode(2829.458720555842, 3483.243870983497, 0.0, 1870)
femmesh.addNode(2740.820636317535, 2211.9391300121174, 0.0, 1871)
femmesh.addNode(2859.6191451224117, 2178.2996209417324, 0.0, 1872)
femmesh.addNode(3908.672078980894, 5859.7542282351, 0.0, 1873)
femmesh.addNode(4056.820227129041, 5859.7542282351, 0.0, 1874)
femmesh.addNode(6467.247234647448, 5001.545724247097, 0.0, 1875)
femmesh.addNode(6591.255959536233, 4974.710635327365, 0.0, 1876)
femmesh.addNode(6589.807806515715, 5859.025006498776, 0.0, 1877)
femmesh.addNode(6737.955954663863, 5859.025006498776, 0.0, 1878)
femmesh.addNode(5078.381324322112, 1062.6655124319248, 0.0, 1879)
femmesh.addNode(5092.422025885003, 910.9552513134244, 0.0, 1880)
femmesh.addNode(2859.3008525074783, 4804.418507806555, 0.0, 1881)
femmesh.addNode(2835.609883304285, 4939.288910469388, 0.0, 1882)
femmesh.addNode(960.4225866508884, 5884.503381766299, 0.0, 1883)
femmesh.addNode(3891.875561993178, 4839.298287739064, 0.0, 1884)
femmesh.addNode(3958.351702897622, 4687.286658336954, 0.0, 1885)
femmesh.addNode(2701.965277530323, 1289.8390017920174, 0.0, 1886)
femmesh.addNode(2686.070832009581, 1422.2999702533723, 0.0, 1887)
femmesh.addNode(605.603006177899, 3881.7563314754566, 0.0, 1888)
femmesh.addNode(619.7937607809081, 3727.0940562456517, 0.0, 1889)
femmesh.addNode(6518.640164297685, 767.2873666590488, 0.0, 1890)
femmesh.addNode(2544.0487402064828, 3852.7056010919828, 0.0, 1891)
femmesh.addNode(2676.4794573179215, 3732.72904573479, 0.0, 1892)
femmesh.addNode(1600.4381120887474, 4324.401748834931, 0.0, 1893)
femmesh.addNode(1754.04487122091, 4337.7239198884345, 0.0, 1894)
femmesh.addNode(6710.777454781017, 542.467609048538, 0.0, 1895)
femmesh.addNode(6567.375858191516, 466.4642373368417, 0.0, 1896)
femmesh.addNode(3597.0397254102018, 4017.7082010103677, 0.0, 1897)
femmesh.addNode(3692.166693420489, 3950.8847568771052, 0.0, 1898)
femmesh.addNode(2405.8428937046474, 3738.963817978146, 0.0, 1899)
femmesh.addNode(5619.887142045157, 5306.876096301861, 0.0, 1900)
femmesh.addNode(5572.570296005811, 5428.7320879328745, 0.0, 1901)
femmesh.addNode(6546.420083768266, 2143.6364977291387, 0.0, 1902)
femmesh.addNode(6534.71285351807, 2003.1437303829262, 0.0, 1903)
femmesh.addNode(6661.755972195055, 2060.89762175425, 0.0, 1904)
femmesh.addNode(1785.421226429428, 4047.2649487723256, 0.0, 1905)
femmesh.addNode(229.11242057140092, 901.0950384454567, 0.0, 1906)
femmesh.addNode(114.31190472681524, 972.6873183087745, 0.0, 1907)
femmesh.addNode(115.90583442605337, 5028.8892733964185, 0.0, 1908)
femmesh.addNode(229.90753311756453, 5100.119235980861, 0.0, 1909)
femmesh.addNode(5651.253492858983, 5172.165535363206, 0.0, 1910)
femmesh.addNode(2273.6450066794923, 5640.314262947215, 0.0, 1911)
femmesh.addNode(2132.2498444518, 2371.7418705284013, 0.0, 1912)
femmesh.addNode(1964.6544828830106, 2321.1415322539888, 0.0, 1913)
femmesh.addNode(2807.178257839564, 2862.364633570077, 0.0, 1914)
femmesh.addNode(2881.058677130637, 3005.9410690973846, 0.0, 1915)
femmesh.addNode(2761.4371653960625, 3005.0434977123814, 0.0, 1916)
femmesh.addNode(5046.243056726747, 1468.334306036254, 0.0, 1917)
femmesh.addNode(5197.7009791400105, 1508.5853743470088, 0.0, 1918)
femmesh.addNode(3924.3826261840104, 5566.353830582119, 0.0, 1919)
femmesh.addNode(3834.340681907346, 5654.155844297757, 0.0, 1920)
femmesh.addNode(2765.008403937973, 4336.461585722394, 0.0, 1921)
femmesh.addNode(2763.7599262879953, 4173.709179747522, 0.0, 1922)
femmesh.addNode(4052.5730994594905, 5449.276911793069, 0.0, 1923)
femmesh.addNode(4093.6827793854254, 5602.431537681148, 0.0, 1924)
femmesh.addNode(982.6683693024718, 5503.227450641941, 0.0, 1925)
femmesh.addNode(417.2879452566067, 466.8016395220319, 0.0, 1926)
femmesh.addNode(508.7444143672392, 326.95944689057853, 0.0, 1927)
femmesh.addNode(5450.218313808894, 5509.118768769158, 0.0, 1928)
femmesh.addNode(5557.278507213602, 5594.320657597664, 0.0, 1929)
femmesh.addNode(470.5308171071142, 2515.3115745530667, 0.0, 1930)
femmesh.addNode(630.932919080582, 2568.7912427227966, 0.0, 1931)
femmesh.addNode(496.7595784273812, 2628.929840408557, 0.0, 1932)
femmesh.addNode(697.0632151891969, 5201.988225101015, 0.0, 1933)
femmesh.addNode(833.3415872979735, 5146.418012275519, 0.0, 1934)
femmesh.addNode(820.2428164730273, 5283.169730906987, 0.0, 1935)
femmesh.addNode(541.1354489900748, 2350.456368850859, 0.0, 1936)
femmesh.addNode(406.96210833687405, 2410.594966536619, 0.0, 1937)
femmesh.addNode(557.5924610273341, 2748.984203611966, 0.0, 1938)
femmesh.addNode(397.1903590538665, 2695.5045354422364, 0.0, 1939)
femmesh.addNode(2868.69212630085, 632.9041796646253, 0.0, 1940)
femmesh.addNode(2905.2494605337374, 792.9283531346159, 0.0, 1941)
femmesh.addNode(7664.578904770944, 3742.8247401726503, 0.0, 1942)
femmesh.addNode(7617.032445182774, 3603.132095707422, 0.0, 1943)
femmesh.addNode(5688.834075509207, 5635.835371315612, 0.0, 1944)
femmesh.addNode(2835.0680853597864, 1087.804777581361, 0.0, 1945)
femmesh.addNode(2963.369439976219, 1026.9726322732245, 0.0, 1946)
femmesh.addNode(6340.423995102085, 1665.3379852695818, 0.0, 1947)
femmesh.addNode(6226.803367498764, 1529.9725840824003, 0.0, 1948)
femmesh.addNode(6198.40100231583, 4607.962674867387, 0.0, 1949)
femmesh.addNode(6295.655785452038, 4695.889122002292, 0.0, 1950)
femmesh.addNode(6141.086104889979, 4721.160980590818, 0.0, 1951)
femmesh.addNode(1095.0269737107994, 1554.6450345422584, 0.0, 1952)
femmesh.addNode(1104.2637701267145, 1213.6699445437166, 0.0, 1953)
femmesh.addNode(958.5007554570523, 1272.825361796577, 0.0, 1954)
femmesh.addNode(956.2206870727696, 1162.2577472773928, 0.0, 1955)
femmesh.addNode(2199.5709326054184, 5877.104601161822, 0.0, 1956)
femmesh.addNode(1734.081020453886, 2787.9723588713723, 0.0, 1957)
femmesh.addNode(1891.001108009575, 2851.915795276942, 0.0, 1958)
femmesh.addNode(1736.9676219184273, 2902.338765495332, 0.0, 1959)
femmesh.addNode(1828.5895853807847, 2634.6256020695173, 0.0, 1960)
femmesh.addNode(6134.584287627019, 2987.6289609233636, 0.0, 1961)
femmesh.addNode(6101.7290497114745, 4521.373303761668, 0.0, 1962)
femmesh.addNode(6256.298730273534, 4496.101445173143, 0.0, 1963)
femmesh.addNode(5857.354078933115, 1418.2240730282406, 0.0, 1964)
femmesh.addNode(5822.504611999318, 1304.6982930773515, 0.0, 1965)
femmesh.addNode(1888.9167951274944, 2477.5189325062656, 0.0, 1966)
femmesh.addNode(1844.8023519720516, 2375.3244152337757, 0.0, 1967)
femmesh.addNode(5960.852762682323, 1515.7453650780592, 0.0, 1968)
femmesh.addNode(1059.1873144795802, 4457.153479875167, 0.0, 1969)
femmesh.addNode(1038.1430006814828, 4324.046490503941, 0.0, 1970)
femmesh.addNode(943.9554550959066, 4714.485342509688, 0.0, 1971)
femmesh.addNode(1094.7388707024388, 4778.633087063434, 0.0, 1972)
femmesh.addNode(951.2002861358992, 4832.749925421722, 0.0, 1973)
femmesh.addNode(1780.464510430469, 3037.115297889648, 0.0, 1974)
femmesh.addNode(1623.54442287478, 2973.171861484078, 0.0, 1975)
femmesh.addNode(7695.881976950162, 2857.5668949484207, 0.0, 1976)
femmesh.addNode(5066.073965793246, 3770.1498268108467, 0.0, 1977)
femmesh.addNode(6309.629407533934, 1018.9383787391344, 0.0, 1978)
femmesh.addNode(2122.9942760122385, 3030.256591203203, 0.0, 1979)
femmesh.addNode(2231.8819403421817, 2971.23325307748, 0.0, 1980)
femmesh.addNode(2210.716645820956, 3133.8060349395278, 0.0, 1981)
femmesh.addNode(7024.44415382708, 714.5494111291157, 0.0, 1982)
femmesh.addNode(7162.154117822402, 775.847411466983, 0.0, 1983)
femmesh.addNode(2135.765057170747, 583.2265580310386, 0.0, 1984)
femmesh.addNode(1997.8116526267086, 488.0011916981422, 0.0, 1985)
femmesh.addNode(5306.876686569009, 3892.227718920938, 0.0, 1986)
femmesh.addNode(7175.249352474062, 3451.329415585643, 0.0, 1987)
femmesh.addNode(6925.481021494717, 5565.3732342272815, 0.0, 1988)
femmesh.addNode(6883.440836197393, 5438.282119578644, 0.0, 1989)
femmesh.addNode(5606.86911812363, 2093.70659134238, 0.0, 1990)
femmesh.addNode(5660.464533261258, 2260.0534943780085, 0.0, 1991)
femmesh.addNode(285.11217913693537, 2714.4291240584766, 0.0, 1992)
femmesh.addNode(116.93344090997864, 2776.7040379390633, 0.0, 1993)
femmesh.addNode(4988.972507509319, 857.1114505032854, 0.0, 1994)
femmesh.addNode(4893.101089120466, 960.2231076099847, 0.0, 1995)
femmesh.addNode(2016.9080134214657, 1939.5556255808235, 0.0, 1996)
femmesh.addNode(1896.5057631114096, 2030.8733117837942, 0.0, 1997)
femmesh.addNode(1323.6834929207503, 2362.971732976295, 0.0, 1998)
femmesh.addNode(1323.8824958973114, 2195.3209915792086, 0.0, 1999)
femmesh.addNode(1453.1206788718498, 2310.1831316849275, 0.0, 2000)
femmesh.addNode(361.1172557087026, 2293.413435589642, 0.0, 2001)
femmesh.addNode(1312.679500735609, 2503.461154840749, 0.0, 2002)
femmesh.addNode(2049.2545816915085, 2522.9418628172593, 0.0, 2003)
femmesh.addNode(1929.4024507805498, 2577.124745797046, 0.0, 2004)
femmesh.addNode(2621.111417919812, 4128.705511745895, 0.0, 2005)
femmesh.addNode(2586.805127643279, 4290.876113564129, 0.0, 2006)
femmesh.addNode(6555.48563213801, 3021.6568402593944, 0.0, 2007)
femmesh.addNode(2964.686401565857, 3727.1623270283053, 0.0, 2008)
femmesh.addNode(3062.925705868978, 3801.3223506665336, 0.0, 2009)
femmesh.addNode(2911.9994002720605, 3837.4998420857705, 0.0, 2010)
femmesh.addNode(4559.593457997189, 5722.662049699071, 0.0, 2011)
femmesh.addNode(4553.896451384919, 5893.786634969807, 0.0, 2012)
femmesh.addNode(4724.369464557572, 1832.9681339905367, 0.0, 2013)
femmesh.addNode(4932.877376845581, 4561.061428400926, 0.0, 2014)
femmesh.addNode(5811.981793766906, 5878.868382934923, 0.0, 2015)
femmesh.addNode(5817.748976105708, 5718.8525204253, 0.0, 2016)
femmesh.addNode(5931.69310826472, 5839.984137490378, 0.0, 2017)
femmesh.addNode(3044.4560275336207, 4547.633598909104, 0.0, 2018)
femmesh.addNode(2935.004622264955, 4537.233233096503, 0.0, 2019)
femmesh.addNode(2203.983337908146, 2627.365123475968, 0.0, 2020)
femmesh.addNode(2172.7355001048554, 2471.347683819182, 0.0, 2021)
femmesh.addNode(357.0191453164326, 4925.712486559394, 0.0, 2022)
femmesh.addNode(390.02345973939555, 4762.208414947855, 0.0, 2023)
femmesh.addNode(2019.0236400037904, 2916.9830056210285, 0.0, 2024)
femmesh.addNode(1997.8583454825643, 3079.5557874830765, 0.0, 2025)
femmesh.addNode(2110.7115754297733, 750.2504614160435, 0.0, 2026)
femmesh.addNode(3041.340900653732, 3613.9968653507062, 0.0, 2027)
femmesh.addNode(2890.4145950568145, 3650.174356769943, 0.0, 2028)
femmesh.addNode(5311.137711930107, 2967.35632674696, 0.0, 2029)
femmesh.addNode(5201.974719971546, 2846.855502637292, 0.0, 2030)
femmesh.addNode(6176.620599958227, 3478.7754619684, 0.0, 2031)
femmesh.addNode(6025.885224029433, 3489.674616050829, 0.0, 2032)
femmesh.addNode(3895.154928619901, 930.6495208394973, 0.0, 2033)
femmesh.addNode(3823.3517150693233, 825.6290814342326, 0.0, 2034)
femmesh.addNode(3937.6425952020913, 795.9572812439036, 0.0, 2035)
femmesh.addNode(7862.696186409727, 3065.8765348724437, 0.0, 2036)
femmesh.addNode(2285.5350859982673, 3398.9212438450045, 0.0, 2037)
femmesh.addNode(2134.088494839776, 3339.431408083173, 0.0, 2038)
femmesh.addNode(2253.2755726495034, 3252.3192088270816, 0.0, 2039)
femmesh.addNode(3757.9376274800234, 3823.5144088598654, 0.0, 2040)
femmesh.addNode(3799.4896348205643, 3980.5001159010535, 0.0, 2041)
femmesh.addNode(3947.3972170108864, 3924.7483901974074, 0.0, 2042)
femmesh.addNode(3881.6262829513516, 4052.1187382146472, 0.0, 2043)
femmesh.addNode(2046.3701964838601, 1468.469720312286, 0.0, 2044)
femmesh.addNode(2182.749407243576, 1457.5110268403841, 0.0, 2045)
femmesh.addNode(6309.568571257469, 3704.27396738706, 0.0, 2046)
femmesh.addNode(6470.406219127042, 3624.1023853203633, 0.0, 2047)
femmesh.addNode(7189.547221898076, 1413.6196060209998, 0.0, 2048)
femmesh.addNode(7039.384252158704, 1369.4352964596017, 0.0, 2049)
femmesh.addNode(2874.950817498702, 1655.2972602927139, 0.0, 2050)
femmesh.addNode(2748.8702815829956, 1632.842771485479, 0.0, 2051)
femmesh.addNode(2197.6919269761875, 5746.535498570298, 0.0, 2052)
femmesh.addNode(4431.063797741977, 4480.107213959832, 0.0, 2053)
femmesh.addNode(4267.258892054808, 4509.127878328238, 0.0, 2054)
femmesh.addNode(3948.709146240657, 2797.42376769304, 0.0, 2055)
femmesh.addNode(3866.7462322586603, 2925.409868960637, 0.0, 2056)
femmesh.addNode(3820.7106735845946, 1306.2305276669238, 0.0, 2057)
femmesh.addNode(3663.0080112544765, 1261.5447763772179, 0.0, 2058)
femmesh.addNode(3497.537051254293, 5563.713867833076, 0.0, 2059)
femmesh.addNode(3577.4924723203353, 5652.971522542328, 0.0, 2060)
femmesh.addNode(4089.5349859034623, 134.00909568126906, 0.0, 2061)
femmesh.addNode(2134.5308489642575, 109.67753616863163, 0.0, 2062)
femmesh.addNode(5312.718628907873, 1166.4983181975251, 0.0, 2063)
femmesh.addNode(6222.290924853829, 4001.9976645016054, 0.0, 2064)
femmesh.addNode(1079.9876943417273, 3545.8533105911742, 0.0, 2065)
femmesh.addNode(1045.8966946477672, 3419.1669711684094, 0.0, 2066)
femmesh.addNode(1004.0300629388269, 4934.771360211719, 0.0, 2067)
femmesh.addNode(853.2466473322945, 4870.6236156579735, 0.0, 2068)
femmesh.addNode(4830.323435150298, 1299.6378865566894, 0.0, 2069)
femmesh.addNode(4020.790950150148, 4108.498417222629, 0.0, 2070)
femmesh.addNode(3872.8833679598265, 4164.250142926276, 0.0, 2071)
femmesh.addNode(551.9085475982022, 5895.652885340391, 0.0, 2072)
femmesh.addNode(4539.250884690918, 3984.9103855782014, 0.0, 2073)
femmesh.addNode(4401.091734194644, 3967.4977028993835, 0.0, 2074)
femmesh.addNode(1739.3760215019643, 642.1206217912784, 0.0, 2075)
femmesh.addNode(4414.712086359601, 964.1149262439815, 0.0, 2076)
femmesh.addNode(4432.560100345837, 1105.6970606519585, 0.0, 2077)
femmesh.addNode(2902.88944386623, 4688.434099394707, 0.0, 2078)
femmesh.addNode(2804.5774553836586, 4570.005943413131, 0.0, 2079)
femmesh.addNode(3736.131063276524, 739.1545009199151, 0.0, 2080)
femmesh.addNode(6742.441712167366, 4957.732317566175, 0.0, 2081)
femmesh.addNode(6899.619288926972, 4937.769251053827, 0.0, 2082)
femmesh.addNode(6853.295633519457, 5058.611014899903, 0.0, 2083)
femmesh.addNode(3945.615355042516, 2376.3212266330206, 0.0, 2084)
femmesh.addNode(3851.206075049406, 2279.9510227922738, 0.0, 2085)
femmesh.addNode(1013.4351897339211, 1043.9426153036084, 0.0, 2086)
femmesh.addNode(1052.1721619096795, 897.3599629917585, 0.0, 2087)
femmesh.addNode(6985.961643481083, 5682.269782192422, 0.0, 2088)
femmesh.addNode(7137.516038315756, 5658.144536339848, 0.0, 2089)
femmesh.addNode(1519.8452677808427, 388.03580605075194, 0.0, 2090)
femmesh.addNode(1645.941608620948, 401.5448044541864, 0.0, 2091)
femmesh.addNode(2873.8152439865144, 2057.5325844996282, 0.0, 2092)
femmesh.addNode(2989.84776470916, 2164.3775686253234, 0.0, 2093)
femmesh.addNode(4223.585308002688, 5622.639261403985, 0.0, 2094)
femmesh.addNode(3935.8342441096834, 4338.594214069401, 0.0, 2095)
femmesh.addNode(611.873125946062, 4334.106769829625, 0.0, 2096)
femmesh.addNode(733.6362185709941, 4227.651152955843, 0.0, 2097)
femmesh.addNode(7581.150352487558, 2432.381567608064, 0.0, 2098)
femmesh.addNode(3403.524047294277, 5424.889374396997, 0.0, 2099)
femmesh.addNode(1787.7696786472493, 2119.6000411866103, 0.0, 2100)
femmesh.addNode(122.33388559878529, 2220.5435551724368, 0.0, 2101)
femmesh.addNode(107.16520404830159, 534.4523311883751, 0.0, 2102)
femmesh.addNode(221.96571989288725, 612.8600513250573, 0.0, 2103)
femmesh.addNode(219.5388987937784, 5385.207576670434, 0.0, 2104)
femmesh.addNode(105.53720010226726, 5463.977614085991, 0.0, 2105)
femmesh.addNode(1259.259259259257, 133.34608372115665, 0.0, 2106)
femmesh.addNode(1282.3138456194688, 2868.7521275506388, 0.0, 2107)
femmesh.addNode(1433.723060511657, 2913.1098754544573, 0.0, 2108)
femmesh.addNode(948.00360479041, 880.8294566385132, 0.0, 2109)
femmesh.addNode(1075.289088974679, 793.4065190475874, 0.0, 2110)
femmesh.addNode(6589.536761353122, 2574.932247808146, 0.0, 2111)
femmesh.addNode(6460.997526481414, 2481.9823565555043, 0.0, 2112)
femmesh.addNode(6575.846367670441, 2422.6849349625427, 0.0, 2113)
femmesh.addNode(2947.5439702643134, 898.6912291650685, 0.0, 2114)
femmesh.addNode(3039.5603824576574, 751.6023090013214, 0.0, 2115)
femmesh.addNode(2536.400969330436, 3484.3843141861344, 0.0, 2116)
femmesh.addNode(2392.837877750936, 3425.549413043266, 0.0, 2117)
femmesh.addNode(1516.7188628949098, 2976.433528555862, 0.0, 2118)
femmesh.addNode(1370.3943890438286, 3051.1480749205566, 0.0, 2119)
femmesh.addNode(2087.1932928929655, 4803.017486089089, 0.0, 2120)
femmesh.addNode(349.94486876785226, 799.0054127402232, 0.0, 2121)
femmesh.addNode(349.45625765008185, 943.2850109123156, 0.0, 2122)
femmesh.addNode(2179.505229241481, 4695.819659655164, 0.0, 2123)
femmesh.addNode(269.82220153744595, 4177.761905376815, 0.0, 2124)
femmesh.addNode(333.80191494586927, 4032.0114400249277, 0.0, 2125)
femmesh.addNode(391.6256612038553, 4161.124494027914, 0.0, 2126)
femmesh.addNode(6630.144079734002, 713.4466357372087, 0.0, 2127)
femmesh.addNode(6756.868396940794, 671.8485645512726, 0.0, 2128)
femmesh.addNode(6889.09872000681, 647.2048457307616, 0.0, 2129)
femmesh.addNode(6808.465344959794, 818.1838724194323, 0.0, 2130)
femmesh.addNode(5948.3528377930525, 238.14633278656453, 0.0, 2131)
femmesh.addNode(6085.026401050542, 273.47208248674957, 0.0, 2132)
femmesh.addNode(105.99922763972995, 4124.324425686915, 0.0, 2133)
femmesh.addNode(913.4057171390466, 5357.127630712532, 0.0, 2134)
femmesh.addNode(1714.0797789029489, 2413.71857274216, 0.0, 2135)
femmesh.addNode(1789.817466658465, 2257.341172489884, 0.0, 2136)
femmesh.addNode(4343.739180673707, 100.573380244588, 0.0, 2137)
femmesh.addNode(777.1273450302701, 5412.697843538027, 0.0, 2138)
femmesh.addNode(4908.512566250808, 1388.8690790899618, 0.0, 2139)
femmesh.addNode(218.06796723691878, 345.9569586599116, 0.0, 2140)
femmesh.addNode(107.16520404830159, 384.45233118837507, 0.0, 2141)
femmesh.addNode(110.90276318861717, 261.5046274715365, 0.0, 2142)
femmesh.addNode(105.53720010226726, 5613.977614085991, 0.0, 2143)
femmesh.addNode(216.32598405769025, 5652.694311614887, 0.0, 2144)
femmesh.addNode(110.78878395542299, 5738.716697528895, 0.0, 2145)
femmesh.addNode(1194.213704220396, 2106.2409471171622, 0.0, 2146)
femmesh.addNode(1323.6508901714956, 2053.4523458257954, 0.0, 2147)
femmesh.addNode(3439.4562760794015, 4036.9645626917677, 0.0, 2148)
femmesh.addNode(5102.323366317663, 3637.623711727803, 0.0, 2149)
femmesh.addNode(5192.053116432853, 3778.276271536206, 0.0, 2150)
femmesh.addNode(2743.606969362506, 1039.0218085536048, 0.0, 2151)
femmesh.addNode(2576.3704242610265, 1085.2182210809174, 0.0, 2152)
femmesh.addNode(6804.3613253016465, 2116.410331309036, 0.0, 2153)
femmesh.addNode(6792.65409505145, 1975.9175639628234, 0.0, 2154)
femmesh.addNode(2752.8042214679663, 3596.4093326610955, 0.0, 2155)
femmesh.addNode(1988.9273719447988, 2680.048532380511, 0.0, 2156)
femmesh.addNode(3041.1563155074246, 1725.8995436728155, 0.0, 2157)
femmesh.addNode(1100.281604188516, 682.3965648871836, 0.0, 2158)
femmesh.addNode(1223.0367794568297, 669.4698396757713, 0.0, 2159)
femmesh.addNode(366.4010352573292, 5784.369582869286, 0.0, 2160)
femmesh.addNode(403.7603994500542, 5895.652885340391, 0.0, 2161)
femmesh.addNode(258.93693210357094, 5888.716697528895, 0.0, 2162)
femmesh.addNode(6472.549222137908, 637.9064111563141, 0.0, 2163)
femmesh.addNode(3220.21226513397, 4264.997464950411, 0.0, 2164)
femmesh.addNode(3300.3036115071172, 4412.736026789735, 0.0, 2165)
femmesh.addNode(3182.351147101589, 4356.324368052165, 0.0, 2166)
femmesh.addNode(1323.0632877363346, 5198.36581262421, 0.0, 2167)
femmesh.addNode(1092.4349912400621, 3809.8866812990364, 0.0, 2168)
femmesh.addNode(1228.641778249933, 3825.032690734036, 0.0, 2169)
femmesh.addNode(1110.2490055424669, 3927.9708066331245, 0.0, 2170)
femmesh.addNode(5432.862246156699, 1659.8085310110405, 0.0, 2171)
femmesh.addNode(5312.247359812849, 1535.7183175122905, 0.0, 2172)
femmesh.addNode(4284.579771678116, 743.0682159667602, 0.0, 2173)
femmesh.addNode(4339.5006478753985, 877.8269672713129, 0.0, 2174)
femmesh.addNode(7869.253322910761, 4723.916159348964, 0.0, 2175)
femmesh.addNode(7869.253322910761, 4573.916159348964, 0.0, 2176)
femmesh.addNode(2414.7491299018284, 1299.415971174509, 0.0, 2177)
femmesh.addNode(2512.8936954094506, 1188.247910036332, 0.0, 2178)
femmesh.addNode(5907.448716416637, 3538.9300693246923, 0.0, 2179)
femmesh.addNode(5789.796822400842, 3680.0852525336304, 0.0, 2180)
femmesh.addNode(3551.203558284512, 5030.119083462583, 0.0, 2181)
femmesh.addNode(3713.064977211434, 5035.298981618609, 0.0, 2182)
femmesh.addNode(3544.6030790878176, 4899.376238577994, 0.0, 2183)
femmesh.addNode(6255.063988027521, 4362.120848560535, 0.0, 2184)
femmesh.addNode(2150.775005635456, 4226.542046895456, 0.0, 2185)
femmesh.addNode(1982.3726521854378, 4222.2762911864775, 0.0, 2186)
femmesh.addNode(2062.012758909646, 4118.246997849221, 0.0, 2187)
femmesh.addNode(5834.149784716776, 2263.3079993690417, 0.0, 2188)
femmesh.addNode(5761.673380345992, 2365.346989404023, 0.0, 2189)
femmesh.addNode(1495.8199231385202, 5718.564900509944, 0.0, 2190)
femmesh.addNode(1550.58628059581, 5886.806208536376, 0.0, 2191)
femmesh.addNode(3979.885036149308, 3498.4802202200585, 0.0, 2192)
femmesh.addNode(4079.391222570686, 3591.950624184464, 0.0, 2193)
femmesh.addNode(2139.2190761347856, 5178.57969758471, 0.0, 2194)
femmesh.addNode(2035.440833527116, 5310.98145928874, 0.0, 2195)
femmesh.addNode(3159.018968897226, 4471.8343693158085, 0.0, 2196)
femmesh.addNode(3147.8795521111315, 4579.862159484784, 0.0, 2197)
femmesh.addNode(3703.4463807042207, 5701.600063430739, 0.0, 2198)
femmesh.addNode(3577.749795319809, 5765.768353847754, 0.0, 2199)
femmesh.addNode(3322.681248862429, 3102.725019119357, 0.0, 2200)
femmesh.addNode(2009.0042761548657, 3448.587510522422, 0.0, 2201)
femmesh.addNode(1976.7447628061018, 3301.985475504499, 0.0, 2202)
femmesh.addNode(7335.781866850299, 1450.367997192385, 0.0, 2203)
femmesh.addNode(7298.808459317808, 1301.5625737448872, 0.0, 2204)
femmesh.addNode(6433.342048945007, 2207.808825033858, 0.0, 2205)
femmesh.addNode(6560.385167621993, 2265.562716405182, 0.0, 2206)
femmesh.addNode(6344.1602456867, 5084.7892642009865, 0.0, 2207)
femmesh.addNode(6363.3068733518685, 4954.090729196368, 0.0, 2208)
femmesh.addNode(5420.48293620857, 1073.0812647283215, 0.0, 2209)
femmesh.addNode(5636.4469701179205, 1071.1427313877293, 0.0, 2210)
femmesh.addNode(5521.562629515254, 1090.2991077312333, 0.0, 2211)
femmesh.addNode(278.23760289704717, 1514.9346070429979, 0.0, 2212)
femmesh.addNode(115.9124845934236, 1568.7618953528586, 0.0, 2213)
femmesh.addNode(5103.528122822843, 1686.2812414461969, 0.0, 2214)
femmesh.addNode(5053.2724110641775, 1841.9882919767379, 0.0, 2215)
femmesh.addNode(1051.8538532787088, 4028.3488181723155, 0.0, 2216)
femmesh.addNode(1005.7013945209077, 4193.623370053989, 0.0, 2217)
femmesh.addNode(927.8897597747948, 4078.0993490797973, 0.0, 2218)
femmesh.addNode(4666.586453120597, 5493.698449588381, 0.0, 2219)
femmesh.addNode(4621.383858353274, 5614.038132439276, 0.0, 2220)
femmesh.addNode(3918.1728968318985, 3349.2515745121373, 0.0, 2221)
femmesh.addNode(3856.021579839062, 3472.4683516780933, 0.0, 2222)
femmesh.addNode(1431.3744672554456, 4538.966189024446, 0.0, 2223)
femmesh.addNode(1265.489970927942, 4528.35996042485, 0.0, 2224)
femmesh.addNode(7726.825377946028, 1942.2272502505975, 0.0, 2225)
femmesh.addNode(7890.914159617989, 1969.6823553603576, 0.0, 2226)
femmesh.addNode(7835.9112183280395, 2072.54489489024, 0.0, 2227)
femmesh.addNode(7624.124586218421, 2280.7844078576386, 0.0, 2228)
femmesh.addNode(7723.850394595375, 2433.5677447497474, 0.0, 2229)
femmesh.addNode(2602.790930382124, 5344.664757077794, 0.0, 2230)
femmesh.addNode(2452.030834905928, 5265.746730474879, 0.0, 2231)
femmesh.addNode(6000.038358016767, 84.85350532801614, 0.0, 2232)
femmesh.addNode(6806.1573192643755, 2255.2599571703895, 0.0, 2233)
femmesh.addNode(6937.055442120771, 2170.279899378963, 0.0, 2234)
femmesh.addNode(456.59054196021316, 4259.4016821650075, 0.0, 2235)
femmesh.addNode(2875.5000635273027, 5450.603180186572, 0.0, 2236)
femmesh.addNode(2736.1713142964963, 5407.3184002070575, 0.0, 2237)
femmesh.addNode(520.5702553686365, 4113.651216813121, 0.0, 2238)
femmesh.addNode(4725.138858681489, 5678.94935267982, 0.0, 2239)
femmesh.addNode(867.672175064259, 1103.0980325564688, 0.0, 2240)
femmesh.addNode(4612.257019158057, 1857.3018836995557, 0.0, 2241)
femmesh.addNode(4631.663732506628, 1713.5254760795217, 0.0, 2242)
femmesh.addNode(5165.473966646443, 1611.2809824782546, 0.0, 2243)
femmesh.addNode(1491.483298033678, 4655.838166337262, 0.0, 2244)
femmesh.addNode(244.04867770733344, 2209.7428223064157, 0.0, 2245)
femmesh.addNode(518.4476741122903, 4790.486231347066, 0.0, 2246)
femmesh.addNode(677.5427984865944, 4727.964108618078, 0.0, 2247)
femmesh.addNode(6401.074322406835, 3457.8401533400597, 0.0, 2248)
femmesh.addNode(2675.4711781813003, 1541.6784059236938, 0.0, 2249)
femmesh.addNode(2107.3207316237526, 3586.4829150275527, 0.0, 2250)
femmesh.addNode(3735.665386497428, 3428.4265957897655, 0.0, 2251)
femmesh.addNode(3593.0193578296266, 3410.838213366608, 0.0, 2252)
femmesh.addNode(3629.6296296296214, 5907.198447368082, 0.0, 2253)
femmesh.addNode(3969.469575994247, 4552.804844018185, 0.0, 2254)
femmesh.addNode(3987.4372362959184, 2534.404134861873, 0.0, 2255)
femmesh.addNode(4078.573499719767, 2434.240662785238, 0.0, 2256)
femmesh.addNode(4186.722755746305, 5879.9619519579355, 0.0, 2257)
femmesh.addNode(7890.914159617989, 1819.6823553603576, 0.0, 2258)
femmesh.addNode(7870.910225369618, 1425.0, 0.0, 2259)
femmesh.addNode(7870.910225369618, 1275.0, 0.0, 2260)
femmesh.addNode(6866.5716411690555, 124.74799179239241, 0.0, 2261)
femmesh.addNode(7559.33340964109, 3487.8767014368336, 0.0, 2262)
femmesh.addNode(7425.711445865734, 3378.524028650869, 0.0, 2263)
femmesh.addNode(4599.87396111443, 4097.525734443105, 0.0, 2264)
femmesh.addNode(3765.1238050583524, 1991.5513807924126, 0.0, 2265)
femmesh.addNode(3768.745103804172, 2170.829827966237, 0.0, 2266)
femmesh.addNode(351.69790691313676, 5051.4450770115, 0.0, 2267)
femmesh.addNode(349.7937711785945, 5193.785766199524, 0.0, 2268)
femmesh.addNode(3954.29064722113, 2167.03655973035, 0.0, 2269)
femmesh.addNode(5349.774749851744, 225.05735254665314, 0.0, 2270)
femmesh.addNode(5260.121680093729, 106.03318115563226, 0.0, 2271)
femmesh.addNode(5422.986403091337, 119.02417139102087, 0.0, 2272)
femmesh.addNode(6972.734766711699, 5180.3988864382645, 0.0, 2273)
femmesh.addNode(684.2473493622629, 1269.0568003803999, 0.0, 2274)
femmesh.addNode(519.911167964922, 1210.1671762069536, 0.0, 2275)
femmesh.addNode(601.525756935062, 1423.0467934633975, 0.0, 2276)
femmesh.addNode(4132.645815527791, 4522.894754626648, 0.0, 2277)
femmesh.addNode(4022.7966925942883, 4426.648516813008, 0.0, 2278)
femmesh.addNode(2307.611063523833, 5216.809762855928, 0.0, 2279)
femmesh.addNode(6806.97184386805, 5753.128761798074, 0.0, 2280)
femmesh.addNode(6883.8307040189975, 5894.103755299298, 0.0, 2281)
femmesh.addNode(5682.423351996169, 2473.1204054741647, 0.0, 2282)
femmesh.addNode(5651.450237182475, 2629.648693355067, 0.0, 2283)
femmesh.addNode(5557.015014076781, 2518.6207722938916, 0.0, 2284)
femmesh.addNode(6061.175605781594, 1761.2377989538459, 0.0, 2285)
femmesh.addNode(6043.261321831436, 1643.2894142813643, 0.0, 2286)
femmesh.addNode(5856.108603451687, 2476.3749104651984, 0.0, 2287)
femmesh.addNode(2536.4277788531863, 1463.4641573899976, 0.0, 2288)
femmesh.addNode(3124.5363245561475, 451.08367804190334, 0.0, 2289)
femmesh.addNode(3209.9752350559697, 369.58313337508775, 0.0, 2290)
femmesh.addNode(6056.180269888446, 1910.2129210796688, 0.0, 2291)
femmesh.addNode(6185.4950022522735, 1827.8952089324314, 0.0, 2292)
femmesh.addNode(5479.9108472077705, 2418.777637569393, 0.0, 2293)
femmesh.addNode(5336.969693485171, 2357.2070719138624, 0.0, 2294)
femmesh.addNode(3097.073728736894, 614.4576642980824, 0.0, 2295)
femmesh.addNode(3024.7284685463346, 482.46544671468973, 0.0, 2296)
femmesh.addNode(5448.937732394076, 2575.305925450295, 0.0, 2297)
femmesh.addNode(3957.140937022644, 1508.2175924067838, 0.0, 2298)
femmesh.addNode(3993.513256214237, 1350.2198989274975, 0.0, 2299)
femmesh.addNode(4077.242810730434, 1447.4081669022337, 0.0, 2300)
femmesh.addNode(6263.730554272444, 117.47943196069464, 0.0, 2301)
femmesh.addNode(2276.357360839713, 1398.018391909584, 0.0, 2302)
femmesh.addNode(2408.635663619351, 1442.688142454751, 0.0, 2303)
femmesh.addNode(2615.523582816949, 3565.798559948343, 0.0, 2304)
femmesh.addNode(5076.375693568934, 788.8920189431374, 0.0, 2305)
femmesh.addNode(752.5445905477064, 1774.5202439268849, 0.0, 2306)
femmesh.addNode(628.2510041854574, 1652.240400154422, 0.0, 2307)
femmesh.addNode(4568.613810082554, 4443.321216931354, 0.0, 2308)
femmesh.addNode(4486.094864322358, 4584.7313322065265, 0.0, 2309)
femmesh.addNode(2300.3602261328915, 251.10589758032114, 0.0, 2310)
femmesh.addNode(2337.0263113020856, 407.7327484567495, 0.0, 2311)
femmesh.addNode(1023.7445531019662, 1408.6426779993749, 0.0, 2312)
femmesh.addNode(3978.48925755599, 5297.687259382592, 0.0, 2313)
femmesh.addNode(3919.4744122954903, 5167.363945793937, 0.0, 2314)
femmesh.addNode(7883.412314163119, 2340.985292499661, 0.0, 2315)
femmesh.addNode(2922.2171915288045, 4002.4800654445908, 0.0, 2316)
femmesh.addNode(7284.267698537137, 1917.8673882414973, 0.0, 2317)
femmesh.addNode(7306.234586862327, 2063.963116896591, 0.0, 2318)
femmesh.addNode(7183.11358524883, 2006.9795705853064, 0.0, 2319)
femmesh.addNode(3842.604318912467, 3054.3267766741105, 0.0, 2320)
femmesh.addNode(3885.1037730462303, 3231.6456492380794, 0.0, 2321)
femmesh.addNode(1500.8365723925306, 5612.491689013663, 0.0, 2322)
femmesh.addNode(7243.423425557955, 5591.002886373262, 0.0, 2323)
femmesh.addNode(7385.454700595449, 5490.191897550645, 0.0, 2324)
femmesh.addNode(7397.986657930025, 5639.1460300708295, 0.0, 2325)
femmesh.addNode(785.4929079359906, 4108.427483429939, 0.0, 2326)
femmesh.addNode(441.34363146433276, 1354.5229298054128, 0.0, 2327)
femmesh.addNode(484.83236207797637, 1460.8692870382638, 0.0, 2328)
femmesh.addNode(1177.82625911983, 1470.2399156057843, 0.0, 2329)
femmesh.addNode(1222.788131331175, 1315.0431616852973, 0.0, 2330)
femmesh.addNode(1294.070551940008, 1461.0455182281808, 0.0, 2331)
femmesh.addNode(782.2250360398752, 2326.7993293606614, 0.0, 2332)
femmesh.addNode(817.3955750619575, 2166.668140706004, 0.0, 2333)
femmesh.addNode(1412.0209202009983, 1398.0178188316372, 0.0, 2334)
femmesh.addNode(542.9961288993845, 1583.7786069338063, 0.0, 2335)
femmesh.addNode(382.8140034286554, 1515.2547432758215, 0.0, 2336)
femmesh.addNode(476.0370077274794, 972.4146506952213, 0.0, 2337)
femmesh.addNode(6746.491221881684, 5636.232213832933, 0.0, 2338)
femmesh.addNode(6892.365971236818, 5671.310962633455, 0.0, 2339)
femmesh.addNode(4949.513525319367, 3428.022319097815, 0.0, 2340)
femmesh.addNode(3378.3524073609483, 3248.7894743907264, 0.0, 2341)
femmesh.addNode(4180.441565880041, 1385.2155424400858, 0.0, 2342)
femmesh.addNode(4144.0692466884475, 1543.213235919372, 0.0, 2343)
femmesh.addNode(1211.962640586179, 4656.184672477344, 0.0, 2344)
femmesh.addNode(1141.021370004209, 4532.543792098906, 0.0, 2345)
femmesh.addNode(3235.143845328288, 3320.180102717005, 0.0, 2346)
femmesh.addNode(3365.8088211533905, 3359.453569450136, 0.0, 2347)
femmesh.addNode(1005.6599841378174, 4584.978191927661, 0.0, 2348)
femmesh.addNode(5343.126087093426, 3759.7016038378943, 0.0, 2349)
femmesh.addNode(476.3245810279854, 3675.100488306136, 0.0, 2350)
femmesh.addNode(6039.839792973478, 5736.051854305991, 0.0, 2351)
femmesh.addNode(6034.072610634676, 5896.067716815613, 0.0, 2352)
femmesh.addNode(4724.539138080684, 3879.8521361045114, 0.0, 2353)
femmesh.addNode(728.3852605829661, 2438.2960656583386, 0.0, 2354)
femmesh.addNode(5048.63093587988, 2472.9650724552275, 0.0, 2355)
femmesh.addNode(5190.07296768651, 2564.5168410614115, 0.0, 2356)
femmesh.addNode(4200.895429533289, 1823.9546370893156, 0.0, 2357)
femmesh.addNode(4360.715161839882, 1832.5462620969483, 0.0, 2358)
femmesh.addNode(2371.3078228737295, 4357.410141417675, 0.0, 2359)
femmesh.addNode(2246.372338457064, 4254.849451797576, 0.0, 2360)
femmesh.addNode(3993.698451029044, 1996.67767316947, 0.0, 2361)
femmesh.addNode(2610.504161533043, 1241.0560327642613, 0.0, 2362)
femmesh.addNode(2765.442006381899, 1186.8093128366027, 0.0, 2363)
femmesh.addNode(6031.885498192052, 4788.388776411837, 0.0, 2364)
femmesh.addNode(5911.911163459012, 4863.674236303366, 0.0, 2365)
femmesh.addNode(5923.856987020729, 4708.519993347441, 0.0, 2366)
femmesh.addNode(7286.090387282423, 3419.020694170377, 0.0, 2367)
femmesh.addNode(6129.14028132826, 4876.315223546742, 0.0, 2368)
femmesh.addNode(3663.8180092765756, 3663.756267722638, 0.0, 2369)
femmesh.addNode(3797.377525814837, 3577.655241497687, 0.0, 2370)
femmesh.addNode(3784.1742026182096, 3707.798023610966, 0.0, 2371)
femmesh.addNode(6961.2820370341105, 2430.5306858727495, 0.0, 2372)
femmesh.addNode(6821.618519312824, 2412.38217572775, 0.0, 2373)
femmesh.addNode(4163.547062261613, 1658.9375765627624, 0.0, 2374)
femmesh.addNode(4078.2188588105164, 1770.4229209080731, 0.0, 2375)
femmesh.addNode(4311.743405990881, 1165.2881532354422, 0.0, 2376)
femmesh.addNode(4226.370052043458, 1294.1605890939854, 0.0, 2377)
femmesh.addNode(2931.9671225196457, 269.2843737173223, 0.0, 2378)
femmesh.addNode(2887.3530718183306, 109.73864348806697, 0.0, 2379)
femmesh.addNode(1233.6060741626293, 3084.745919409138, 0.0, 2380)
femmesh.addNode(7483.715458329985, 1450.3539772910274, 0.0, 2381)
femmesh.addNode(7374.454220910253, 1562.41100956714, 0.0, 2382)
femmesh.addNode(6088.226950014987, 2417.976531869165, 0.0, 2383)
femmesh.addNode(6134.123109319581, 2545.1496486351202, 0.0, 2384)
femmesh.addNode(5981.254791106105, 2495.774611161012, 0.0, 2385)
femmesh.addNode(2964.4156336164856, 1926.8454169907893, 0.0, 2386)
femmesh.addNode(7595.445078651803, 4034.4888425202735, 0.0, 2387)
femmesh.addNode(4147.789410757405, 5333.764966481622, 0.0, 2388)
femmesh.addNode(3163.6532228854803, 4693.0756837373065, 0.0, 2389)
femmesh.addNode(3550.1444435941503, 3588.5356899986855, 0.0, 2390)
femmesh.addNode(3466.1424689505743, 3465.760417755935, 0.0, 2391)
femmesh.addNode(3923.3624172998075, 1264.1747349529553, 0.0, 2392)
femmesh.addNode(3998.108632906041, 1876.616042541863, 0.0, 2393)
femmesh.addNode(3960.7602656343647, 1711.5989820153097, 0.0, 2394)
femmesh.addNode(977.5435159496644, 3774.7232823360196, 0.0, 2395)
femmesh.addNode(5294.574299708302, 2588.8736449583093, 0.0, 2396)
femmesh.addNode(5524.439605680094, 591.1605143232691, 0.0, 2397)
femmesh.addNode(5663.5270192135895, 532.0947148342314, 0.0, 2398)
femmesh.addNode(3789.5091927760964, 1717.5121107345985, 0.0, 2399)
femmesh.addNode(3769.5339869353493, 1871.4897501648056, 0.0, 2400)
femmesh.addNode(3869.619418680572, 1611.3189891008087, 0.0, 2401)
femmesh.addNode(3891.497144018285, 3737.4133826349143, 0.0, 2402)
femmesh.addNode(7883.412314163119, 2190.985292499661, 0.0, 2403)
femmesh.addNode(7719.323532491159, 2163.530187389901, 0.0, 2404)
femmesh.addNode(5571.1345512394855, 119.02417139102087, 0.0, 2405)
femmesh.addNode(4095.394834727208, 5739.716180193035, 0.0, 2406)
femmesh.addNode(6443.627701319943, 524.9708083217083, 0.0, 2407)
femmesh.addNode(6426.950421937234, 407.36936542407597, 0.0, 2408)
femmesh.addNode(3050.8814379704413, 1857.1783428926346, 0.0, 2409)
femmesh.addNode(6688.9244024937, 2358.5126076578235, 0.0, 2410)
femmesh.addNode(2460.0762051803913, 3620.7040272598288, 0.0, 2411)
femmesh.addNode(4020.1797335369106, 726.5424704676972, 0.0, 2412)
femmesh.addNode(7448.859880910843, 1652.6270012929142, 0.0, 2413)
femmesh.addNode(7300.926289431156, 1652.6410211942718, 0.0, 2414)
femmesh.addNode(6199.552138529563, 421.55654536320475, 0.0, 2415)
femmesh.addNode(5667.59717916392, 272.31699884956925, 0.0, 2416)
femmesh.addNode(5717.436503704574, 2738.718171980015, 0.0, 2417)
femmesh.addNode(5592.028165785186, 2784.218538799742, 0.0, 2418)
femmesh.addNode(3312.0308350021464, 322.4440451423345, 0.0, 2419)
femmesh.addNode(5776.8802221853475, 2849.623155216233, 0.0, 2420)
femmesh.addNode(5836.302293582637, 2695.053309771558, 0.0, 2421)
femmesh.addNode(1273.6175606474137, 5096.826954832783, 0.0, 2422)
femmesh.addNode(1132.9074387762719, 5005.765038226646, 0.0, 2423)
femmesh.addNode(4702.0445995330665, 5893.786634969807, 0.0, 2424)
femmesh.addNode(3870.661708377955, 1159.3759381878317, 0.0, 2425)
femmesh.addNode(4158.242274240241, 3670.950295881611, 0.0, 2426)
femmesh.addNode(3996.584770826026, 3700.6966690831614, 0.0, 2427)
femmesh.addNode(2382.3824813061697, 523.3847523617887, 0.0, 2428)
femmesh.addNode(2499.2910614398543, 631.486556812249, 0.0, 2429)
femmesh.addNode(2924.645499533749, 542.859682598108, 0.0, 2430)
femmesh.addNode(2952.1080953530027, 379.48569634192893, 0.0, 2431)
femmesh.addNode(6303.994112152046, 5884.828693167936, 0.0, 2432)
femmesh.addNode(6452.142260300196, 5884.828693167936, 0.0, 2433)
femmesh.addNode(3219.1815279272582, 5878.55491741685, 0.0, 2434)
femmesh.addNode(637.746473287187, 1888.5440129349008, 0.0, 2435)
femmesh.addNode(6906.599521239654, 4820.83137685584, 0.0, 2436)
femmesh.addNode(7017.4534425917445, 4921.710074189568, 0.0, 2437)
femmesh.addNode(6357.891594196279, 2550.79061937219, 0.0, 2438)
femmesh.addNode(6344.201200513598, 2398.543306526587, 0.0, 2439)
femmesh.addNode(4135.481541320207, 2342.324176319329, 0.0, 2440)
femmesh.addNode(4177.30342257361, 2500.407084548181, 0.0, 2441)
femmesh.addNode(5909.24663219256, 744.0815573421803, 0.0, 2442)
femmesh.addNode(2613.301284802446, 5465.837534620601, 0.0, 2443)
femmesh.addNode(2651.324329991671, 5593.519762255277, 0.0, 2444)
femmesh.addNode(7492.805230065594, 3889.2757950003656, 0.0, 2445)
femmesh.addNode(7637.047258614788, 3883.8059000652147, 0.0, 2446)
femmesh.addNode(4689.57224860335, 2629.688525923396, 0.0, 2447)
femmesh.addNode(5048.563303244298, 194.3150999414254, 0.0, 2448)
femmesh.addNode(4973.626808335743, 88.28191878579314, 0.0, 2449)
femmesh.addNode(5111.973531945581, 106.03318115563226, 0.0, 2450)
femmesh.addNode(7767.616411639355, 4500.2294795710095, 0.0, 2451)
femmesh.addNode(7898.3630887285935, 4426.313320222045, 0.0, 2452)
femmesh.addNode(7886.0012637016725, 1708.767497656865, 0.0, 2453)
femmesh.addNode(7886.0012637016725, 1558.767497656865, 0.0, 2454)
femmesh.addNode(4151.759431047803, 635.2388467612418, 0.0, 2455)
femmesh.addNode(1952.333055088276, 4391.573609792662, 0.0, 2456)
femmesh.addNode(1863.5708083624659, 4283.278560746427, 0.0, 2457)
femmesh.addNode(7745.415303323853, 400.39687199404636, 0.0, 2458)
femmesh.addNode(7784.278005306229, 568.7663013812521, 0.0, 2459)
femmesh.addNode(7444.406622130618, 2127.2245826343687, 0.0, 2460)
femmesh.addNode(7539.605568403356, 2009.970362166631, 0.0, 2461)
femmesh.addNode(7576.623490383342, 2162.3440102482177, 0.0, 2462)
femmesh.addNode(4099.010483643227, 4308.684124677864, 0.0, 2463)
femmesh.addNode(4536.756240635801, 5078.465349141033, 0.0, 2464)
femmesh.addNode(4489.433627364148, 5197.806939952097, 0.0, 2465)
femmesh.addNode(391.3301942035577, 3781.6911850801857, 0.0, 2466)
femmesh.addNode(488.78143605654463, 3902.7028128642664, 0.0, 2467)
femmesh.addNode(358.1541327592917, 3894.362400891946, 0.0, 2468)
femmesh.addNode(7070.16996086603, 2092.2527863790046, 0.0, 2469)
femmesh.addNode(7071.96595482876, 2231.1024122403583, 0.0, 2470)
femmesh.addNode(6215.386832448854, 2344.1273978550366, 0.0, 2471)
femmesh.addNode(6062.518514235378, 2294.7523603809286, 0.0, 2472)
femmesh.addNode(7048.20307254084, 1946.1570577239108, 0.0, 2473)
femmesh.addNode(2095.58988040597, 877.2733777852054, 0.0, 2474)
femmesh.addNode(1988.0886251248583, 760.8672583411192, 0.0, 2475)
femmesh.addNode(116.93344090997864, 2926.7040379390633, 0.0, 2476)
femmesh.addNode(6711.647695441646, 5460.873543069259, 0.0, 2477)
femmesh.addNode(6108.081778996308, 1551.3760376033033, 0.0, 2478)
femmesh.addNode(5824.70042422002, 4329.6091104888765, 0.0, 2479)
femmesh.addNode(7893.881038120153, 4843.5549745512035, 0.0, 2480)
femmesh.addNode(1141.368266175176, 111.58761124481902, 0.0, 2481)
femmesh.addNode(476.9053833774626, 5019.379016778057, 0.0, 2482)
femmesh.addNode(6523.431548297397, 5743.853699666712, 0.0, 2483)
femmesh.addNode(5433.574452807978, 338.6273359773226, 0.0, 2484)
femmesh.addNode(7490.498456607675, 1869.0524349644654, 0.0, 2485)
femmesh.addNode(7622.715324860397, 1904.1718625783144, 0.0, 2486)
femmesh.addNode(7342.651721469343, 5363.1073978211125, 0.0, 2487)
femmesh.addNode(7497.214953841412, 5411.25054151868, 0.0, 2488)
femmesh.addNode(7042.684886885738, 4619.528440395774, 0.0, 2489)
femmesh.addNode(7111.832226635993, 4729.302781142092, 0.0, 2490)
femmesh.addNode(6955.28633515417, 4694.997859245264, 0.0, 2491)
femmesh.addNode(7144.62556010957, 4509.727608611854, 0.0, 2492)
femmesh.addNode(4660.543610730827, 4012.524389360341, 0.0, 2493)
femmesh.addNode(1352.6680902943776, 3751.0729036879484, 0.0, 2494)
femmesh.addNode(3996.815870595645, 2652.476664660566, 0.0, 2495)
femmesh.addNode(303.5960646867263, 412.2733005692271, 0.0, 2496)
femmesh.addNode(307.3336238270419, 289.3255968523885, 0.0, 2497)
femmesh.addNode(293.1128334507542, 5576.877308629781, 0.0, 2498)
femmesh.addNode(298.36441730390993, 5701.616392072685, 0.0, 2499)
femmesh.addNode(1833.958999174059, 1197.1804591443322, 0.0, 2500)
femmesh.addNode(1748.2149554623961, 1314.5166849052007, 0.0, 2501)
femmesh.addNode(3361.1546472566392, 4805.598534916015, 0.0, 2502)
femmesh.addNode(3491.396685351312, 4771.150712394583, 0.0, 2503)
femmesh.addNode(7054.104035233547, 2367.4395889335037, 0.0, 2504)
femmesh.addNode(7762.190908609022, 1197.4832181402292, 0.0, 2505)
femmesh.addNode(7891.280683239404, 1122.4832181402292, 0.0, 2506)
femmesh.addNode(1191.1573258678427, 2964.163301780661, 0.0, 2507)
femmesh.addNode(5081.213035609355, 677.2159977239722, 0.0, 2508)
femmesh.addNode(130.3514454531524, 3686.6753865539326, 0.0, 2509)
femmesh.addNode(130.3514454531524, 3836.6753865539326, 0.0, 2510)
femmesh.addNode(2852.206331759554, 5578.704856435921, 0.0, 2511)
femmesh.addNode(5841.361638860113, 3934.5377541835705, 0.0, 2512)
femmesh.addNode(5701.680088516518, 384.0321692576015, 0.0, 2513)
femmesh.addNode(6185.089031514185, 2098.8867787125273, 0.0, 2514)
femmesh.addNode(6068.128082529345, 2156.0370375940133, 0.0, 2515)
femmesh.addNode(6063.538717374286, 2034.0205899397401, 0.0, 2516)
femmesh.addNode(3777.7777777777687, 5907.198447368082, 0.0, 2517)
femmesh.addNode(3834.5980049068194, 5766.952675603183, 0.0, 2518)
femmesh.addNode(5328.786907915551, 670.1130611816892, 0.0, 2519)
femmesh.addNode(5411.291676286124, 580.0243371105179, 0.0, 2520)
femmesh.addNode(5184.662553985037, 731.0597985341112, 0.0, 2521)
femmesh.addNode(5323.94956587513, 781.7890824008543, 0.0, 2522)
femmesh.addNode(6182.220758782824, 5896.067716815613, 0.0, 2523)
femmesh.addNode(5308.243833402099, 1731.7411778695773, 0.0, 2524)
femmesh.addNode(6937.309267200111, 1885.144361025888, 0.0, 2525)
femmesh.addNode(7471.62663904884, 1026.6175344540225, 0.0, 2526)
femmesh.addNode(5060.834226107517, 1954.0894395492023, 0.0, 2527)
femmesh.addNode(5079.86070301301, 2105.2415241936756, 0.0, 2528)
femmesh.addNode(5074.114136953935, 5417.183874054526, 0.0, 2529)
femmesh.addNode(4942.644169530386, 5321.652469096563, 0.0, 2530)
femmesh.addNode(3139.36823846747, 720.220540328535, 0.0, 2531)
femmesh.addNode(6110.95330970288, 5501.862321940112, 0.0, 2532)
femmesh.addNode(6089.069022904099, 5628.459742318297, 0.0, 2533)
femmesh.addNode(5236.679055602019, 2125.7840459131385, 0.0, 2534)
femmesh.addNode(5145.287468856268, 2218.408831382465, 0.0, 2535)
femmesh.addNode(3726.172893423979, 620.8592050822292, 0.0, 2536)
femmesh.addNode(3693.6245986720805, 463.7576461437214, 0.0, 2537)
femmesh.addNode(1060.176182195958, 5171.115962279659, 0.0, 2538)
femmesh.addNode(1213.276902899033, 5216.171264269253, 0.0, 2539)
femmesh.addNode(2635.594583997136, 102.22787111469306, 0.0, 2540)
femmesh.addNode(2487.4464358489886, 102.22787111469306, 0.0, 2541)
femmesh.addNode(4845.172517666984, 5286.978330871377, 0.0, 2542)
femmesh.addNode(4695.131483829162, 5280.507678126924, 0.0, 2543)
femmesh.addNode(1095.9362886571305, 3671.785166436931, 0.0, 2544)
femmesh.addNode(6239.280386208654, 4923.2108484379905, 0.0, 2545)
femmesh.addNode(6362.3673751694005, 4839.9673084841015, 0.0, 2546)
femmesh.addNode(100.42348280508224, 3554.321575836872, 0.0, 2547)
femmesh.addNode(230.77492825823464, 3640.9969623908046, 0.0, 2548)
femmesh.addNode(4213.931775454766, 4635.284205533415, 0.0, 2549)
femmesh.addNode(4952.634287942052, 301.6609767181696, 0.0, 2550)
femmesh.addNode(1335.4924573430649, 759.7188390693628, 0.0, 2551)
femmesh.addNode(1379.071590705946, 619.2516533443015, 0.0, 2552)
femmesh.addNode(7439.7910727648205, 4887.069243116974, 0.0, 2553)
femmesh.addNode(7600.151867690002, 4943.858713407053, 0.0, 2554)
femmesh.addNode(5839.759353010744, 2124.592676582127, 0.0, 2555)
femmesh.addNode(5940.968200095478, 2229.8861716081415, 0.0, 2556)
femmesh.addNode(7419.631849696736, 1228.4172560820866, 0.0, 2557)
femmesh.addNode(4477.378988901246, 1845.2412286192164, 0.0, 2558)
femmesh.addNode(7095.681856080333, 5115.000245649995, 0.0, 2559)
femmesh.addNode(7162.313040724181, 4990.484854523656, 0.0, 2560)
femmesh.addNode(6397.320807710878, 1975.9371999306256, 0.0, 2561)
femmesh.addNode(7169.293273036862, 4873.546980325669, 0.0, 2562)
femmesh.addNode(7766.753428234247, 2992.360412841976, 0.0, 2563)
femmesh.addNode(7654.520921535369, 2996.9595518513324, 0.0, 2564)
femmesh.addNode(7904.05724182452, 2926.4838779695324, 0.0, 2565)
femmesh.addNode(110.78878395542299, 5888.716697528895, 0.0, 2566)
femmesh.addNode(110.90276318861717, 111.5046274715365, 0.0, 2567)
femmesh.addNode(1955.854434934521, 5407.836734984339, 0.0, 2568)
femmesh.addNode(1934.7157777225402, 5571.049607490539, 0.0, 2569)
femmesh.addNode(259.0509113367651, 111.5046274715365, 0.0, 2570)
femmesh.addNode(4010.035992986383, 5067.624827353595, 0.0, 2571)
femmesh.addNode(5016.871547225318, 5562.292634313948, 0.0, 2572)
femmesh.addNode(2004.355734650579, 1003.6731999280794, 0.0, 2573)
femmesh.addNode(5467.657362160575, 450.3425063853548, 0.0, 2574)
femmesh.addNode(6546.429374335172, 5634.424237854843, 0.0, 2575)
femmesh.addNode(6626.253568072878, 5542.691342233877, 0.0, 2576)
femmesh.addNode(214.93510828960922, 2981.9169467274933, 0.0, 2577)
femmesh.addNode(98.00166737963059, 3055.2129087884296, 0.0, 2578)
femmesh.addNode(3421.699146914333, 1914.2824342497508, 0.0, 2579)
femmesh.addNode(256.84361909976093, 3409.1693786874166, 0.0, 2580)
femmesh.addNode(100.42348280508224, 3404.321575836872, 0.0, 2581)
femmesh.addNode(156.42013629467868, 3304.8478028505447, 0.0, 2582)
femmesh.addNode(3297.845950336807, 1979.3636018761258, 0.0, 2583)
femmesh.addNode(2111.494959531288, 5402.165231949035, 0.0, 2584)
femmesh.addNode(7767.218753357154, 3888.0377876925577, 0.0, 2585)
femmesh.addNode(7897.375199756655, 3823.5283138999966, 0.0, 2586)
femmesh.addNode(4494.586412739899, 1625.293771714785, 0.0, 2587)
femmesh.addNode(5314.493571989751, 3194.8147337831524, 0.0, 2588)
femmesh.addNode(5431.722782467962, 3288.5088135012975, 0.0, 2589)
femmesh.addNode(6472.2372881942065, 4477.7161505895965, 0.0, 2590)
femmesh.addNode(6351.7359406318765, 4448.7102196662545, 0.0, 2591)
femmesh.addNode(6491.002544333656, 4612.984600533326, 0.0, 2592)
femmesh.addNode(4874.986173880135, 5501.067465950926, 0.0, 2593)
femmesh.addNode(4928.814632382758, 5631.550603783047, 0.0, 2594)
femmesh.addNode(2505.1276648694743, 243.65623252638255, 0.0, 2595)
femmesh.addNode(2388.051599390854, 141.4283614116895, 0.0, 2596)
femmesh.addNode(5232.46836146338, 2332.850268016964, 0.0, 2597)
femmesh.addNode(5075.6500088743705, 2312.3077462975007, 0.0, 2598)
femmesh.addNode(5546.827725057972, 2918.2870824174242, 0.0, 2599)
femmesh.addNode(5657.046057837802, 3018.2060687076782, 0.0, 2600)
femmesh.addNode(5545.85935058849, 3043.2051337004123, 0.0, 2601)
femmesh.addNode(6784.397481046441, 1319.3236153928885, 0.0, 2602)
femmesh.addNode(6919.299419246881, 1366.2001516691444, 0.0, 2603)
femmesh.addNode(6791.292751751382, 1427.1798740694455, 0.0, 2604)
femmesh.addNode(236.35067309288235, 3910.999812240847, 0.0, 2605)
femmesh.addNode(2989.7280193887746, 4771.645797489927, 0.0, 2606)
femmesh.addNode(5535.29547393719, 3157.86572215632, 0.0, 2607)
femmesh.addNode(5695.7320150122305, 3764.342898788847, 0.0, 2608)
femmesh.addNode(5557.533029672991, 3765.1236057602437, 0.0, 2609)
femmesh.addNode(98.00166737963059, 3205.2129087884296, 0.0, 2610)
femmesh.addNode(254.42180367430927, 3210.0607116389747, 0.0, 2611)
femmesh.addNode(6191.448525274055, 5684.543321643532, 0.0, 2612)
femmesh.addNode(3352.6451484495615, 5290.84180364593, 0.0, 2613)
femmesh.addNode(1552.3516739500826, 1395.2794040667409, 0.0, 2614)
femmesh.addNode(2146.7768651874158, 1904.3750733545219, 0.0, 2615)
femmesh.addNode(2272.215596335718, 2862.535104078328, 0.0, 2616)
femmesh.addNode(3855.8048364279684, 5052.813459076387, 0.0, 2617)
femmesh.addNode(7660.256530199369, 4559.011258847239, 0.0, 2618)
femmesh.addNode(7689.366296017201, 4411.408419720321, 0.0, 2619)
femmesh.addNode(1556.2072877188864, 2219.133860143055, 0.0, 2620)
femmesh.addNode(1556.0082847423255, 2386.784601540141, 0.0, 2621)
femmesh.addNode(6282.877494237949, 1772.4559401550223, 0.0, 2622)
femmesh.addNode(6277.882158344801, 1921.4310622808453, 0.0, 2623)
femmesh.addNode(6128.585517567734, 5107.713170821601, 0.0, 2624)
femmesh.addNode(6260.038630872973, 5179.667651125733, 0.0, 2625)
femmesh.addNode(6148.404264049585, 5250.046552797077, 0.0, 2626)
femmesh.addNode(4731.174438629094, 4490.015304252005, 0.0, 2627)
femmesh.addNode(2282.6789971124063, 109.67753616863163, 0.0, 2628)
femmesh.addNode(5551.582189442877, 3640.453973939028, 0.0, 2629)
femmesh.addNode(4971.525921891511, 5755.009349339047, 0.0, 2630)
femmesh.addNode(4829.640548546327, 5693.784180976024, 0.0, 2631)
femmesh.addNode(7592.520141242945, 5298.125329934728, 0.0, 2632)
femmesh.addNode(637.3918254899908, 4092.704735424311, 0.0, 2633)
femmesh.addNode(1401.5922856845714, 369.46620785648315, 0.0, 2634)
femmesh.addNode(7635.323120369051, 5425.209829664261, 0.0, 2635)
femmesh.addNode(5326.02479418699, 2021.007708533041, 0.0, 2636)
femmesh.addNode(5253.8598046218385, 1918.842584409984, 0.0, 2637)
femmesh.addNode(688.194567213149, 2117.089940264579, 0.0, 2638)
femmesh.addNode(2243.3054733248873, 4572.667543500702, 0.0, 2639)
femmesh.addNode(2308.622893478505, 4696.82524537733, 0.0, 2640)
femmesh.addNode(5451.9988169994685, 3748.98341940141, 0.0, 2641)
femmesh.addNode(931.298806358513, 5100.122284264731, 0.0, 2642)
femmesh.addNode(1034.9537999726672, 5043.638728462898, 0.0, 2643)
femmesh.addNode(105.99922763972995, 3974.3244256869143, 0.0, 2644)
femmesh.addNode(1754.92694796851, 360.52511736123574, 0.0, 2645)
femmesh.addNode(1857.2284524103889, 420.7081432509244, 0.0, 2646)
femmesh.addNode(7462.711174818791, 5898.469704108169, 0.0, 2647)
femmesh.addNode(7610.859322966939, 5898.469704108169, 0.0, 2648)
femmesh.addNode(572.426944733633, 3994.427547287217, 0.0, 2649)
femmesh.addNode(2092.661353999556, 4451.116649005624, 0.0, 2650)
femmesh.addNode(5425.0771411573605, 3057.946735866066, 0.0, 2651)
femmesh.addNode(823.8063055925209, 958.3789927965289, 0.0, 2652)
femmesh.addNode(1956.2402592376873, 1224.0588300891889, 0.0, 2653)
femmesh.addNode(1913.4632791364752, 1092.5778542010212, 0.0, 2654)
femmesh.addNode(6263.992648712656, 5780.896409983548, 0.0, 2655)
femmesh.addNode(2362.163236979447, 3193.295870701359, 0.0, 2656)
femmesh.addNode(1442.2092460877725, 2017.7614756694443, 0.0, 2657)
femmesh.addNode(1554.1594997076706, 2081.3927288397813, 0.0, 2658)
femmesh.addNode(4584.203414933025, 5307.812799989564, 0.0, 2659)
femmesh.addNode(2739.2049236701832, 109.73864348806697, 0.0, 2660)
femmesh.addNode(7345.616560927223, 3934.376646464009, 0.0, 2661)
femmesh.addNode(7176.287151557103, 2557.4593068358313, 0.0, 2662)
femmesh.addNode(7295.904619594683, 2613.7696534881416, 0.0, 2663)
femmesh.addNode(4195.591032525559, 100.573380244588, 0.0, 2664)
femmesh.addNode(1990.1029343179612, 3184.9769766839822, 0.0, 2665)
femmesh.addNode(1886.1322983095133, 3071.703391101808, 0.0, 2666)
femmesh.addNode(1560.2157514069518, 3111.2100609501767, 0.0, 2667)
femmesh.addNode(301.5632307981544, 3067.642585683454, 0.0, 2668)
femmesh.addNode(6829.053179941395, 1195.1995290316436, 0.0, 2669)
femmesh.addNode(6701.046512445896, 1256.1792514319445, 0.0, 2670)
femmesh.addNode(3222.8296064155475, 5322.218813603351, 0.0, 2671)
femmesh.addNode(7921.1412102918985, 525.0, 0.0, 2672)
femmesh.addNode(2447.144653489046, 4690.688551140751, 0.0, 2673)
femmesh.addNode(315.76931508266233, 3534.406265616755, 0.0, 2674)
femmesh.addNode(5828.412435296673, 3787.833991024616, 0.0, 2675)
femmesh.addNode(808.1192050746827, 5018.940778458758, 0.0, 2676)
femmesh.addNode(3283.6679993265525, 2374.040641519102, 0.0, 2677)
femmesh.addNode(3185.2226441302837, 2314.1945777317187, 0.0, 2678)
femmesh.addNode(3288.899259987108, 2232.4400011275666, 0.0, 2679)
femmesh.addNode(5776.6261239153955, 2962.6366875581657, 0.0, 2680)
femmesh.addNode(5895.491913793458, 2918.9718253497085, 0.0, 2681)
femmesh.addNode(7529.016437598847, 5667.637224732275, 0.0, 2682)
femmesh.addNode(7386.985162561353, 5768.448213554893, 0.0, 2683)
femmesh.addNode(5780.264086517045, 5139.877032495381, 0.0, 2684)
femmesh.addNode(5901.607571144612, 5093.812071992178, 0.0, 2685)
femmesh.addNode(5895.607666116779, 5224.157032996149, 0.0, 2686)
femmesh.addNode(1031.9721082748265, 3117.752558171589, 0.0, 2687)
femmesh.addNode(1046.3381878217474, 3282.4582528137544, 0.0, 2688)
femmesh.addNode(2273.650799941209, 3512.6616122993582, 0.0, 2689)
femmesh.addNode(7076.534636184158, 2488.840749565937, 0.0, 2690)
femmesh.addNode(4008.9672293564763, 3257.657517780044, 0.0, 2691)
femmesh.addNode(7914.686159547134, 256.59156586830204, 0.0, 2692)
femmesh.addNode(7914.686159547134, 106.59156586830207, 0.0, 2693)
femmesh.addNode(7268.239323177693, 1801.8704274251293, 0.0, 2694)
femmesh.addNode(7391.36032479119, 1858.853973736414, 0.0, 2695)
femmesh.addNode(456.6491571052659, 5283.575133473902, 0.0, 2696)
femmesh.addNode(577.7742946692088, 5241.9257754813225, 0.0, 2697)
femmesh.addNode(1864.8375266944804, 836.4365866076579, 0.0, 2698)
femmesh.addNode(1882.0744745869506, 976.7948289832227, 0.0, 2699)
femmesh.addNode(5999.090007472716, 5302.432753358667, 0.0, 2700)
femmesh.addNode(4954.6944375460525, 5908.621463266012, 0.0, 2701)
femmesh.addNode(3083.0527853179483, 2228.8692712038055, 0.0, 2702)
femmesh.addNode(5903.101833087607, 4976.537545429586, 0.0, 2703)
femmesh.addNode(6018.445412687341, 5060.817545930353, 0.0, 2704)
femmesh.addNode(1368.2335754277613, 500.6214511106815, 0.0, 2705)
femmesh.addNode(5267.3322335032735, 2451.1059868288985, 0.0, 2706)
femmesh.addNode(320.49500432850243, 2939.133714834087, 0.0, 2707)
femmesh.addNode(7283.640478059639, 4514.092227226207, 0.0, 2708)
femmesh.addNode(1558.3299149669779, 4182.097224201347, 0.0, 2709)
femmesh.addNode(7756.91148907129, 1483.767497656865, 0.0, 2710)
femmesh.addNode(7315.388742017349, 4657.721928377959, 0.0, 2711)
femmesh.addNode(7399.798441709426, 4517.980793311563, 0.0, 2712)
femmesh.addNode(7498.487990013738, 4746.713610018753, 0.0, 2713)
femmesh.addNode(7372.849788418218, 4801.966127561536, 0.0, 2714)
femmesh.addNode(6256.3396882255, 3584.656482390563, 0.0, 2715)
femmesh.addNode(2492.632774866953, 4545.431665964435, 0.0, 2716)
femmesh.addNode(2567.3542907940873, 4662.4470878823195, 0.0, 2717)
femmesh.addNode(6637.579614943748, 4853.868871481289, 0.0, 2718)
femmesh.addNode(6708.899886682152, 4740.084469253945, 0.0, 2719)
femmesh.addNode(6557.714134051019, 4757.062787015134, 0.0, 2720)
femmesh.addNode(7666.503583008909, 1260.5503767195992, 0.0, 2721)
femmesh.addNode(7686.874040878695, 1108.0335948598283, 0.0, 2722)
femmesh.addNode(6758.643947427964, 96.4155087544391, 0.0, 2723)
femmesh.addNode(7763.134361030914, 4767.471133900168, 0.0, 2724)
femmesh.addNode(4287.498229008298, 4005.349137376889, 0.0, 2725)
femmesh.addNode(4140.464044921751, 4070.9908060634884, 0.0, 2726)
femmesh.addNode(4176.56804922822, 3930.7253438244015, 0.0, 2727)
femmesh.addNode(4801.966296252004, 229.9075572829032, 0.0, 2728)
femmesh.addNode(7901.736720786551, 5613.515101130631, 0.0, 2729)
femmesh.addNode(7901.736720786551, 5463.515101130631, 0.0, 2730)
femmesh.addNode(3118.4192941258602, 3368.3475952775993, 0.0, 2731)
femmesh.addNode(3094.83467663271, 3483.92654643438, 0.0, 2732)
femmesh.addNode(594.9752244469839, 2238.9596325531816, 0.0, 2733)
femmesh.addNode(7256.69182476843, 4798.07756147618, 0.0, 2734)
femmesh.addNode(3803.6265579023266, 4935.559863178268, 0.0, 2735)
femmesh.addNode(3749.135702776643, 4821.7838102812875, 0.0, 2736)
femmesh.addNode(1784.0844683180717, 4168.426601282251, 0.0, 2737)
femmesh.addNode(596.5073196530822, 886.5852351087277, 0.0, 2738)
femmesh.addNode(593.7853437392548, 736.303639041284, 0.0, 2739)
femmesh.addNode(2692.652344578464, 4619.198506208669, 0.0, 2740)
femmesh.addNode(1303.8556400474167, 3639.100843824768, 0.0, 2741)
femmesh.addNode(1194.879169632125, 3581.016709554191, 0.0, 2742)
femmesh.addNode(502.49217994956723, 1816.0985582947685, 0.0, 2743)
femmesh.addNode(456.0014375414486, 709.5783627447208, 0.0, 2744)
femmesh.addNode(7632.104131564587, 1456.1984948328911, 0.0, 2745)
femmesh.addNode(7593.431777504633, 1344.1554824581362, 0.0, 2746)
femmesh.addNode(4593.0157091539495, 4644.713628084351, 0.0, 2747)
femmesh.addNode(3095.1652737310123, 5162.428934289019, 0.0, 2748)
femmesh.addNode(3099.918956919554, 5035.167607841227, 0.0, 2749)
femmesh.addNode(2618.2817741470635, 567.0648284959196, 0.0, 2750)
femmesh.addNode(2577.6261512403, 454.59164249814893, 0.0, 2751)
femmesh.addNode(2699.462756967855, 507.49574036061114, 0.0, 2752)
femmesh.addNode(1879.959221718284, 709.413670238496, 0.0, 2753)
femmesh.addNode(989.5233599800398, 2997.1699405431127, 0.0, 2754)
femmesh.addNode(1137.364813369896, 3061.084799370647, 0.0, 2755)
femmesh.addNode(3464.333645638849, 337.6147253167777, 0.0, 2756)
femmesh.addNode(345.6972777307325, 3666.7600763338155, 0.0, 2757)
femmesh.addNode(7624.444699991831, 5763.727781096632, 0.0, 2758)
femmesh.addNode(7767.050874671842, 5419.557410170785, 0.0, 2759)
femmesh.addNode(7671.745687270311, 5532.682621754737, 0.0, 2760)
femmesh.addNode(6336.219704681052, 5563.874836183986, 0.0, 2761)
femmesh.addNode(6260.26756627975, 5449.476121378524, 0.0, 2762)
femmesh.addNode(5417.673171612936, 840.0822900501512, 0.0, 2763)
femmesh.addNode(5468.352116726914, 976.5400780547818, 0.0, 2764)
femmesh.addNode(7651.967960090932, 4739.824522136059, 0.0, 2765)
femmesh.addNode(7585.02667574433, 4654.721406580622, 0.0, 2766)
femmesh.addNode(590.8730654941548, 5105.174056849854, 0.0, 2767)
femmesh.addNode(5300.573835278807, 3082.0169152028675, 0.0, 2768)
femmesh.addNode(7677.718266150347, 1801.309323048432, 0.0, 2769)
femmesh.addNode(7083.465153357667, 2620.550403775077, 0.0, 2770)
femmesh.addNode(2977.2903208384223, 5003.770636680148, 0.0, 2771)
femmesh.addNode(7776.915423319661, 1728.4498530172227, 0.0, 2772)
femmesh.addNode(2206.6517901730513, 5415.864639912112, 0.0, 2773)
femmesh.addNode(2335.5011295059785, 5354.246923872386, 0.0, 2774)
femmesh.addNode(2231.1930045025965, 2749.9331900925263, 0.0, 2775)
femmesh.addNode(2116.949903939014, 2745.1157427245976, 0.0, 2776)
femmesh.addNode(3090.5096679272183, 1973.9434580850643, 0.0, 2777)
femmesh.addNode(7407.553687381191, 3011.670717704241, 0.0, 2778)
femmesh.addNode(7032.488981453762, 1261.5790377830447, 0.0, 2779)
femmesh.addNode(6934.72514258318, 1145.9923735764776, 0.0, 2780)
femmesh.addNode(7547.48101218758, 2957.7056702197638, 0.0, 2781)
femmesh.addNode(7506.776560122176, 4565.900346729933, 0.0, 2782)
femmesh.addNode(6825.910428638783, 1540.190994231118, 0.0, 2783)
femmesh.addNode(4806.546289397904, 5908.621463266012, 0.0, 2784)
femmesh.addNode(4404.373256268787, 1405.8195112030262, 0.0, 2785)
femmesh.addNode(4459.941668519845, 1239.3196121815397, 0.0, 2786)
femmesh.addNode(6299.938315725203, 2031.3764687080347, 0.0, 2787)
femmesh.addNode(6304.527680880263, 2153.392916362308, 0.0, 2788)
femmesh.addNode(1657.0471055781463, 2157.994198694995, 0.0, 2789)
femmesh.addNode(5931.561815719584, 4579.105083232498, 0.0, 2790)
femmesh.addNode(3573.7071453638955, 402.9681063338551, 0.0, 2791)
femmesh.addNode(3876.2398578528923, 1937.8537342767063, 0.0, 2792)
femmesh.addNode(334.85878330969365, 5332.249292443263, 0.0, 2793)
femmesh.addNode(6242.503352110847, 2577.2370048407993, 0.0, 2794)
femmesh.addNode(6135.531193201965, 2655.035084132647, 0.0, 2795)
femmesh.addNode(7443.722145444895, 2916.131081442254, 0.0, 2796)
femmesh.addNode(2724.767522977189, 4467.997639910464, 0.0, 2797)
femmesh.addNode(2653.0832931327786, 4385.654148517931, 0.0, 2798)
femmesh.addNode(5939.219320903606, 1967.3631799611549, 0.0, 2799)
femmesh.addNode(5830.280314426501, 2014.989932282678, 0.0, 2800)
femmesh.addNode(4825.478660187593, 88.28191878579314, 0.0, 2801)
femmesh.addNode(7293.999097729551, 2718.3297129164116, 0.0, 2802)
femmesh.addNode(7354.287673683498, 2858.7932968572914, 0.0, 2803)
femmesh.addNode(2188.8628735260536, 1314.2388555601424, 0.0, 2804)
femmesh.addNode(2065.49972956501, 1246.5938578184102, 0.0, 2805)
femmesh.addNode(6148.186506164915, 84.85350532801614, 0.0, 2806)
femmesh.addNode(5036.611505921477, 3524.872608795302, 0.0, 2807)
femmesh.addNode(1806.339535130754, 1433.0208694381408, 0.0, 2808)
femmesh.addNode(1937.110726156537, 1445.9346925830646, 0.0, 2809)
femmesh.addNode(335.6576004627676, 667.3883902778621, 0.0, 2810)
femmesh.addNode(1677.8185113178793, 1418.8761193108417, 0.0, 2811)
femmesh.addNode(1862.6271199323423, 566.5512935247397, 0.0, 2812)
femmesh.addNode(516.0706067572303, 83.55204411095671, 0.0, 2813)
femmesh.addNode(367.92245860908235, 83.55204411095671, 0.0, 2814)
femmesh.addNode(7518.114909774306, 1254.7058591777354, 0.0, 2815)
femmesh.addNode(3863.857725918394, 2594.557228508348, 0.0, 2816)
femmesh.addNode(7921.1412102918985, 375.0, 0.0, 2817)
femmesh.addNode(2188.2586868211633, 4479.424053907744, 0.0, 2818)
femmesh.addNode(2317.3763510581875, 4480.42963962991, 0.0, 2819)
femmesh.addNode(6419.6822725952525, 4726.7690027606695, 0.0, 2820)
femmesh.addNode(1675.895289287872, 4101.710307914334, 0.0, 2821)
femmesh.addNode(2127.065391580456, 5512.719915607711, 0.0, 2822)
femmesh.addNode(2326.7202802181346, 3657.5495722159367, 0.0, 2823)
femmesh.addNode(2219.4174884654653, 3630.921403017675, 0.0, 2824)
femmesh.addNode(2146.269142518914, 5632.64055919387, 0.0, 2825)
femmesh.addNode(7767.1739496632945, 5628.773178119092, 0.0, 2826)
femmesh.addNode(7635.446195360503, 5634.425597612568, 0.0, 2827)
femmesh.addNode(3304.58516659164, 2113.522378552763, 0.0, 2828)
femmesh.addNode(1512.2989055053608, 3564.7389918982044, 0.0, 2829)
femmesh.addNode(1445.6959663942662, 3683.225182112769, 0.0, 2830)
femmesh.addNode(5874.112151942791, 4415.688058165883, 0.0, 2831)
femmesh.addNode(5986.381657976977, 4469.817508389297, 0.0, 2832)
femmesh.addNode(2160.390211900678, 3731.370874944131, 0.0, 2833)
femmesh.addNode(6313.221878643277, 5673.304297995855, 0.0, 2834)
femmesh.addNode(3093.103896076495, 5288.8309918713985, 0.0, 2835)
femmesh.addNode(3183.7146885360066, 2038.4351606635464, 0.0, 2836)
femmesh.addNode(5680.283738716907, 4908.476442306897, 0.0, 2837)
femmesh.addNode(5657.253397886816, 5041.820574359235, 0.0, 2838)
femmesh.addNode(6304.220575694168, 350.41740016516593, 0.0, 2839)
femmesh.addNode(5795.073321916283, 4896.668762365191, 0.0, 2840)
femmesh.addNode(4131.721129930226, 4183.122210775116, 0.0, 2841)
femmesh.addNode(532.0878641897489, 232.69052162068317, 0.0, 2842)
femmesh.addNode(6273.610030272849, 3458.3665061372903, 0.0, 2843)
femmesh.addNode(3302.2455801354035, 2493.5824736050963, 0.0, 2844)
femmesh.addNode(3308.8180147757785, 2634.5895258767396, 0.0, 2845)
femmesh.addNode(5723.461899047799, 2048.4117600435784, 0.0, 2846)
femmesh.addNode(416.2051710993592, 261.37301349180876, 0.0, 2847)
femmesh.addNode(4631.590010091793, 4848.993605056891, 0.0, 2848)
femmesh.addNode(4576.071388573897, 4957.4409255914015, 0.0, 2849)
femmesh.addNode(2451.0889984154237, 398.5087267284182, 0.0, 2850)
femmesh.addNode(2532.2699812362152, 338.93963859310975, 0.0, 2851)
femmesh.addNode(7474.470081248231, 1753.0554741480973, 0.0, 2852)
femmesh.addNode(6691.008490438342, 1493.3144579548623, 0.0, 2853)
femmesh.addNode(5546.722382201948, 349.7635131900739, 0.0, 2854)
femmesh.addNode(2546.5642466824947, 4422.4121677522, 0.0, 2855)
femmesh.addNode(4671.15105512471, 4724.552709315614, 0.0, 2856)
femmesh.addNode(4740.354523571539, 4832.064393381852, 0.0, 2857)
femmesh.addNode(7583.034006507578, 4361.677701232387, 0.0, 2858)
femmesh.addNode(7547.9980127273275, 1662.8254625209656, 0.0, 2859)
femmesh.addNode(6408.763828119652, 5660.227924524003, 0.0, 2860)
femmesh.addNode(6069.527034234355, 2749.0031210052875, 0.0, 2861)
femmesh.addNode(6003.82456506071, 2840.5084035456916, 0.0, 2862)
femmesh.addNode(5961.448481237054, 2714.4530104673718, 0.0, 2863)
femmesh.addNode(4991.01250179391, 4803.570090118723, 0.0, 2864)
femmesh.addNode(6863.027870269966, 4063.984332552514, 0.0, 2865)
femmesh.addNode(2051.112311877151, 5618.941151230793, 0.0, 2866)
femmesh.addNode(336.4013697184554, 1637.8439269385406, 0.0, 2867)
femmesh.addNode(4767.850148190242, 5802.408098235819, 0.0, 2868)
femmesh.addNode(6003.5704667907585, 2953.5219358876243, 0.0, 2869)
femmesh.addNode(2631.66481769435, 299.7391482961133, 0.0, 2870)
femmesh.addNode(4266.74581741122, 1596.7449521006142, 0.0, 2871)
femmesh.addNode(4358.4447701053705, 1496.8744645491265, 0.0, 2872)
femmesh.addNode(4377.922585678536, 1612.598805192517, 0.0, 2873)
femmesh.addNode(6318.49276473399, 2275.319135038351, 0.0, 2874)
femmesh.addNode(6189.694838215145, 202.33293728871078, 0.0, 2875)
femmesh.addNode(6200.57044915807, 306.09800911942807, 0.0, 2876)
femmesh.addNode(2166.7322317328553, 4358.878745134833, 0.0, 2877)
femmesh.addNode(7647.195169896642, 1589.9659924897564, 0.0, 2878)
femmesh.addNode(1624.707728964151, 5667.539205576473, 0.0, 2879)
femmesh.addNode(5902.004762756282, 2603.548027231154, 0.0, 2880)
femmesh.addNode(658.5439332172241, 2343.676240569629, 0.0, 2881)
femmesh.addNode(3242.6932676078613, 4840.457063310233, 0.0, 2882)
femmesh.addNode(3112.3566554699073, 4803.042768651007, 0.0, 2883)
femmesh.addNode(4867.444909548183, 4812.363047201065, 0.0, 2884)
femmesh.addNode(4199.519027905174, 3822.3900079590553, 0.0, 2885)
femmesh.addNode(3607.6803049492855, 4789.81985953708, 0.0, 2886)
femmesh.addNode(345.94506173688836, 2834.483487261886, 0.0, 2887)
femmesh.addNode(1985.995449602594, 1351.1964627617212, 0.0, 2888)
femmesh.addNode(4402.887460045749, 1725.1642592662727, 0.0, 2889)
femmesh.addNode(6837.452181489398, 4711.057036109523, 0.0, 2890)
femmesh.addNode(2052.6822841742187, 3798.634265786755, 0.0, 2891)
femmesh.addNode(4903.483160850183, 4698.830540558232, 0.0, 2892)
femmesh.addNode(4809.3097845998545, 4569.854385483269, 0.0, 2893)
femmesh.addNode(6130.543120777955, 5374.387233662798, 0.0, 2894)
femmesh.addNode(3097.248884182051, 2108.1022347617013, 0.0, 2895)
femmesh.addNode(4963.36939637938, 1263.2893802262624, 0.0, 2896)
femmesh.addNode(5018.462037048852, 1160.1072648416337, 0.0, 2897)
femmesh.addNode(556.8151422055747, 2878.6131406942914, 0.0, 2898)
femmesh.addNode(432.57318424543354, 2920.2091262178465, 0.0, 2899)
femmesh.addNode(2426.3546093774535, 4450.653631010631, 0.0, 2900)
femmesh.addNode(6130.079779510728, 4990.438644259009, 0.0, 2901)
femmesh.addNode(3877.0887502901505, 1038.8207332518448, 0.0, 2902)
femmesh.addNode(2016.137038539249, 2802.6165989970686, 0.0, 2903)
femmesh.addNode(3068.7680641111556, 4919.027177062853, 0.0, 2904)
femmesh.addNode(417.23730466349434, 1747.6367650741527, 0.0, 2905)
femmesh.addNode(4293.196488001472, 1389.9656581111237, 0.0, 2906)
femmesh.addNode(2831.8841535070596, 329.6786096007405, 0.0, 2907)
femmesh.addNode(2755.416130200754, 417.4512432940938, 0.0, 2908)
femmesh.addNode(2735.2751573673972, 307.2499206694872, 0.0, 2909)
femmesh.addNode(4657.171716099756, 5389.766341894096, 0.0, 2910)
femmesh.addNode(7672.80537023403, 1690.3944653449396, 0.0, 2911)
femmesh.addNode(7475.028296164466, 4422.270645578181, 0.0, 2912)
femmesh.addNode(7570.109699126411, 1052.9061375496713, 0.0, 2913)
femmesh.addNode(7676.595675300324, 4859.4633373382985, 0.0, 2914)
femmesh.addNode(2708.1328410006563, 211.96651460276001, 0.0, 2915)
femmesh.addNode(4052.4848438186277, 3888.031676645655, 0.0, 2916)
femmesh.addNode(5163.690474715422, 1900.5155911518848, 0.0, 2917)
femmesh.addNode(5246.297989578499, 1806.7414368375196, 0.0, 2918)
femmesh.addNode(7835.827369839031, 331.59156586830204, 0.0, 2919)
femmesh.addNode(3443.775395295037, 2653.2162114927423, 0.0, 2920)
femmesh.addNode(2755.9802963080842, 5653.925183382404, 0.0, 2921)
femmesh.addNode(4701.780222633694, 4627.784416409311, 0.0, 2922)
femmesh.addNode(2796.071753107518, 529.9244292918645, 0.0, 2923)
femmesh.addNode(7492.70365520101, 1144.8121503435495, 0.0, 2924)
femmesh.addNode(4777.514522016733, 5466.393327725739, 0.0, 2925)
femmesh.addNode(330.67707364955163, 195.0566715824932, 0.0, 2926)
femmesh.addNode(1610.4762536184405, 1513.783588599681, 0.0, 2927)
femmesh.addNode(4945.099552552129, 1076.6164352832811, 0.0, 2928)
femmesh.addNode(3970.3481956878404, 3816.4130543320616, 0.0, 2929)
femmesh.addNode(526.2326455361365, 1706.6258563919796, 0.0, 2930)
femmesh.addNode(328.0222886664836, 523.4330013295549, 0.0, 2931)
femmesh.addNode(326.3942847204498, 5474.996943944811, 0.0, 2932)
femmesh.addNode(5769.880581962626, 4438.896685332078, 0.0, 2933)
femmesh.addNode(4865.5714368592935, 5397.135358256641, 0.0, 2934)
femmesh.addNode(5218.074503495682, 1713.414184611478, 0.0, 2935)
return True
def create_elements(femmesh):
# elements
femmesh.addFace([351, 515, 230, 853, 854, 855], 95)
femmesh.addFace([343, 457, 259, 856, 857, 858], 96)
femmesh.addFace([358, 487, 202, 859, 860, 861], 97)
femmesh.addFace([334, 702, 477, 862, 863, 864], 98)
femmesh.addFace([410, 507, 238, 865, 866, 867], 99)
femmesh.addFace([203, 479, 345, 868, 869, 870], 100)
femmesh.addFace([372, 816, 260, 871, 872, 873], 101)
femmesh.addFace([113, 526, 341, 874, 875, 876], 102)
femmesh.addFace([153, 444, 154, 877, 878, 180], 103)
femmesh.addFace([240, 620, 357, 879, 880, 881], 104)
femmesh.addFace([356, 500, 239, 882, 883, 884], 105)
femmesh.addFace([353, 669, 250, 885, 886, 887], 106)
femmesh.addFace([355, 646, 56, 888, 889, 890], 107)
femmesh.addFace([363, 604, 141, 891, 892, 893], 108)
femmesh.addFace([341, 689, 113, 894, 895, 876], 109)
femmesh.addFace([262, 473, 371, 896, 897, 898], 110)
femmesh.addFace([260, 816, 395, 872, 899, 900], 111)
femmesh.addFace([278, 724, 350, 901, 902, 903], 112)
femmesh.addFace([359, 595, 10, 904, 905, 906], 113)
femmesh.addFace([371, 473, 205, 897, 907, 908], 114)
femmesh.addFace([257, 402, 395, 909, 910, 911], 115)
femmesh.addFace([202, 674, 358, 912, 913, 861], 116)
femmesh.addFace([230, 455, 351, 914, 915, 855], 117)
femmesh.addFace([340, 666, 539, 916, 917, 918], 118)
femmesh.addFace([395, 402, 260, 910, 919, 900], 119)
femmesh.addFace([404, 431, 280, 920, 921, 922], 120)
femmesh.addFace([350, 461, 216, 923, 924, 925], 121)
femmesh.addFace([272, 469, 348, 926, 927, 928], 122)
femmesh.addFace([258, 426, 365, 929, 930, 931], 123)
femmesh.addFace([259, 495, 343, 932, 933, 858], 124)
femmesh.addFace([303, 443, 397, 934, 935, 936], 125)
femmesh.addFace([208, 554, 466, 937, 938, 939], 126)
femmesh.addFace([444, 642, 154, 940, 941, 878], 127)
femmesh.addFace([153, 489, 444, 942, 943, 877], 128)
femmesh.addFace([430, 477, 311, 944, 945, 946], 129)
femmesh.addFace([190, 477, 430, 947, 944, 948], 130)
femmesh.addFace([594, 658, 446, 949, 950, 951], 131)
femmesh.addFace([545, 627, 265, 952, 953, 954], 132)
femmesh.addFace([193, 608, 387, 955, 956, 957], 133)
femmesh.addFace([320, 517, 514, 958, 959, 960], 134)
femmesh.addFace([284, 637, 419, 961, 962, 963], 135)
femmesh.addFace([466, 554, 319, 938, 964, 965], 136)
femmesh.addFace([48, 778, 347, 966, 967, 968], 137)
femmesh.addFace([250, 594, 446, 969, 951, 970], 138)
femmesh.addFace([139, 606, 424, 971, 972, 973], 139)
femmesh.addFace([569, 637, 284, 974, 961, 975], 140)
femmesh.addFace([252, 462, 356, 976, 977, 978], 141)
femmesh.addFace([287, 491, 369, 979, 980, 981], 142)
femmesh.addFace([345, 609, 203, 982, 983, 870], 143)
femmesh.addFace([356, 462, 201, 977, 984, 985], 144)
femmesh.addFace([563, 663, 217, 986, 987, 988], 145)
femmesh.addFace([383, 467, 247, 989, 990, 991], 146)
femmesh.addFace([477, 702, 254, 863, 992, 993], 147)
femmesh.addFace([347, 502, 48, 994, 995, 968], 148)
femmesh.addFace([369, 480, 253, 996, 997, 998], 149)
femmesh.addFace([514, 517, 219, 959, 999, 1000], 150)
femmesh.addFace([250, 669, 367, 886, 1001, 1002], 151)
femmesh.addFace([233, 777, 361, 1003, 1004, 1005], 152)
femmesh.addFace([250, 485, 353, 1006, 1007, 887], 153)
femmesh.addFace([268, 524, 352, 1008, 1009, 1010], 154)
femmesh.addFace([410, 590, 507, 1011, 1012, 865], 155)
femmesh.addFace([496, 600, 336, 1013, 1014, 1015], 156)
femmesh.addFace([203, 429, 373, 1016, 1017, 1018], 157)
femmesh.addFace([143, 392, 144, 1019, 1020, 170], 158)
femmesh.addFace([430, 680, 231, 1021, 1022, 1023], 159)
femmesh.addFace([354, 675, 531, 1024, 1025, 1026], 160)
femmesh.addFace([252, 464, 462, 1027, 1028, 976], 161)
femmesh.addFace([271, 428, 389, 1029, 1030, 1031], 162)
femmesh.addFace([462, 464, 287, 1028, 1032, 1033], 163)
femmesh.addFace([231, 636, 430, 1034, 1035, 1023], 164)
femmesh.addFace([389, 428, 206, 1030, 1036, 1037], 165)
femmesh.addFace([352, 676, 268, 1038, 1039, 1010], 166)
femmesh.addFace([152, 489, 153, 1040, 942, 179], 167)
femmesh.addFace([240, 665, 620, 1041, 1042, 879], 168)
femmesh.addFace([302, 541, 408, 1043, 1044, 1045], 169)
femmesh.addFace([260, 850, 372, 1046, 1047, 873], 170)
femmesh.addFace([408, 541, 233, 1044, 1048, 1049], 171)
femmesh.addFace([409, 470, 275, 1050, 1051, 1052], 172)
femmesh.addFace([345, 550, 516, 1053, 1054, 1055], 173)
femmesh.addFace([268, 470, 409, 1056, 1050, 1057], 174)
femmesh.addFace([516, 550, 223, 1054, 1058, 1059], 175)
femmesh.addFace([216, 567, 350, 1060, 1061, 925], 176)
femmesh.addFace([381, 535, 46, 1062, 1063, 1064], 177)
femmesh.addFace([253, 458, 364, 1065, 1066, 1067], 178)
femmesh.addFace([348, 533, 272, 1068, 1069, 928], 179)
femmesh.addFace([364, 471, 253, 1070, 1071, 1067], 180)
femmesh.addFace([139, 424, 140, 973, 1072, 166], 181)
femmesh.addFace([282, 733, 377, 1073, 1074, 1075], 182)
femmesh.addFace([335, 735, 427, 1076, 1077, 1078], 183)
femmesh.addFace([498, 520, 264, 1079, 1080, 1081], 184)
femmesh.addFace([360, 690, 459, 1082, 1083, 1084], 185)
femmesh.addFace([248, 463, 422, 1085, 1086, 1087], 186)
femmesh.addFace([459, 690, 242, 1083, 1088, 1089], 187)
femmesh.addFace([350, 555, 278, 1090, 1091, 903], 188)
femmesh.addFace([339, 701, 393, 1092, 1093, 1094], 189)
femmesh.addFace([393, 701, 226, 1093, 1095, 1096], 190)
femmesh.addFace([201, 471, 364, 1097, 1070, 1098], 191)
femmesh.addFace([310, 664, 370, 1099, 1100, 1101], 192)
femmesh.addFace([225, 600, 496, 1102, 1013, 1103], 193)
femmesh.addFace([247, 442, 383, 1104, 1105, 991], 194)
femmesh.addFace([56, 521, 355, 1106, 1107, 890], 195)
femmesh.addFace([250, 446, 382, 970, 1108, 1109], 196)
femmesh.addFace([608, 722, 387, 1110, 1111, 956], 197)
femmesh.addFace([295, 569, 486, 1112, 1113, 1114], 198)
femmesh.addFace([531, 675, 228, 1025, 1115, 1116], 199)
femmesh.addFace([479, 550, 345, 1117, 1053, 869], 200)
femmesh.addFace([397, 671, 627, 1118, 1119, 1120], 201)
femmesh.addFace([376, 835, 412, 1121, 1122, 1123], 202)
femmesh.addFace([295, 611, 569, 1124, 1125, 1112], 203)
femmesh.addFace([370, 546, 232, 1126, 1127, 1128], 204)
femmesh.addFace([500, 581, 333, 1129, 1130, 1131], 205)
femmesh.addFace([201, 500, 356, 1132, 882, 985], 206)
femmesh.addFace([344, 665, 429, 1133, 1134, 1135], 207)
femmesh.addFace([315, 520, 498, 1136, 1079, 1137], 208)
femmesh.addFace([243, 535, 381, 1138, 1062, 1139], 209)
femmesh.addFace([200, 634, 385, 1140, 1141, 1142], 210)
femmesh.addFace([314, 674, 372, 1143, 1144, 1145], 211)
femmesh.addFace([287, 471, 462, 1146, 1147, 1033], 212)
femmesh.addFace([462, 471, 201, 1147, 1097, 984], 213)
femmesh.addFace([381, 800, 243, 1148, 1149, 1139], 214)
femmesh.addFace([429, 757, 344, 1150, 1151, 1135], 215)
femmesh.addFace([397, 627, 303, 1120, 1152, 936], 216)
femmesh.addFace([12, 394, 13, 1153, 1154, 32], 217)
femmesh.addFace([385, 679, 234, 1155, 1156, 1157], 218)
femmesh.addFace([455, 850, 260, 1158, 1046, 1159], 219)
femmesh.addFace([226, 660, 615, 1160, 1161, 1162], 220)
femmesh.addFace([273, 550, 479, 1163, 1117, 1164], 221)
femmesh.addFace([67, 635, 416, 1165, 1166, 1167], 222)
femmesh.addFace([355, 521, 275, 1107, 1168, 1169], 223)
femmesh.addFace([356, 587, 252, 1170, 1171, 978], 224)
femmesh.addFace([359, 505, 296, 1172, 1173, 1174], 225)
femmesh.addFace([8, 622, 425, 1175, 1176, 1177], 226)
femmesh.addFace([275, 571, 355, 1178, 1179, 1169], 227)
femmesh.addFace([99, 717, 495, 1180, 1181, 1182], 228)
femmesh.addFace([615, 660, 346, 1161, 1183, 1184], 229)
femmesh.addFace([201, 581, 500, 1185, 1129, 1132], 230)
femmesh.addFace([253, 670, 369, 1186, 1187, 998], 231)
femmesh.addFace([303, 683, 443, 1188, 1189, 934], 232)
femmesh.addFace([255, 591, 448, 1190, 1191, 1192], 233)
femmesh.addFace([274, 567, 389, 1193, 1194, 1195], 234)
femmesh.addFace([357, 515, 240, 1196, 1197, 881], 235)
femmesh.addFace([412, 835, 244, 1122, 1198, 1199], 236)
femmesh.addFace([533, 676, 352, 1200, 1038, 1201], 237)
femmesh.addFace([388, 789, 271, 1202, 1203, 1204], 238)
femmesh.addFace([365, 496, 258, 1205, 1206, 931], 239)
femmesh.addFace([374, 790, 575, 1207, 1208, 1209], 240)
femmesh.addFace([457, 508, 209, 1210, 1211, 1212], 241)
femmesh.addFace([143, 652, 392, 1213, 1214, 1019], 242)
femmesh.addFace([8, 425, 9, 1177, 1215, 28], 243)
femmesh.addFace([591, 727, 448, 1216, 1217, 1191], 244)
femmesh.addFace([373, 479, 203, 1218, 868, 1018], 245)
femmesh.addFace([239, 572, 356, 1219, 1220, 884], 246)
femmesh.addFace([296, 595, 359, 1221, 904, 1174], 247)
femmesh.addFace([422, 463, 19, 1086, 1222, 1223], 248)
femmesh.addFace([539, 743, 340, 1224, 1225, 918], 249)
femmesh.addFace([369, 491, 286, 980, 1226, 1227], 250)
femmesh.addFace([213, 467, 383, 1228, 989, 1229], 251)
femmesh.addFace([274, 570, 567, 1230, 1231, 1193], 252)
femmesh.addFace([208, 452, 407, 1232, 1233, 1234], 253)
femmesh.addFace([207, 819, 379, 1235, 1236, 1237], 254)
femmesh.addFace([374, 481, 279, 1238, 1239, 1240], 255)
femmesh.addFace([341, 585, 525, 1241, 1242, 1243], 256)
femmesh.addFace([361, 605, 233, 1244, 1245, 1005], 257)
femmesh.addFace([396, 635, 69, 1246, 1247, 1248], 258)
femmesh.addFace([539, 666, 273, 917, 1249, 1250], 259)
femmesh.addFace([236, 506, 366, 1251, 1252, 1253], 260)
femmesh.addFace([412, 762, 376, 1254, 1255, 1123], 261)
femmesh.addFace([449, 491, 210, 1256, 1257, 1258], 262)
femmesh.addFace([286, 491, 449, 1226, 1256, 1259], 263)
femmesh.addFace([58, 530, 409, 1260, 1261, 1262], 264)
femmesh.addFace([361, 777, 514, 1004, 1263, 1264], 265)
femmesh.addFace([265, 627, 492, 953, 1265, 1266], 266)
femmesh.addFace([18, 422, 19, 1267, 1223, 38], 267)
femmesh.addFace([352, 827, 533, 1268, 1269, 1201], 268)
femmesh.addFace([567, 570, 350, 1231, 1270, 1061], 269)
femmesh.addFace([10, 589, 359, 1271, 1272, 906], 270)
femmesh.addFace([434, 641, 222, 1273, 1274, 1275], 271)
femmesh.addFace([333, 641, 434, 1276, 1273, 1277], 272)
femmesh.addFace([349, 679, 456, 1278, 1279, 1280], 273)
femmesh.addFace([267, 631, 378, 1281, 1282, 1283], 274)
femmesh.addFace([382, 485, 250, 1284, 1006, 1109], 275)
femmesh.addFace([525, 585, 225, 1242, 1285, 1286], 276)
femmesh.addFace([68, 635, 67, 1287, 1165, 94], 277)
femmesh.addFace([189, 450, 388, 1288, 1289, 1290], 278)
femmesh.addFace([141, 602, 363, 1291, 1292, 893], 279)
femmesh.addFace([456, 679, 251, 1279, 1293, 1294], 280)
femmesh.addFace([246, 431, 404, 1295, 920, 1296], 281)
femmesh.addFace([427, 586, 335, 1297, 1298, 1078], 282)
femmesh.addFace([377, 733, 503, 1074, 1299, 1300], 283)
femmesh.addFace([258, 718, 426, 1301, 1302, 929], 284)
femmesh.addFace([437, 549, 305, 1303, 1304, 1305], 285)
femmesh.addFace([477, 799, 334, 1306, 1307, 864], 286)
femmesh.addFace([214, 549, 437, 1308, 1303, 1309], 287)
femmesh.addFace([204, 686, 442, 1310, 1311, 1312], 288)
femmesh.addFace([258, 783, 718, 1313, 1314, 1301], 289)
femmesh.addFace([575, 828, 374, 1315, 1316, 1209], 290)
femmesh.addFace([141, 604, 142, 892, 1317, 168], 291)
femmesh.addFace([450, 789, 388, 1318, 1202, 1289], 292)
femmesh.addFace([243, 800, 400, 1149, 1319, 1320], 293)
femmesh.addFace([372, 716, 314, 1321, 1322, 1145], 294)
femmesh.addFace([248, 696, 463, 1323, 1324, 1085], 295)
femmesh.addFace([281, 494, 433, 1325, 1326, 1327], 296)
femmesh.addFace([366, 623, 236, 1328, 1329, 1253], 297)
femmesh.addFace([367, 594, 250, 1330, 969, 1002], 298)
femmesh.addFace([424, 559, 247, 1331, 1332, 1333], 299)
femmesh.addFace([306, 559, 424, 1334, 1331, 1335], 300)
femmesh.addFace([433, 494, 214, 1326, 1336, 1337], 301)
femmesh.addFace([397, 443, 227, 935, 1338, 1339], 302)
femmesh.addFace([474, 686, 204, 1340, 1310, 1341], 303)
femmesh.addFace([242, 481, 413, 1342, 1343, 1344], 304)
femmesh.addFace([371, 529, 262, 1345, 1346, 898], 305)
femmesh.addFace([411, 645, 302, 1347, 1348, 1349], 306)
femmesh.addFace([364, 581, 201, 1350, 1185, 1098], 307)
femmesh.addFace([351, 619, 607, 1351, 1352, 1353], 308)
femmesh.addFace([287, 829, 491, 1354, 1355, 979], 309)
femmesh.addFace([369, 670, 287, 1187, 1356, 981], 310)
femmesh.addFace([56, 646, 55, 889, 1357, 82], 311)
femmesh.addFace([202, 654, 395, 1358, 1359, 1360], 312)
femmesh.addFace([387, 722, 281, 1111, 1361, 1362], 313)
femmesh.addFace([231, 539, 373, 1363, 1364, 1365], 314)
femmesh.addFace([387, 804, 193, 1366, 1367, 957], 315)
femmesh.addFace([413, 459, 242, 1368, 1089, 1344], 316)
femmesh.addFace([358, 584, 487, 1369, 1370, 859], 317)
femmesh.addFace([434, 593, 239, 1371, 1372, 1373], 318)
femmesh.addFace([377, 503, 277, 1300, 1374, 1375], 319)
femmesh.addFace([424, 606, 306, 972, 1376, 1335], 320)
femmesh.addFace([239, 500, 434, 883, 1377, 1373], 321)
femmesh.addFace([370, 703, 310, 1378, 1379, 1101], 322)
femmesh.addFace([468, 501, 189, 1380, 1381, 1382], 323)
femmesh.addFace([289, 501, 468, 1383, 1380, 1384], 324)
femmesh.addFace([203, 757, 429, 1385, 1150, 1016], 325)
femmesh.addFace([369, 582, 480, 1386, 1387, 996], 326)
femmesh.addFace([344, 757, 562, 1151, 1388, 1389], 327)
femmesh.addFace([286, 582, 369, 1390, 1386, 1227], 328)
femmesh.addFace([434, 737, 593, 1391, 1392, 1371], 329)
femmesh.addFace([405, 490, 279, 1393, 1394, 1395], 330)
femmesh.addFace([234, 672, 385, 1396, 1397, 1157], 331)
femmesh.addFace([232, 573, 370, 1398, 1399, 1128], 332)
femmesh.addFace([326, 711, 443, 1400, 1401, 1402], 333)
femmesh.addFace([10, 595, 11, 905, 1403, 30], 334)
femmesh.addFace([110, 729, 493, 1404, 1405, 1406], 335)
femmesh.addFace([443, 773, 326, 1407, 1408, 1402], 336)
femmesh.addFace([229, 529, 371, 1409, 1345, 1410], 337)
femmesh.addFace([350, 724, 461, 902, 1411, 923], 338)
femmesh.addFace([260, 488, 455, 1412, 1413, 1159], 339)
femmesh.addFace([318, 663, 563, 1414, 986, 1415], 340)
femmesh.addFace([429, 665, 240, 1134, 1041, 1416], 341)
femmesh.addFace([495, 717, 343, 1181, 1417, 933], 342)
femmesh.addFace([467, 808, 247, 1418, 1419, 990], 343)
femmesh.addFace([457, 681, 508, 1420, 1421, 1210], 344)
femmesh.addFace([264, 520, 383, 1080, 1422, 1423], 345)
femmesh.addFace([205, 558, 371, 1424, 1425, 908], 346)
femmesh.addFace([69, 635, 68, 1247, 1287, 95], 347)
femmesh.addFace([371, 534, 229, 1426, 1427, 1410], 348)
femmesh.addFace([373, 636, 231, 1428, 1034, 1365], 349)
femmesh.addFace([389, 567, 538, 1194, 1429, 1430], 350)
femmesh.addFace([354, 531, 448, 1026, 1431, 1432], 351)
femmesh.addFace([484, 639, 323, 1433, 1434, 1435], 352)
femmesh.addFace([242, 690, 439, 1088, 1436, 1437], 353)
femmesh.addFace([328, 487, 453, 1438, 1439, 1440], 354)
femmesh.addFace([484, 769, 639, 1441, 1442, 1433], 355)
femmesh.addFace([452, 466, 276, 1443, 1444, 1445], 356)
femmesh.addFace([208, 466, 452, 939, 1443, 1232], 357)
femmesh.addFace([302, 672, 411, 1446, 1447, 1349], 358)
femmesh.addFace([440, 469, 272, 1448, 926, 1449], 359)
femmesh.addFace([191, 469, 440, 1450, 1448, 1451], 360)
femmesh.addFace([213, 565, 423, 1452, 1453, 1454], 361)
femmesh.addFace([423, 565, 301, 1453, 1455, 1456], 362)
femmesh.addFace([562, 701, 344, 1457, 1458, 1389], 363)
femmesh.addFace([679, 779, 234, 1459, 1460, 1156], 364)
femmesh.addFace([607, 619, 291, 1352, 1461, 1462], 365)
femmesh.addFace([468, 728, 335, 1463, 1464, 1465], 366)
femmesh.addFace([431, 583, 280, 1466, 1467, 921], 367)
femmesh.addFace([427, 735, 235, 1077, 1468, 1469], 368)
femmesh.addFace([460, 550, 273, 1470, 1163, 1471], 369)
femmesh.addFace([200, 727, 634, 1472, 1473, 1140], 370)
femmesh.addFace([193, 804, 398, 1367, 1474, 1475], 371)
femmesh.addFace([279, 643, 374, 1476, 1477, 1240], 372)
femmesh.addFace([438, 819, 207, 1478, 1235, 1479], 373)
femmesh.addFace([383, 520, 213, 1422, 1480, 1229], 374)
femmesh.addFace([575, 790, 288, 1208, 1481, 1482], 375)
femmesh.addFace([59, 530, 58, 1483, 1260, 85], 376)
femmesh.addFace([674, 816, 372, 1484, 871, 1144], 377)
femmesh.addFace([137, 412, 138, 1485, 1486, 164], 378)
femmesh.addFace([396, 495, 259, 1487, 932, 1488], 379)
femmesh.addFace([202, 795, 654, 1489, 1490, 1358], 380)
femmesh.addFace([99, 495, 98, 1182, 1491, 118], 381)
femmesh.addFace([161, 580, 4, 1492, 1493, 188], 382)
femmesh.addFace([690, 847, 439, 1494, 1495, 1436], 383)
femmesh.addFace([377, 544, 282, 1496, 1497, 1075], 384)
femmesh.addFace([268, 530, 386, 1498, 1499, 1500], 385)
femmesh.addFace([21, 415, 22, 1501, 1502, 41], 386)
femmesh.addFace([6, 414, 7, 1503, 1504, 26], 387)
femmesh.addFace([390, 770, 285, 1505, 1506, 1507], 388)
femmesh.addFace([328, 795, 487, 1508, 1509, 1438], 389)
femmesh.addFace([261, 609, 516, 1510, 1511, 1512], 390)
femmesh.addFace([4, 580, 115, 1493, 1513, 135], 391)
femmesh.addFace([425, 622, 308, 1176, 1514, 1515], 392)
femmesh.addFace([620, 665, 339, 1042, 1516, 1517], 393)
femmesh.addFace([379, 768, 207, 1518, 1519, 1237], 394)
femmesh.addFace([516, 609, 345, 1511, 982, 1055], 395)
femmesh.addFace([261, 510, 436, 1520, 1521, 1522], 396)
femmesh.addFace([378, 631, 145, 1282, 1523, 1524], 397)
femmesh.addFace([378, 623, 267, 1525, 1526, 1283], 398)
femmesh.addFace([212, 483, 458, 1527, 1528, 1529], 399)
femmesh.addFace([562, 757, 380, 1388, 1530, 1531], 400)
femmesh.addFace([385, 527, 200, 1532, 1533, 1142], 401)
femmesh.addFace([108, 436, 107, 1534, 1535, 127], 402)
femmesh.addFace([277, 633, 377, 1536, 1537, 1375], 403)
femmesh.addFace([145, 687, 378, 1538, 1539, 1524], 404)
femmesh.addFace([46, 695, 381, 1540, 1541, 1064], 405)
femmesh.addFace([386, 524, 268, 1542, 1008, 1500], 406)
femmesh.addFace([12, 659, 394, 1543, 1544, 1153], 407)
femmesh.addFace([49, 778, 48, 1545, 966, 75], 408)
femmesh.addFace([664, 838, 370, 1546, 1547, 1100], 409)
femmesh.addFace([305, 513, 437, 1548, 1549, 1305], 410)
femmesh.addFace([437, 513, 237, 1549, 1550, 1551], 411)
femmesh.addFace([105, 432, 104, 1552, 1553, 124], 412)
femmesh.addFace([257, 543, 402, 1554, 1555, 909], 413)
femmesh.addFace([46, 535, 45, 1063, 1556, 72], 414)
femmesh.addFace([581, 750, 254, 1557, 1558, 1559], 415)
femmesh.addFace([367, 669, 513, 1001, 1560, 1561], 416)
femmesh.addFace([618, 620, 339, 1562, 1517, 1563], 417)
femmesh.addFace([427, 557, 293, 1564, 1565, 1566], 418)
femmesh.addFace([513, 669, 237, 1560, 1567, 1550], 419)
femmesh.addFace([480, 582, 191, 1387, 1568, 1569], 420)
femmesh.addFace([235, 557, 427, 1570, 1564, 1469], 421)
femmesh.addFace([428, 613, 206, 1571, 1572, 1036], 422)
femmesh.addFace([395, 816, 202, 899, 1573, 1360], 423)
femmesh.addFace([295, 647, 611, 1574, 1575, 1124], 424)
femmesh.addFace([423, 506, 236, 1576, 1251, 1577], 425)
femmesh.addFace([243, 834, 535, 1578, 1579, 1138], 426)
femmesh.addFace([301, 506, 423, 1580, 1576, 1456], 427)
femmesh.addFace([270, 800, 695, 1581, 1582, 1583], 428)
femmesh.addFace([242, 837, 481, 1584, 1585, 1342], 429)
femmesh.addFace([383, 686, 264, 1586, 1587, 1423], 430)
femmesh.addFace([363, 808, 467, 1588, 1418, 1589], 431)
femmesh.addFace([217, 663, 484, 987, 1590, 1591], 432)
femmesh.addFace([110, 493, 109, 1406, 1592, 129], 433)
femmesh.addFace([335, 760, 468, 1593, 1594, 1465], 434)
femmesh.addFace([443, 711, 227, 1401, 1595, 1338], 435)
femmesh.addFace([434, 500, 333, 1377, 1131, 1277], 436)
femmesh.addFace([646, 759, 55, 1596, 1597, 1357], 437)
femmesh.addFace([399, 778, 50, 1598, 1599, 1600], 438)
femmesh.addFace([259, 635, 396, 1601, 1246, 1488], 439)
femmesh.addFace([393, 618, 339, 1602, 1563, 1094], 440)
femmesh.addFace([493, 729, 346, 1405, 1603, 1604], 441)
femmesh.addFace([611, 647, 368, 1575, 1605, 1606], 442)
femmesh.addFace([388, 728, 189, 1607, 1608, 1290], 443)
femmesh.addFace([53, 759, 445, 1609, 1610, 1611], 444)
femmesh.addFace([55, 759, 54, 1597, 1612, 81], 445)
femmesh.addFace([211, 490, 404, 1613, 1614, 1615], 446)
femmesh.addFace([251, 634, 509, 1616, 1617, 1618], 447)
femmesh.addFace([325, 801, 432, 1619, 1620, 1621], 448)
femmesh.addFace([324, 710, 386, 1622, 1623, 1624], 449)
femmesh.addFace([206, 744, 389, 1625, 1626, 1037], 450)
femmesh.addFace([460, 648, 550, 1627, 1628, 1470], 451)
femmesh.addFace([402, 547, 260, 1629, 1630, 919], 452)
femmesh.addFace([281, 722, 494, 1361, 1631, 1325], 453)
femmesh.addFace([275, 775, 409, 1632, 1633, 1052], 454)
femmesh.addFace([515, 840, 230, 1634, 1635, 854], 455)
femmesh.addFace([214, 505, 433, 1636, 1637, 1337], 456)
femmesh.addFace([386, 612, 324, 1638, 1639, 1624], 457)
femmesh.addFace([297, 653, 472, 1640, 1641, 1642], 458)
femmesh.addFace([264, 544, 498, 1643, 1644, 1081], 459)
femmesh.addFace([671, 698, 451, 1645, 1646, 1647], 460)
femmesh.addFace([450, 753, 263, 1648, 1649, 1650], 461)
femmesh.addFace([215, 629, 446, 1651, 1652, 1653], 462)
femmesh.addFace([240, 607, 429, 1654, 1655, 1416], 463)
femmesh.addFace([271, 749, 388, 1656, 1657, 1204], 464)
femmesh.addFace([416, 635, 259, 1166, 1601, 1658], 465)
femmesh.addFace([194, 649, 478, 1659, 1660, 1661], 466)
femmesh.addFace([478, 649, 331, 1660, 1662, 1663], 467)
femmesh.addFace([465, 631, 267, 1664, 1281, 1665], 468)
femmesh.addFace([389, 538, 271, 1430, 1666, 1031], 469)
femmesh.addFace([69, 796, 396, 1667, 1668, 1248], 470)
femmesh.addFace([390, 599, 65, 1669, 1670, 1671], 471)
femmesh.addFace([416, 540, 285, 1672, 1673, 1674], 472)
femmesh.addFace([281, 776, 387, 1675, 1676, 1362], 473)
femmesh.addFace([299, 655, 447, 1677, 1678, 1679], 474)
femmesh.addFace([19, 463, 20, 1222, 1680, 39], 475)
femmesh.addFace([259, 540, 416, 1681, 1672, 1658], 476)
femmesh.addFace([54, 759, 53, 1612, 1609, 80], 477)
femmesh.addFace([226, 615, 393, 1162, 1682, 1096], 478)
femmesh.addFace([209, 645, 411, 1683, 1347, 1684], 479)
femmesh.addFace([254, 702, 581, 992, 1685, 1559], 480)
femmesh.addFace([270, 695, 502, 1583, 1686, 1687], 481)
femmesh.addFace([432, 801, 553, 1620, 1688, 1689], 482)
femmesh.addFace([232, 772, 391, 1690, 1691, 1692], 483)
femmesh.addFace([303, 627, 545, 1152, 952, 1693], 484)
femmesh.addFace([454, 613, 263, 1694, 1695, 1696], 485)
femmesh.addFace([509, 634, 332, 1617, 1697, 1698], 486)
femmesh.addFace([222, 547, 402, 1699, 1629, 1700], 487)
femmesh.addFace([317, 613, 454, 1701, 1694, 1702], 488)
femmesh.addFace([283, 578, 511, 1703, 1704, 1705], 489)
femmesh.addFace([436, 510, 107, 1521, 1706, 1535], 490)
femmesh.addFace([220, 828, 575, 1707, 1315, 1708], 491)
femmesh.addFace([402, 543, 222, 1555, 1709, 1700], 492)
femmesh.addFace([268, 676, 470, 1039, 1710, 1056], 493)
femmesh.addFace([448, 531, 255, 1431, 1711, 1192], 494)
femmesh.addFace([305, 590, 513, 1712, 1713, 1548], 495)
femmesh.addFace([285, 770, 579, 1506, 1714, 1715], 496)
femmesh.addFace([196, 556, 492, 1716, 1717, 1718], 497)
femmesh.addFace([418, 559, 306, 1719, 1334, 1720], 498)
femmesh.addFace([588, 653, 297, 1721, 1640, 1722], 499)
femmesh.addFace([492, 556, 313, 1717, 1723, 1724], 500)
femmesh.addFace([609, 757, 203, 1725, 1385, 983], 501)
femmesh.addFace([487, 584, 256, 1370, 1726, 1727], 502)
femmesh.addFace([552, 753, 450, 1728, 1648, 1729], 503)
femmesh.addFace([444, 489, 256, 943, 1730, 1731], 504)
femmesh.addFace([420, 522, 249, 1732, 1733, 1734], 505)
femmesh.addFace([263, 753, 454, 1649, 1735, 1696], 506)
femmesh.addFace([400, 519, 243, 1736, 1737, 1320], 507)
femmesh.addFace([226, 701, 562, 1095, 1457, 1738], 508)
femmesh.addFace([590, 844, 513, 1739, 1740, 1713], 509)
femmesh.addFace([278, 555, 398, 1091, 1741, 1742], 510)
femmesh.addFace([546, 782, 232, 1743, 1744, 1127], 511)
femmesh.addFace([398, 555, 193, 1741, 1745, 1475], 512)
femmesh.addFace([248, 523, 421, 1746, 1747, 1748], 513)
femmesh.addFace([389, 744, 274, 1626, 1749, 1195], 514)
femmesh.addFace([453, 487, 256, 1439, 1727, 1750], 515)
femmesh.addFace([189, 728, 468, 1608, 1463, 1382], 516)
femmesh.addFace([255, 851, 403, 1751, 1752, 1753], 517)
femmesh.addFace([218, 814, 403, 1754, 1755, 1756], 518)
femmesh.addFace([451, 556, 196, 1757, 1716, 1758], 519)
femmesh.addFace([293, 556, 451, 1759, 1757, 1760], 520)
femmesh.addFace([311, 680, 430, 1761, 1021, 946], 521)
femmesh.addFace([421, 532, 309, 1762, 1763, 1764], 522)
femmesh.addFace([108, 625, 436, 1765, 1766, 1534], 523)
femmesh.addFace([399, 712, 276, 1767, 1768, 1769], 524)
femmesh.addFace([301, 598, 506, 1770, 1771, 1580], 525)
femmesh.addFace([308, 519, 420, 1772, 1773, 1774], 526)
femmesh.addFace([200, 527, 408, 1533, 1775, 1776], 527)
femmesh.addFace([478, 543, 257, 1777, 1554, 1778], 528)
femmesh.addFace([627, 671, 196, 1119, 1779, 1780], 529)
femmesh.addFace([408, 527, 302, 1775, 1781, 1045], 530)
femmesh.addFace([531, 655, 362, 1782, 1783, 1784], 531)
femmesh.addFace([412, 688, 606, 1785, 1786, 1787], 532)
femmesh.addFace([249, 522, 433, 1733, 1788, 1789], 533)
femmesh.addFace([391, 573, 232, 1790, 1398, 1692], 534)
femmesh.addFace([228, 655, 531, 1791, 1782, 1116], 535)
femmesh.addFace([534, 830, 756, 1792, 1793, 1794], 536)
femmesh.addFace([63, 456, 62, 1795, 1796, 89], 537)
femmesh.addFace([277, 573, 391, 1797, 1790, 1798], 538)
femmesh.addFace([488, 673, 455, 1799, 1800, 1413], 539)
femmesh.addFace([209, 726, 457, 1801, 1802, 1212], 540)
femmesh.addFace([452, 709, 407, 1803, 1804, 1233], 541)
femmesh.addFace([271, 538, 401, 1666, 1805, 1806], 542)
femmesh.addFace([406, 570, 274, 1807, 1230, 1808], 543)
femmesh.addFace([416, 579, 67, 1809, 1810, 1167], 544)
femmesh.addFace([401, 749, 271, 1811, 1656, 1806], 545)
femmesh.addFace([553, 578, 283, 1812, 1703, 1813], 546)
femmesh.addFace([159, 499, 160, 1814, 1815, 186], 547)
femmesh.addFace([435, 523, 248, 1816, 1746, 1817], 548)
femmesh.addFace([50, 778, 49, 1599, 1545, 76], 549)
femmesh.addFace([196, 671, 451, 1779, 1647, 1758], 550)
femmesh.addFace([613, 719, 206, 1818, 1819, 1572], 551)
femmesh.addFace([114, 526, 113, 1820, 874, 133], 552)
femmesh.addFace([146, 465, 147, 1821, 1822, 173], 553)
femmesh.addFace([227, 678, 397, 1823, 1824, 1339], 554)
femmesh.addFace([397, 678, 312, 1824, 1825, 1826], 555)
femmesh.addFace([380, 667, 562, 1827, 1828, 1531], 556)
femmesh.addFace([294, 603, 392, 1829, 1830, 1831], 557)
femmesh.addFace([562, 667, 298, 1828, 1832, 1833], 558)
femmesh.addFace([463, 621, 20, 1834, 1835, 1680], 559)
femmesh.addFace([364, 750, 581, 1836, 1557, 1350], 560)
femmesh.addFace([197, 620, 618, 1837, 1562, 1838], 561)
femmesh.addFace([150, 640, 419, 1839, 1840, 1841], 562)
femmesh.addFace([279, 794, 405, 1842, 1843, 1395], 563)
femmesh.addFace([295, 662, 560, 1844, 1845, 1846], 564)
femmesh.addFace([391, 633, 277, 1847, 1536, 1798], 565)
femmesh.addFace([392, 687, 144, 1848, 1849, 1020], 566)
femmesh.addFace([105, 723, 432, 1850, 1851, 1552], 567)
femmesh.addFace([159, 713, 499, 1852, 1853, 1814], 568)
femmesh.addFace([391, 583, 192, 1854, 1855, 1856], 569)
femmesh.addFace([261, 802, 510, 1857, 1858, 1520], 570)
femmesh.addFace([221, 691, 453, 1859, 1860, 1861], 571)
femmesh.addFace([461, 561, 216, 1862, 1863, 924], 572)
femmesh.addFace([453, 691, 328, 1860, 1864, 1440], 573)
femmesh.addFace([408, 809, 200, 1865, 1866, 1776], 574)
femmesh.addFace([521, 775, 275, 1867, 1632, 1168], 575)
femmesh.addFace([65, 770, 390, 1868, 1505, 1671], 576)
femmesh.addFace([454, 753, 536, 1735, 1869, 1870], 577)
femmesh.addFace([401, 538, 216, 1805, 1871, 1872], 578)
femmesh.addFace([148, 497, 149, 1873, 1874, 175], 579)
femmesh.addFace([426, 617, 365, 1875, 1876, 930], 580)
femmesh.addFace([157, 475, 158, 1877, 1878, 184], 581)
femmesh.addFace([403, 781, 218, 1879, 1880, 1756], 582)
femmesh.addFace([598, 836, 506, 1881, 1882, 1771], 583)
femmesh.addFace([412, 606, 138, 1787, 1883, 1486], 584)
femmesh.addFace([560, 662, 360, 1845, 1884, 1885], 585)
femmesh.addFace([476, 545, 265, 1886, 954, 1887], 586)
femmesh.addFace([409, 530, 268, 1261, 1498, 1057], 587)
femmesh.addFace([629, 825, 446, 1888, 1889, 1652], 588)
femmesh.addFace([411, 672, 234, 1447, 1396, 1890], 589)
femmesh.addFace([404, 568, 211, 1891, 1892, 1615], 590)
femmesh.addFace([544, 784, 498, 1893, 1894, 1644], 591)
femmesh.addFace([285, 697, 390, 1895, 1896, 1507], 592)
femmesh.addFace([413, 828, 220, 1897, 1707, 1898], 593)
femmesh.addFace([280, 568, 404, 1899, 1891, 922], 594)
femmesh.addFace([584, 588, 297, 1900, 1722, 1901], 595)
femmesh.addFace([661, 739, 361, 1902, 1903, 1904], 596)
femmesh.addFace([192, 633, 391, 1905, 1847, 1856], 597)
femmesh.addFace([414, 622, 7, 1906, 1907, 1504], 598)
femmesh.addFace([21, 621, 415, 1908, 1909, 1501], 599)
femmesh.addFace([358, 588, 584, 1910, 1900, 1369], 600)
femmesh.addFace([392, 652, 294, 1214, 1911, 1831], 601)
femmesh.addFace([274, 797, 406, 1912, 1913, 1808], 602)
femmesh.addFace([428, 789, 263, 1914, 1915, 1916], 603)
femmesh.addFace([440, 851, 255, 1917, 1751, 1918], 604)
femmesh.addFace([318, 563, 497, 1415, 1919, 1920], 605)
femmesh.addFace([405, 812, 490, 1921, 1922, 1393], 606)
femmesh.addFace([497, 563, 284, 1919, 1923, 1924], 607)
femmesh.addFace([606, 688, 306, 1786, 1925, 1376], 608)
femmesh.addFace([535, 834, 375, 1579, 1926, 1927], 609)
femmesh.addFace([256, 584, 444, 1726, 1928, 1731], 610)
femmesh.addFace([444, 584, 297, 1928, 1901, 1929], 611)
femmesh.addFace([394, 656, 237, 1930, 1931, 1932], 612)
femmesh.addFace([244, 532, 418, 1933, 1934, 1935], 613)
femmesh.addFace([296, 656, 394, 1936, 1930, 1937], 614)
femmesh.addFace([237, 758, 394, 1938, 1939, 1932], 615)
femmesh.addFace([407, 709, 241, 1804, 1940, 1941], 616)
femmesh.addFace([436, 625, 298, 1766, 1942, 1943], 617)
femmesh.addFace([297, 642, 444, 1944, 940, 1929], 618)
femmesh.addFace([303, 545, 407, 1693, 1945, 1946], 619)
femmesh.addFace([233, 551, 408, 1947, 1948, 1049], 620)
femmesh.addFace([542, 668, 329, 1949, 1950, 1951], 621)
femmesh.addFace([433, 522, 281, 1788, 1952, 1327], 622)
femmesh.addFace([205, 792, 420, 1953, 1954, 1955], 623)
femmesh.addFace([142, 652, 143, 1956, 1213, 169], 624)
femmesh.addFace([410, 741, 322, 1957, 1958, 1959], 625)
femmesh.addFace([238, 741, 410, 1960, 1957, 867], 626)
femmesh.addFace([231, 743, 539, 1961, 1224, 1363], 627)
femmesh.addFace([542, 840, 668, 1962, 1963, 1949], 628)
femmesh.addFace([200, 809, 448, 1866, 1964, 1965], 629)
femmesh.addFace([406, 797, 238, 1913, 1966, 1967], 630)
femmesh.addFace([448, 809, 354, 1964, 1968, 1432], 631)
femmesh.addFace([282, 523, 435, 1969, 1816, 1970], 632)
femmesh.addFace([421, 788, 204, 1971, 1972, 1973], 633)
femmesh.addFace([322, 546, 410, 1974, 1975, 1959], 634)
femmesh.addFace([432, 723, 325, 1851, 1976, 1621], 635)
femmesh.addFace([222, 641, 547, 1274, 1977, 1699], 636)
femmesh.addFace([385, 672, 527, 1397, 1978, 1532], 637)
femmesh.addFace([482, 740, 719, 1979, 1980, 1981], 638)
femmesh.addFace([457, 726, 540, 1802, 1982, 1983], 639)
femmesh.addFace([276, 566, 399, 1984, 1985, 1769], 640)
femmesh.addFace([260, 547, 488, 1630, 1986, 1412], 641)
femmesh.addFace([380, 757, 609, 1530, 1725, 1987], 642)
femmesh.addFace([336, 793, 496, 1988, 1989, 1015], 643)
femmesh.addFace([655, 767, 447, 1990, 1991, 1678], 644)
femmesh.addFace([394, 616, 13, 1992, 1993, 1154], 645)
femmesh.addFace([524, 781, 352, 1994, 1995, 1009], 646)
femmesh.addFace([193, 570, 406, 1996, 1807, 1997], 647)
femmesh.addFace([263, 789, 450, 1915, 1318, 1650], 648)
femmesh.addFace([507, 549, 292, 1998, 1999, 2000], 649)
femmesh.addFace([394, 659, 296, 1544, 2001, 1937], 650)
femmesh.addFace([305, 549, 507, 1304, 1998, 2002], 651)
femmesh.addFace([238, 797, 417, 1966, 2003, 2004], 652)
femmesh.addFace([404, 812, 246, 2005, 2006, 1296], 653)
femmesh.addFace([373, 539, 479, 1364, 2007, 1218], 654)
femmesh.addFace([211, 790, 643, 2008, 2009, 2010], 655)
femmesh.addFace([419, 704, 150, 2011, 2012, 1841], 656)
femmesh.addFace([191, 582, 469, 1568, 2013, 1450], 657)
femmesh.addFace([395, 654, 257, 1359, 2014, 911], 658)
femmesh.addFace([155, 642, 472, 2015, 2016, 2017], 659)
femmesh.addFace([405, 794, 198, 1843, 2018, 2019], 660)
femmesh.addFace([274, 744, 417, 1749, 2020, 2021], 661)
femmesh.addFace([309, 621, 463, 2022, 1834, 2023], 662)
femmesh.addFace([322, 740, 482, 2024, 1979, 2025], 663)
femmesh.addFace([466, 566, 276, 2026, 1984, 1444], 664)
femmesh.addFace([536, 790, 211, 2027, 2008, 2028], 665)
femmesh.addFace([483, 750, 458, 2029, 2030, 1528], 666)
femmesh.addFace([430, 636, 291, 1035, 2031, 2032], 667)
femmesh.addFace([417, 797, 274, 2003, 1912, 2021], 668)
femmesh.addFace([379, 678, 571, 2033, 2034, 2035], 669)
femmesh.addFace([107, 510, 106, 1706, 2036, 126], 670)
femmesh.addFace([317, 528, 482, 2037, 2038, 2039], 671)
femmesh.addFace([220, 748, 413, 2040, 2041, 1898], 672)
femmesh.addFace([413, 748, 321, 2041, 2042, 2043], 673)
femmesh.addFace([695, 800, 381, 1582, 1148, 1541], 674)
femmesh.addFace([398, 771, 278, 2044, 2045, 1742], 675)
femmesh.addFace([607, 791, 429, 2046, 2047, 1655], 676)
femmesh.addFace([508, 517, 320, 2048, 958, 2049], 677)
femmesh.addFace([265, 561, 461, 2050, 1862, 2051], 678)
femmesh.addFace([604, 652, 142, 2052, 1956, 1317], 679)
femmesh.addFace([478, 647, 194, 2053, 2054, 1661], 680)
femmesh.addFace([468, 760, 252, 1594, 2055, 2056], 681)
femmesh.addFace([312, 698, 671, 2057, 1645, 2058], 682)
femmesh.addFace([465, 663, 318, 2059, 1414, 2060], 683)
femmesh.addFace([57, 521, 56, 2061, 1106, 83], 684)
femmesh.addFace([50, 712, 399, 2062, 1767, 1600], 685)
femmesh.addFace([403, 591, 255, 2063, 1190, 1753], 686)
femmesh.addFace([351, 607, 515, 1353, 2064, 853], 687)
femmesh.addFace([310, 658, 594, 2065, 949, 2066], 688)
femmesh.addFace([204, 807, 421, 2067, 2068, 1973], 689)
femmesh.addFace([533, 827, 272, 1269, 2069, 1069], 690)
femmesh.addFace([321, 715, 413, 2070, 2071, 2043], 691)
femmesh.addFace([137, 762, 412, 2072, 1254, 1485], 692)
femmesh.addFace([593, 737, 331, 1392, 2073, 2074], 693)
femmesh.addFace([312, 671, 397, 2058, 1118, 1826], 694)
femmesh.addFace([534, 756, 229, 1794, 2075, 1427], 695)
femmesh.addFace([207, 676, 533, 2076, 1200, 2077], 696)
femmesh.addFace([198, 598, 405, 2078, 2079, 2019], 697)
femmesh.addFace([571, 678, 227, 2034, 1823, 2080], 698)
femmesh.addFace([365, 720, 706, 2081, 2082, 2083], 699)
femmesh.addFace([285, 579, 416, 1715, 1809, 1674], 700)
femmesh.addFace([210, 586, 427, 2084, 1297, 2085], 701)
femmesh.addFace([205, 841, 558, 2086, 2087, 1424], 702)
femmesh.addFace([499, 713, 336, 1853, 2088, 2089], 703)
femmesh.addFace([756, 830, 347, 1793, 2090, 2091], 704)
femmesh.addFace([202, 816, 674, 1573, 1484, 912], 705)
femmesh.addFace([216, 682, 401, 2092, 2093, 1872], 706)
femmesh.addFace([419, 640, 284, 1840, 2094, 963], 707)
femmesh.addFace([413, 715, 459, 2071, 2095, 1368], 708)
femmesh.addFace([248, 684, 435, 2096, 2097, 1817], 709)
femmesh.addFace([432, 553, 283, 1689, 1813, 2098], 710)
femmesh.addFace([267, 663, 465, 2099, 2059, 1665], 711)
femmesh.addFace([406, 608, 193, 2100, 955, 1997], 712)
femmesh.addFace([11, 659, 12, 2101, 1543, 31], 713)
femmesh.addFace([6, 751, 414, 2102, 2103, 1503], 714)
femmesh.addFace([415, 752, 22, 2104, 2105, 1502], 715)
femmesh.addFace([48, 502, 47, 995, 2106, 74], 716)
femmesh.addFace([199, 590, 410, 2107, 1011, 2108], 717)
femmesh.addFace([558, 841, 400, 2087, 2109, 2110], 718)
femmesh.addFace([460, 666, 195, 2111, 2112, 2113], 719)
femmesh.addFace([407, 683, 303, 2114, 1188, 1946], 720)
femmesh.addFace([241, 683, 407, 2115, 2114, 1941], 721)
femmesh.addFace([454, 738, 317, 2116, 2117, 1702], 722)
femmesh.addFace([410, 838, 199, 2118, 2119, 2108], 723)
femmesh.addFace([520, 565, 213, 2120, 1452, 1480], 724)
femmesh.addFace([414, 774, 622, 2121, 2122, 1906], 725)
femmesh.addFace([315, 565, 520, 2123, 2120, 1136], 726)
femmesh.addFace([422, 597, 300, 2124, 2125, 2126], 727)
femmesh.addFace([411, 697, 285, 2127, 1895, 2128], 728)
femmesh.addFace([285, 726, 411, 2129, 2130, 2128], 729)
femmesh.addFace([456, 780, 349, 2131, 2132, 1280], 730)
femmesh.addFace([18, 597, 422, 2133, 2124, 1267], 731)
femmesh.addFace([306, 688, 418, 1925, 2134, 1720], 732)
femmesh.addFace([238, 694, 406, 2135, 2136, 1967], 733)
femmesh.addFace([409, 775, 58, 1633, 2137, 1262], 734)
femmesh.addFace([418, 688, 244, 2134, 2138, 1935], 735)
femmesh.addFace([272, 851, 440, 2139, 1917, 1449], 736)
femmesh.addFace([746, 751, 5, 2140, 2141, 2142], 737)
femmesh.addFace([23, 752, 745, 2143, 2144, 2145], 738)
femmesh.addFace([292, 549, 494, 1999, 2146, 2147], 739)
femmesh.addFace([481, 828, 413, 2148, 1897, 1343], 740)
femmesh.addFace([547, 641, 334, 1977, 2149, 2150], 741)
femmesh.addFace([494, 549, 214, 2146, 1308, 1336], 742)
femmesh.addFace([411, 726, 209, 2130, 1801, 1684], 743)
femmesh.addFace([407, 842, 208, 2151, 2152, 1234], 744)
femmesh.addFace([596, 661, 361, 2153, 1904, 2154], 745)
femmesh.addFace([454, 536, 211, 1870, 2028, 2155], 746)
femmesh.addFace([417, 741, 238, 2156, 1960, 2004], 747)
femmesh.addFace([492, 561, 265, 2157, 2050, 1266], 748)
femmesh.addFace([507, 590, 305, 1012, 1712, 2002], 749)
femmesh.addFace([558, 800, 270, 2158, 1581, 2159], 750)
femmesh.addFace([745, 762, 136, 2160, 2161, 2162], 751)
femmesh.addFace([234, 697, 411, 2163, 2127, 1890], 752)
femmesh.addFace([279, 837, 439, 2164, 2165, 2166], 753)
femmesh.addFace([247, 559, 442, 1332, 2167, 1104], 754)
femmesh.addFace([400, 800, 558, 1319, 2158, 2110], 755)
femmesh.addFace([215, 577, 503, 2168, 2169, 2170], 756)
femmesh.addFace([531, 848, 255, 2171, 2172, 1711], 757)
femmesh.addFace([207, 768, 470, 1519, 2173, 2174], 758)
femmesh.addFace([154, 642, 155, 941, 2015, 181], 759)
femmesh.addFace([112, 512, 111, 2175, 2176, 131], 760)
femmesh.addFace([476, 554, 208, 2177, 937, 2178], 761)
femmesh.addFace([430, 619, 190, 2179, 2180, 948], 762)
femmesh.addFace([291, 619, 430, 1461, 2179, 2032], 763)
femmesh.addFace([484, 677, 217, 2181, 2182, 1591], 764)
femmesh.addFace([323, 677, 484, 2183, 2181, 1435], 765)
femmesh.addFace([357, 840, 515, 2184, 1634, 1196], 766)
femmesh.addFace([431, 651, 192, 2185, 2186, 2187], 767)
femmesh.addFace([447, 767, 290, 1991, 2188, 2189], 768)
femmesh.addFace([424, 602, 140, 2190, 2191, 1072], 769)
femmesh.addFace([192, 583, 431, 1855, 1466, 2187], 770)
femmesh.addFace([441, 572, 239, 2192, 1219, 2193], 771)
femmesh.addFace([213, 638, 467, 2194, 2195, 1228], 772)
femmesh.addFace([198, 794, 439, 2018, 2196, 2197], 773)
femmesh.addFace([318, 707, 465, 2198, 2199, 2060], 774)
femmesh.addFace([189, 552, 450, 2200, 1729, 1288], 775)
femmesh.addFace([457, 540, 259, 1983, 1681, 857], 776)
femmesh.addFace([482, 528, 232, 2038, 2201, 2202], 777)
femmesh.addFace([269, 517, 508, 2203, 2048, 2204], 778)
femmesh.addFace([195, 739, 661, 2205, 1902, 2206], 779)
femmesh.addFace([426, 821, 617, 2207, 2208, 1875], 780)
femmesh.addFace([403, 814, 591, 1755, 2209, 2063], 781)
femmesh.addFace([244, 688, 412, 2138, 1785, 1199], 782)
femmesh.addFace([439, 837, 242, 2165, 1584, 1437], 783)
femmesh.addFace([332, 727, 591, 2210, 1216, 2211], 784)
femmesh.addFace([425, 589, 9, 2212, 2213, 1215], 785)
femmesh.addFace([440, 839, 191, 2214, 2215, 1451], 786)
femmesh.addFace([215, 733, 435, 2216, 2217, 2218], 787)
femmesh.addFace([419, 637, 327, 962, 2219, 2220], 788)
femmesh.addFace([289, 572, 441, 2221, 2192, 2222], 789)
femmesh.addFace([282, 544, 474, 1497, 2223, 2224], 790)
femmesh.addFace([504, 798, 103, 2225, 2226, 2227], 791)
femmesh.addFace([283, 632, 432, 2228, 2229, 2098], 792)
femmesh.addFace([236, 603, 423, 2230, 2231, 1577], 793)
femmesh.addFace([63, 780, 456, 2232, 2131, 1795], 794)
femmesh.addFace([304, 661, 596, 2233, 2153, 2234], 795)
femmesh.addFace([422, 684, 248, 2235, 2096, 1087], 796)
femmesh.addFace([623, 803, 236, 2236, 2237, 1329], 797)
femmesh.addFace([300, 684, 422, 2238, 2235, 2126], 798)
femmesh.addFace([327, 704, 419, 2239, 2011, 2220], 799)
femmesh.addFace([420, 841, 205, 2240, 2086, 1955], 800)
femmesh.addFace([582, 810, 469, 2241, 2242, 2013], 801)
femmesh.addFace([271, 789, 428, 1203, 1914, 1029], 802)
femmesh.addFace([442, 686, 383, 1311, 1586, 1105], 803)
femmesh.addFace([255, 848, 440, 2172, 2243, 1918], 804)
femmesh.addFace([474, 544, 264, 2223, 1643, 2244], 805)
femmesh.addFace([595, 659, 11, 2245, 2101, 1403], 806)
femmesh.addFace([309, 696, 421, 2246, 2247, 1764], 807)
femmesh.addFace([421, 696, 248, 2247, 1323, 1748], 808)
femmesh.addFace([429, 791, 373, 2047, 2248, 1017], 809)
femmesh.addFace([461, 724, 265, 1411, 2249, 2051], 810)
femmesh.addFace([479, 539, 273, 2007, 1250, 1164], 811)
femmesh.addFace([528, 772, 232, 2250, 1690, 2201], 812)
femmesh.addFace([289, 785, 501, 2251, 2252, 1383], 813)
femmesh.addFace([465, 707, 147, 2199, 2253, 1822], 814)
femmesh.addFace([459, 560, 360, 2254, 1885, 1084], 815)
femmesh.addFace([464, 586, 210, 2255, 2084, 2256], 816)
femmesh.addFace([149, 640, 150, 2257, 1839, 176], 817)
femmesh.addFace([263, 613, 428, 1695, 1571, 1916], 818)
femmesh.addFace([103, 798, 102, 2226, 2258, 122], 819)
femmesh.addFace([101, 518, 100, 2259, 2260, 120], 820)
femmesh.addFace([67, 579, 66, 1810, 2261, 93], 821)
femmesh.addFace([436, 667, 261, 2262, 2263, 1522], 822)
femmesh.addFace([298, 667, 436, 1832, 2262, 1943], 823)
femmesh.addFace([331, 543, 478, 2264, 1777, 1663], 824)
femmesh.addFace([293, 644, 427, 2265, 2266, 1566], 825)
femmesh.addFace([621, 811, 415, 2267, 2268, 1909], 826)
femmesh.addFace([427, 644, 210, 2266, 2269, 2085], 827)
femmesh.addFace([601, 612, 61, 2270, 2271, 2272], 828)
femmesh.addFace([365, 706, 496, 2083, 2273, 1205], 829)
femmesh.addFace([420, 700, 308, 2274, 2275, 1774], 830)
femmesh.addFace([249, 700, 420, 2276, 2274, 1734], 831)
femmesh.addFace([194, 560, 459, 2277, 2254, 2278], 832)
femmesh.addFace([423, 638, 213, 2279, 2194, 1454], 833)
femmesh.addFace([475, 713, 158, 2280, 2281, 1878], 834)
femmesh.addFace([447, 614, 212, 2282, 2283, 2284], 835)
femmesh.addFace([551, 675, 354, 2285, 1024, 2286], 836)
femmesh.addFace([290, 614, 447, 2287, 2282, 2189], 837)
femmesh.addFace([265, 724, 476, 2249, 2288, 1887], 838)
femmesh.addFace([326, 773, 445, 1408, 2289, 2290], 839)
femmesh.addFace([316, 675, 551, 2291, 2285, 2292], 840)
femmesh.addFace([447, 592, 299, 2293, 2294, 1679], 841)
femmesh.addFace([445, 773, 241, 2289, 2295, 2296], 842)
femmesh.addFace([212, 592, 447, 2297, 2293, 2284], 843)
femmesh.addFace([245, 698, 438, 2298, 2299, 2300], 844)
femmesh.addFace([65, 599, 64, 1670, 2301, 91], 845)
femmesh.addFace([278, 554, 476, 2302, 2177, 2303], 846)
femmesh.addFace([211, 568, 454, 1892, 2304, 2155], 847)
femmesh.addFace([218, 781, 524, 1880, 1994, 2305], 848)
femmesh.addFace([433, 731, 249, 2306, 2307, 1789], 849)
femmesh.addFace([478, 833, 647, 2308, 2309, 2053], 850)
femmesh.addFace([276, 712, 537, 1768, 2310, 2311], 851)
femmesh.addFace([420, 792, 522, 1954, 2312, 1732], 852)
femmesh.addFace([486, 563, 217, 2313, 988, 2314], 853)
femmesh.addFace([432, 632, 104, 2229, 2315, 1553], 854)
femmesh.addFace([490, 643, 279, 2316, 1476, 1394], 855)
femmesh.addFace([219, 574, 511, 2317, 2318, 2319], 856)
femmesh.addFace([468, 587, 289, 2320, 2321, 1384], 857)
femmesh.addFace([252, 587, 468, 1171, 2320, 2056], 858)
femmesh.addFace([247, 808, 424, 1419, 2322, 1333], 859)
femmesh.addFace([499, 600, 307, 2323, 2324, 2325], 860)
femmesh.addFace([439, 794, 279, 2196, 1842, 2166], 861)
femmesh.addFace([435, 629, 215, 2326, 1651, 2218], 862)
femmesh.addFace([308, 700, 425, 2275, 2327, 1515], 863)
femmesh.addFace([425, 700, 249, 2327, 2276, 2328], 864)
femmesh.addFace([281, 792, 473, 2329, 2330, 2331], 865)
femmesh.addFace([437, 721, 214, 2332, 2333, 1309], 866)
femmesh.addFace([473, 776, 281, 2334, 1675, 2331], 867)
femmesh.addFace([249, 813, 425, 2335, 2336, 2328], 868)
femmesh.addFace([622, 774, 308, 2122, 2337, 1514], 869)
femmesh.addFace([475, 793, 713, 2338, 2339, 2280], 870)
femmesh.addFace([581, 702, 333, 1685, 2340, 1130], 871)
femmesh.addFace([501, 552, 189, 2341, 2200, 1381], 872)
femmesh.addFace([438, 766, 245, 2342, 2343, 2300], 873)
femmesh.addFace([458, 750, 364, 2030, 1836, 1066], 874)
femmesh.addFace([474, 788, 282, 2344, 2345, 2224], 875)
femmesh.addFace([288, 552, 501, 2346, 2341, 2347], 876)
femmesh.addFace([523, 788, 421, 2348, 1971, 1747], 877)
femmesh.addFace([538, 567, 216, 1429, 1060, 1871], 878)
femmesh.addFace([488, 547, 334, 1986, 2150, 2349], 879)
femmesh.addFace([446, 825, 382, 1889, 2350, 1108], 880)
femmesh.addFace([481, 837, 279, 1585, 2164, 1239], 881)
femmesh.addFace([472, 734, 155, 2351, 2352, 2017], 882)
femmesh.addFace([222, 737, 434, 2353, 1391, 1275], 883)
femmesh.addFace([237, 656, 437, 1931, 2354, 1551], 884)
femmesh.addFace([253, 693, 458, 2355, 2356, 1065], 885)
femmesh.addFace([449, 610, 286, 2357, 2358, 1259], 886)
femmesh.addFace([246, 824, 431, 2359, 2360, 1295], 887)
femmesh.addFace([210, 644, 449, 2269, 2361, 1258], 888)
femmesh.addFace([435, 733, 282, 2217, 1073, 1970], 889)
femmesh.addFace([476, 842, 545, 2362, 2363, 1886], 890)
femmesh.addFace([542, 626, 314, 2364, 2365, 2366], 891)
femmesh.addFace([609, 667, 380, 2367, 1827, 1987], 892)
femmesh.addFace([329, 626, 542, 2368, 2364, 1951], 893)
femmesh.addFace([220, 785, 441, 2369, 2370, 2371], 894)
femmesh.addFace([261, 667, 609, 2263, 2367, 1510], 895)
femmesh.addFace([304, 648, 460, 2372, 1627, 2373], 896)
femmesh.addFace([245, 610, 449, 2374, 2357, 2375], 897)
femmesh.addFace([207, 823, 438, 2376, 2377, 1479], 898)
femmesh.addFace([445, 628, 53, 2378, 2379, 1611], 899)
femmesh.addFace([199, 838, 664, 2119, 1546, 2380], 900)
femmesh.addFace([441, 785, 289, 2370, 2251, 2222], 901)
femmesh.addFace([269, 806, 517, 2381, 2382, 2203], 902)
femmesh.addFace([290, 736, 708, 2383, 2384, 2385], 903)
femmesh.addFace([561, 682, 216, 2386, 2092, 1863], 904)
femmesh.addFace([5, 751, 6, 2141, 2102, 25], 905)
femmesh.addFace([22, 752, 23, 2105, 2143, 42], 906)
femmesh.addFace([346, 660, 493, 1183, 2387, 1604], 907)
femmesh.addFace([136, 762, 137, 2161, 2072, 163], 908)
femmesh.addFace([284, 563, 486, 1923, 2313, 2388], 909)
femmesh.addFace([439, 847, 198, 1495, 2389, 2197], 910)
femmesh.addFace([501, 785, 575, 2252, 2390, 2391], 911)
femmesh.addFace([698, 819, 438, 2392, 1478, 2299], 912)
femmesh.addFace([449, 630, 245, 2393, 2394, 2375], 913)
femmesh.addFace([446, 658, 215, 950, 2395, 1653], 914)
femmesh.addFace([458, 592, 212, 2396, 2297, 1529], 915)
femmesh.addFace([509, 730, 251, 2397, 2398, 1618], 916)
femmesh.addFace([451, 630, 293, 2399, 2400, 1760], 917)
femmesh.addFace([245, 630, 451, 2394, 2399, 2401], 918)
femmesh.addFace([441, 748, 220, 2402, 2040, 2371], 919)
femmesh.addFace([486, 569, 284, 1113, 975, 2388], 920)
femmesh.addFace([103, 632, 504, 2403, 2404, 2227], 921)
femmesh.addFace([62, 601, 61, 2405, 2272, 88], 922)
femmesh.addFace([497, 640, 149, 2406, 2257, 1874], 923)
femmesh.addFace([697, 779, 390, 2407, 2408, 1896], 924)
femmesh.addFace([313, 561, 492, 2409, 2157, 1724], 925)
femmesh.addFace([195, 661, 460, 2206, 2410, 2113], 926)
femmesh.addFace([568, 738, 454, 2411, 2116, 2304], 927)
femmesh.addFace([571, 768, 379, 2412, 1518, 2035], 928)
femmesh.addFace([490, 812, 404, 1922, 2005, 1614], 929)
femmesh.addFace([460, 661, 304, 2410, 2233, 2373], 930)
femmesh.addFace([517, 806, 786, 2382, 2413, 2414], 931)
femmesh.addFace([349, 779, 679, 2415, 1459, 1278], 932)
femmesh.addFace([456, 601, 62, 2416, 2405, 1796], 933)
femmesh.addFace([614, 685, 212, 2417, 2418, 2283], 934)
femmesh.addFace([445, 759, 326, 1610, 2419, 2290], 935)
femmesh.addFace([384, 685, 614, 2420, 2417, 2421], 936)
femmesh.addFace([442, 755, 204, 2422, 2423, 1312], 937)
femmesh.addFace([150, 704, 151, 2012, 2424, 177], 938)
femmesh.addFace([312, 819, 698, 2425, 2392, 2057], 939)
femmesh.addFace([239, 843, 441, 2426, 2427, 2193], 940)
femmesh.addFace([276, 852, 452, 2428, 2429, 1445], 941)
femmesh.addFace([241, 815, 445, 2430, 2431, 2296], 942)
femmesh.addFace([156, 624, 157, 2432, 2433, 183], 943)
femmesh.addFace([145, 631, 146, 1523, 2434, 172], 944)
femmesh.addFace([505, 731, 433, 2435, 2306, 1637], 945)
femmesh.addFace([706, 720, 330, 2082, 2436, 2437], 946)
femmesh.addFace([666, 736, 195, 2438, 2439, 2112], 947)
femmesh.addFace([210, 829, 464, 2440, 2441, 2256], 948)
femmesh.addFace([251, 679, 634, 1293, 2442, 1616], 949)
femmesh.addFace([634, 679, 385, 2442, 1155, 1141], 950)
femmesh.addFace([603, 803, 392, 2443, 2444, 1830], 951)
femmesh.addFace([284, 640, 497, 2094, 2406, 1924], 952)
femmesh.addFace([104, 632, 103, 2315, 2403, 123], 953)
femmesh.addFace([493, 660, 298, 2387, 2445, 2446], 954)
femmesh.addFace([287, 670, 471, 1356, 2447, 1146], 955)
femmesh.addFace([612, 761, 60, 2448, 2449, 2450], 956)
femmesh.addFace([512, 729, 111, 2451, 2452, 2176], 957)
femmesh.addFace([448, 727, 200, 1217, 1472, 1965], 958)
femmesh.addFace([102, 650, 101, 2453, 2454, 121], 959)
femmesh.addFace([275, 768, 571, 2455, 2412, 1178], 960)
femmesh.addFace([192, 651, 498, 2186, 2456, 2457], 961)
femmesh.addFace([396, 818, 495, 2458, 2459, 1487], 962)
femmesh.addFace([283, 574, 504, 2460, 2461, 2462], 963)
femmesh.addFace([459, 715, 194, 2095, 2463, 2278], 964)
femmesh.addFace([569, 611, 221, 1125, 2464, 2465], 965)
femmesh.addFace([634, 727, 332, 1473, 2210, 1697], 966)
femmesh.addFace([705, 825, 300, 2466, 2467, 2468], 967)
femmesh.addFace([304, 596, 511, 2234, 2469, 2470], 968)
femmesh.addFace([564, 736, 290, 2471, 2383, 2472], 969)
femmesh.addFace([511, 596, 219, 2469, 2473, 2319], 970)
femmesh.addFace([466, 692, 566, 2474, 2475, 2026], 971)
femmesh.addFace([13, 616, 14, 1993, 2476, 33], 972)
femmesh.addFace([496, 793, 258, 1989, 2477, 1206], 973)
femmesh.addFace([551, 809, 408, 2478, 1865, 1948], 974)
femmesh.addFace([230, 850, 455, 2479, 1158, 914], 975)
femmesh.addFace([113, 689, 112, 895, 2480, 132], 976)
femmesh.addFace([47, 695, 46, 2481, 1540, 73], 977)
femmesh.addFace([146, 631, 465, 2434, 1664, 1821], 978)
femmesh.addFace([309, 811, 621, 2482, 2267, 2022], 979)
femmesh.addFace([471, 670, 253, 2447, 1186, 1071], 980)
femmesh.addFace([501, 575, 288, 2391, 1482, 2347], 981)
femmesh.addFace([157, 624, 475, 2433, 2483, 1877], 982)
femmesh.addFace([511, 574, 283, 2318, 2460, 1705], 983)
femmesh.addFace([234, 779, 697, 1460, 2407, 2163], 984)
femmesh.addFace([324, 612, 601, 1639, 2270, 2484], 985)
femmesh.addFace([504, 574, 337, 2461, 2485, 2486], 986)
femmesh.addFace([307, 600, 585, 2324, 2487, 2488], 987)
femmesh.addFace([236, 803, 603, 2237, 2443, 2230], 988)
femmesh.addFace([522, 792, 281, 2312, 2329, 1952], 989)
femmesh.addFace([618, 754, 330, 2489, 2490, 2491], 990)
femmesh.addFace([393, 754, 618, 2492, 2489, 1602], 991)
femmesh.addFace([543, 737, 222, 2493, 2353, 1709], 992)
femmesh.addFace([503, 577, 277, 2169, 2494, 1374], 993)
femmesh.addFace([252, 760, 464, 2055, 2495, 1027], 994)
femmesh.addFace([375, 751, 746, 2496, 2140, 2497], 995)
femmesh.addFace([745, 752, 376, 2144, 2498, 2499], 996)
femmesh.addFace([529, 699, 262, 2500, 2501, 1346], 997)
femmesh.addFace([323, 847, 690, 2502, 1494, 2503], 998)
femmesh.addFace([511, 578, 304, 1704, 2504, 2470], 999)
femmesh.addFace([451, 698, 245, 1646, 2298, 2401], 1000)
femmesh.addFace([518, 717, 100, 2505, 2506, 2260], 1001)
femmesh.addFace([503, 733, 215, 1299, 2216, 2170], 1002)
femmesh.addFace([354, 809, 551, 1968, 2478, 2286], 1003)
femmesh.addFace([199, 844, 590, 2507, 1739, 2107], 1004)
femmesh.addFace([386, 710, 524, 1623, 2508, 1542], 1005)
femmesh.addFace([282, 788, 523, 2345, 2348, 1969], 1006)
femmesh.addFace([16, 705, 17, 2509, 2510, 36], 1007)
femmesh.addFace([378, 803, 623, 2511, 2236, 1525], 1008)
femmesh.addFace([455, 673, 351, 1800, 2512, 915], 1009)
femmesh.addFace([251, 730, 456, 2398, 2513, 1294], 1010)
femmesh.addFace([273, 666, 460, 1249, 2111, 1471], 1011)
femmesh.addFace([472, 642, 297, 2016, 1944, 1642], 1012)
femmesh.addFace([316, 564, 548, 2514, 2515, 2516], 1013)
femmesh.addFace([148, 707, 497, 2517, 2518, 1873], 1014)
femmesh.addFace([497, 707, 318, 2518, 2198, 1920], 1015)
femmesh.addFace([509, 710, 324, 2519, 1622, 2520], 1016)
femmesh.addFace([218, 710, 509, 2521, 2519, 2522], 1017)
femmesh.addFace([155, 734, 156, 2352, 2523, 182], 1018)
femmesh.addFace([362, 848, 531, 2524, 2171, 1784], 1019)
femmesh.addFace([514, 596, 361, 2525, 2154, 1264], 1020)
femmesh.addFace([343, 681, 457, 2526, 1420, 856], 1021)
femmesh.addFace([191, 657, 480, 2527, 2528, 1569], 1022)
femmesh.addFace([256, 765, 453, 2529, 2530, 1750], 1023)
femmesh.addFace([683, 773, 443, 2531, 1407, 1189], 1024)
femmesh.addFace([241, 773, 683, 2295, 2531, 2115], 1025)
femmesh.addFace([472, 653, 338, 1641, 2532, 2533], 1026)
femmesh.addFace([480, 657, 299, 2528, 2534, 2535], 1027)
femmesh.addFace([571, 711, 355, 2536, 2537, 1179], 1028)
femmesh.addFace([227, 711, 571, 1595, 2536, 2080], 1029)
femmesh.addFace([418, 755, 559, 2538, 2539, 1719], 1030)
femmesh.addFace([52, 831, 51, 2540, 2541, 78], 1031)
femmesh.addFace([453, 849, 221, 2542, 2543, 1861], 1032)
femmesh.addFace([215, 658, 577, 2395, 2544, 2168], 1033)
femmesh.addFace([617, 821, 329, 2208, 2545, 2546], 1034)
femmesh.addFace([16, 805, 705, 2547, 2548, 2509], 1035)
femmesh.addFace([560, 647, 295, 2549, 1574, 1846], 1036)
femmesh.addFace([144, 687, 145, 1849, 1538, 171], 1037)
femmesh.addFace([504, 632, 283, 2404, 2228, 2462], 1038)
femmesh.addFace([386, 761, 612, 2550, 2448, 1638], 1039)
femmesh.addFace([534, 558, 270, 2551, 2159, 2552], 1040)
femmesh.addFace([371, 558, 534, 1425, 2551, 1426], 1041)
femmesh.addFace([525, 826, 341, 2553, 2554, 1243], 1042)
femmesh.addFace([290, 767, 548, 2188, 2555, 2556], 1043)
femmesh.addFace([61, 612, 60, 2271, 2450, 87], 1044)
femmesh.addFace([464, 829, 287, 2441, 1354, 1032], 1045)
femmesh.addFace([463, 696, 309, 1324, 2246, 2023], 1046)
femmesh.addFace([508, 681, 269, 1421, 2557, 2204], 1047)
femmesh.addFace([286, 810, 582, 2558, 2241, 1390], 1048)
femmesh.addFace([225, 706, 525, 2559, 2560, 1286], 1049)
femmesh.addFace([361, 739, 605, 1903, 2561, 1244], 1050)
femmesh.addFace([525, 706, 330, 2560, 2437, 2562], 1051)
femmesh.addFace([470, 676, 207, 1710, 2076, 2174], 1052)
femmesh.addFace([325, 723, 510, 1976, 2563, 2564], 1053)
femmesh.addFace([376, 762, 745, 1255, 2160, 2499], 1054)
femmesh.addFace([491, 829, 210, 1355, 2440, 1257], 1055)
femmesh.addFace([510, 723, 106, 2563, 2565, 2036], 1056)
femmesh.addFace([23, 745, 2, 2145, 2566, 43], 1057)
femmesh.addFace([1, 746, 5, 2567, 2142, 24], 1058)
femmesh.addFace([467, 817, 363, 2568, 2569, 1589], 1059)
femmesh.addFace([2, 745, 136, 2566, 2162, 162], 1060)
femmesh.addFace([44, 746, 1, 2570, 2567, 70], 1061)
femmesh.addFace([336, 600, 499, 1014, 2323, 2089], 1062)
femmesh.addFace([486, 662, 295, 2571, 1844, 1114], 1063)
femmesh.addFace([489, 765, 256, 2572, 2529, 1730], 1064)
femmesh.addFace([319, 692, 466, 2573, 2474, 965], 1065)
femmesh.addFace([324, 730, 509, 2574, 2397, 2520], 1066)
femmesh.addFace([475, 783, 258, 2575, 1313, 2576], 1067)
femmesh.addFace([616, 742, 14, 2577, 2578, 2476], 1068)
femmesh.addFace([293, 557, 556, 1565, 2579, 1759], 1069)
femmesh.addFace([264, 686, 474, 1587, 1340, 2244], 1070)
femmesh.addFace([485, 805, 15, 2580, 2581, 2582], 1071)
femmesh.addFace([556, 557, 313, 2579, 2583, 1723], 1072)
femmesh.addFace([638, 817, 467, 2584, 2568, 2195], 1073)
femmesh.addFace([493, 625, 109, 2585, 2586, 1592], 1074)
femmesh.addFace([469, 810, 348, 2242, 2587, 927], 1075)
femmesh.addFace([254, 846, 477, 2588, 2589, 993], 1076)
femmesh.addFace([208, 842, 476, 2152, 2362, 2178], 1077)
femmesh.addFace([713, 793, 336, 2339, 1988, 2088], 1078)
femmesh.addFace([620, 668, 357, 2590, 2591, 880], 1079)
femmesh.addFace([197, 668, 620, 2592, 2590, 1837], 1080)
femmesh.addFace([327, 765, 489, 2593, 2572, 2594], 1081)
femmesh.addFace([51, 831, 537, 2541, 2595, 2596], 1082)
femmesh.addFace([299, 693, 480, 2597, 2598, 2535], 1083)
femmesh.addFace([212, 685, 483, 2418, 2599, 1527], 1084)
femmesh.addFace([298, 625, 493, 1942, 2585, 2446], 1085)
femmesh.addFace([483, 685, 311, 2599, 2600, 2601], 1086)
femmesh.addFace([541, 845, 320, 2602, 2603, 2604], 1087)
femmesh.addFace([597, 705, 300, 2605, 2468, 2125], 1088)
femmesh.addFace([140, 602, 141, 2191, 1291, 167], 1089)
femmesh.addFace([492, 627, 196, 1265, 1780, 1718], 1090)
femmesh.addFace([470, 768, 275, 2173, 2455, 1051], 1091)
femmesh.addFace([198, 836, 598, 2606, 1881, 2078], 1092)
femmesh.addFace([477, 846, 311, 2589, 2607, 945], 1093)
femmesh.addFace([190, 673, 488, 2608, 1799, 2609], 1094)
femmesh.addFace([15, 742, 485, 2610, 2611, 2582], 1095)
femmesh.addFace([338, 734, 472, 2612, 2351, 2533], 1096)
femmesh.addFace([484, 663, 267, 1590, 2099, 2613], 1097)
femmesh.addFace([473, 792, 205, 2330, 1953, 907], 1098)
femmesh.addFace([262, 776, 473, 2614, 2334, 896], 1099)
femmesh.addFace([9, 589, 10, 2213, 1271, 29], 1100)
femmesh.addFace([555, 570, 193, 2615, 1996, 1745], 1101)
femmesh.addFace([719, 740, 206, 1980, 2616, 1819], 1102)
femmesh.addFace([217, 662, 486, 2617, 2571, 2314], 1103)
femmesh.addFace([548, 564, 290, 2515, 2472, 2556], 1104)
femmesh.addFace([512, 820, 729, 2618, 2619, 2451], 1105)
femmesh.addFace([292, 694, 507, 2620, 2621, 2000], 1106)
femmesh.addFace([551, 605, 316, 2622, 2623, 2292], 1107)
femmesh.addFace([204, 788, 474, 1972, 2344, 1341], 1108)
femmesh.addFace([233, 605, 551, 1245, 2622, 1947], 1109)
femmesh.addFace([224, 821, 718, 2624, 2625, 2626], 1110)
femmesh.addFace([476, 724, 278, 2288, 901, 2303], 1111)
femmesh.addFace([257, 833, 478, 2627, 2308, 1778], 1112)
femmesh.addFace([480, 693, 253, 2598, 2355, 997], 1113)
femmesh.addFace([258, 793, 475, 2477, 2338, 2576], 1114)
femmesh.addFace([51, 712, 50, 2628, 2062, 77], 1115)
femmesh.addFace([190, 799, 477, 2629, 1306, 947], 1116)
femmesh.addFace([489, 747, 327, 2630, 2631, 2594], 1117)
femmesh.addFace([526, 585, 341, 2632, 1241, 875], 1118)
femmesh.addFace([211, 643, 490, 2010, 2316, 1613], 1119)
femmesh.addFace([435, 684, 629, 2097, 2633, 2326], 1120)
femmesh.addFace([347, 830, 502, 2090, 2634, 994], 1121)
femmesh.addFace([219, 596, 514, 2473, 2525, 1000], 1122)
femmesh.addFace([307, 585, 526, 2488, 2632, 2635], 1123)
femmesh.addFace([655, 657, 362, 2636, 2637, 1783], 1124)
femmesh.addFace([299, 657, 655, 2534, 2636, 1677], 1125)
femmesh.addFace([214, 721, 505, 2333, 2638, 1636], 1126)
femmesh.addFace([315, 576, 565, 2639, 2640, 2123], 1127)
femmesh.addFace([334, 799, 488, 1307, 2641, 2349], 1128)
femmesh.addFace([418, 807, 755, 2642, 2643, 2538], 1129)
femmesh.addFace([488, 799, 190, 2641, 2629, 2609], 1130)
femmesh.addFace([17, 597, 18, 2644, 2133, 37], 1131)
femmesh.addFace([350, 570, 555, 1270, 2615, 1090], 1132)
femmesh.addFace([374, 828, 481, 1316, 2148, 1238], 1133)
femmesh.addFace([515, 607, 240, 2064, 1654, 1197], 1134)
femmesh.addFace([756, 778, 399, 2645, 1598, 2646], 1135)
femmesh.addFace([138, 606, 139, 1883, 971, 165], 1136)
femmesh.addFace([160, 732, 161, 2647, 2648, 187], 1137)
femmesh.addFace([718, 821, 426, 2625, 2207, 1302], 1138)
femmesh.addFace([300, 825, 629, 2467, 1888, 2649], 1139)
femmesh.addFace([498, 651, 315, 2456, 2650, 1137], 1140)
femmesh.addFace([311, 846, 483, 2607, 2651, 2601], 1141)
femmesh.addFace([519, 841, 420, 2652, 2240, 1773], 1142)
femmesh.addFace([319, 699, 529, 2653, 2500, 2654], 1143)
femmesh.addFace([156, 734, 624, 2523, 2655, 2432], 1144)
femmesh.addFace([482, 719, 317, 1981, 2656, 2039], 1145)
femmesh.addFace([292, 722, 608, 2657, 1110, 2658], 1146)
femmesh.addFace([221, 637, 569, 2659, 974, 2465], 1147)
femmesh.addFace([53, 628, 52, 2379, 2660, 79], 1148)
femmesh.addFace([298, 660, 562, 2445, 2661, 1833], 1149)
femmesh.addFace([109, 625, 108, 2586, 1765, 128], 1150)
femmesh.addFace([223, 578, 553, 2662, 1812, 2663], 1151)
femmesh.addFace([58, 775, 57, 2137, 2664, 84], 1152)
femmesh.addFace([7, 622, 8, 1907, 1175, 27], 1153)
femmesh.addFace([20, 621, 21, 1835, 1908, 40], 1154)
femmesh.addFace([482, 782, 322, 2665, 2666, 2025], 1155)
femmesh.addFace([232, 782, 482, 1744, 2665, 2202], 1156)
femmesh.addFace([546, 838, 410, 2667, 2118, 1975], 1157)
femmesh.addFace([485, 742, 353, 2611, 2668, 1007], 1158)
femmesh.addFace([645, 845, 541, 2669, 2602, 2670], 1159)
femmesh.addFace([267, 769, 484, 2671, 1441, 2613], 1160)
femmesh.addFace([537, 712, 51, 2310, 2628, 2596], 1161)
femmesh.addFace([495, 818, 98, 2459, 2672, 1491], 1162)
femmesh.addFace([565, 576, 301, 2640, 2673, 1455], 1163)
femmesh.addFace([382, 805, 485, 2674, 2580, 1284], 1164)
femmesh.addFace([619, 673, 190, 2675, 2608, 2180], 1165)
femmesh.addFace([351, 673, 619, 2512, 2675, 1351], 1166)
femmesh.addFace([421, 807, 532, 2068, 2676, 1762], 1167)
femmesh.addFace([235, 749, 725, 2677, 2678, 2679], 1168)
femmesh.addFace([311, 685, 680, 2600, 2680, 1761], 1169)
femmesh.addFace([680, 685, 384, 2680, 2420, 2681], 1170)
femmesh.addFace([307, 732, 499, 2682, 2683, 2325], 1171)
femmesh.addFace([588, 714, 224, 2684, 2685, 2686], 1172)
femmesh.addFace([755, 807, 204, 2643, 2067, 2423], 1173)
femmesh.addFace([367, 664, 594, 2687, 2688, 1330], 1174)
femmesh.addFace([15, 805, 16, 2581, 2547, 35], 1175)
femmesh.addFace([317, 738, 528, 2117, 2689, 2037], 1176)
femmesh.addFace([487, 795, 202, 1509, 1489, 860], 1177)
femmesh.addFace([331, 737, 543, 2073, 2493, 2264], 1178)
femmesh.addFace([496, 706, 225, 2273, 2559, 1103], 1179)
femmesh.addFace([540, 726, 285, 1982, 2129, 1673], 1180)
femmesh.addFace([578, 648, 304, 2690, 2372, 2504], 1181)
femmesh.addFace([289, 587, 572, 2321, 2691, 2221], 1182)
femmesh.addFace([97, 796, 3, 2692, 2693, 116], 1183)
femmesh.addFace([572, 587, 356, 2691, 1170, 1220], 1184)
femmesh.addFace([219, 786, 574, 2694, 2695, 2317], 1185)
femmesh.addFace([244, 835, 811, 1198, 2696, 2697], 1186)
femmesh.addFace([494, 722, 292, 1631, 2657, 2147], 1187)
femmesh.addFace([229, 692, 529, 2698, 2699, 1409], 1188)
femmesh.addFace([529, 692, 319, 2699, 2573, 2654], 1189)
femmesh.addFace([502, 695, 47, 1686, 2481, 2106], 1190)
femmesh.addFace([3, 796, 69, 2693, 1667, 96], 1191)
femmesh.addFace([224, 653, 588, 2700, 1721, 2686], 1192)
femmesh.addFace([152, 747, 489, 2701, 2630, 1040], 1193)
femmesh.addFace([725, 749, 401, 2678, 1811, 2702], 1194)
femmesh.addFace([347, 778, 756, 967, 2645, 2091], 1195)
femmesh.addFace([224, 714, 626, 2685, 2703, 2704], 1196)
femmesh.addFace([502, 830, 270, 2634, 2705, 1687], 1197)
femmesh.addFace([458, 693, 592, 2356, 2706, 2396], 1198)
femmesh.addFace([353, 742, 616, 2668, 2577, 2707], 1199)
femmesh.addFace([615, 754, 393, 2708, 2492, 1682], 1200)
femmesh.addFace([377, 784, 544, 2709, 1893, 1496], 1201)
femmesh.addFace([14, 742, 15, 2578, 2610, 34], 1202)
femmesh.addFace([101, 650, 518, 2454, 2710, 2259], 1203)
femmesh.addFace([266, 754, 615, 2711, 2708, 2712], 1204)
femmesh.addFace([266, 826, 525, 2713, 2553, 2714], 1205)
femmesh.addFace([291, 791, 607, 2715, 2046, 1462], 1206)
femmesh.addFace([507, 694, 238, 2621, 2135, 866], 1207)
femmesh.addFace([576, 832, 301, 2716, 2717, 2673], 1208)
femmesh.addFace([617, 720, 365, 2718, 2081, 1876], 1209)
femmesh.addFace([197, 720, 617, 2719, 2718, 2720], 1210)
femmesh.addFace([518, 822, 717, 2721, 2722, 2505], 1211)
femmesh.addFace([499, 732, 160, 2683, 2647, 1815], 1212)
femmesh.addFace([66, 770, 65, 2723, 1868, 92], 1213)
femmesh.addFace([112, 689, 512, 2480, 2724, 2175], 1214)
femmesh.addFace([593, 649, 321, 2725, 2726, 2727], 1215)
femmesh.addFace([331, 649, 593, 1662, 2725, 2074], 1216)
femmesh.addFace([530, 761, 386, 2728, 2550, 1499], 1217)
femmesh.addFace([585, 600, 225, 2487, 1102, 1285], 1218)
femmesh.addFace([115, 763, 114, 2729, 2730, 134], 1219)
femmesh.addFace([536, 753, 288, 1869, 2731, 2732], 1220)
femmesh.addFace([505, 721, 296, 2638, 2733, 1173], 1221)
femmesh.addFace([330, 754, 525, 2490, 2734, 2562], 1222)
femmesh.addFace([217, 677, 662, 2182, 2735, 2617], 1223)
femmesh.addFace([662, 677, 360, 2735, 2736, 1884], 1224)
femmesh.addFace([498, 784, 192, 1894, 2737, 2457], 1225)
femmesh.addFace([525, 754, 266, 2734, 2711, 2714], 1226)
femmesh.addFace([519, 774, 243, 2738, 2739, 1737], 1227)
femmesh.addFace([301, 832, 598, 2717, 2740, 1770], 1228)
femmesh.addFace([310, 703, 577, 1379, 2741, 2742], 1229)
femmesh.addFace([359, 731, 505, 2743, 2435, 1172], 1230)
femmesh.addFace([668, 840, 357, 1963, 2184, 2591], 1231)
femmesh.addFace([774, 834, 243, 2744, 1578, 2739], 1232)
femmesh.addFace([518, 806, 269, 2745, 2381, 2746], 1233)
femmesh.addFace([647, 833, 368, 2309, 2747, 1605], 1234)
femmesh.addFace([639, 769, 366, 1442, 2748, 2749], 1235)
femmesh.addFace([709, 852, 342, 2750, 2751, 2752], 1236)
femmesh.addFace([452, 852, 709, 2429, 2750, 1803], 1237)
femmesh.addFace([566, 692, 229, 2475, 2698, 2753], 1238)
femmesh.addFace([367, 844, 664, 2754, 2755, 2687], 1239)
femmesh.addFace([326, 759, 646, 2419, 1596, 2756], 1240)
femmesh.addFace([705, 805, 382, 2548, 2674, 2757], 1241)
femmesh.addFace([161, 732, 580, 2648, 2758, 1492], 1242)
femmesh.addFace([526, 763, 307, 2759, 2760, 2635], 1243)
femmesh.addFace([718, 783, 338, 1314, 2761, 2762], 1244)
femmesh.addFace([509, 814, 218, 2763, 1754, 2522], 1245)
femmesh.addFace([332, 814, 509, 2764, 2763, 1698], 1246)
femmesh.addFace([382, 825, 705, 2350, 2466, 2757], 1247)
femmesh.addFace([512, 826, 266, 2765, 2713, 2766], 1248)
femmesh.addFace([244, 811, 532, 2697, 2767, 1933], 1249)
femmesh.addFace([288, 753, 552, 2731, 1728, 2346], 1250)
femmesh.addFace([750, 846, 254, 2768, 2588, 1558], 1251)
femmesh.addFace([483, 846, 750, 2651, 2768, 2029], 1252)
femmesh.addFace([527, 672, 302, 1978, 1446, 1781], 1253)
femmesh.addFace([337, 798, 504, 2769, 2225, 2486], 1254)
femmesh.addFace([550, 648, 223, 1628, 2770, 1058], 1255)
femmesh.addFace([302, 645, 541, 1348, 2670, 1043], 1256)
femmesh.addFace([506, 836, 366, 1882, 2771, 1252], 1257)
femmesh.addFace([147, 707, 148, 2253, 2517, 174], 1258)
femmesh.addFace([102, 798, 650, 2258, 2772, 2453], 1259)
femmesh.addFace([294, 638, 603, 2773, 2774, 1829], 1260)
femmesh.addFace([740, 744, 206, 2775, 1625, 2616], 1261)
femmesh.addFace([603, 638, 423, 2774, 2279, 2231], 1262)
femmesh.addFace([158, 713, 159, 2281, 1852, 185], 1263)
femmesh.addFace([417, 744, 740, 2020, 2775, 2776], 1264)
femmesh.addFace([100, 717, 99, 2506, 1180, 119], 1265)
femmesh.addFace([313, 682, 561, 2777, 2386, 2409], 1266)
femmesh.addFace([516, 802, 261, 2778, 1857, 1512], 1267)
femmesh.addFace([508, 845, 209, 2779, 2780, 1211], 1268)
femmesh.addFace([575, 785, 220, 2390, 2369, 1708], 1269)
femmesh.addFace([320, 845, 508, 2603, 2779, 2049], 1270)
femmesh.addFace([106, 723, 105, 2565, 1850, 125], 1271)
femmesh.addFace([510, 802, 325, 1858, 2781, 2564], 1272)
femmesh.addFace([266, 820, 512, 2782, 2618, 2766], 1273)
femmesh.addFace([111, 729, 110, 2452, 1404, 130], 1274)
femmesh.addFace([524, 710, 218, 2508, 2521, 2305], 1275)
femmesh.addFace([514, 777, 320, 1263, 2783, 960], 1276)
femmesh.addFace([151, 747, 152, 2784, 2701, 178], 1277)
femmesh.addFace([194, 647, 560, 2054, 2549, 2277], 1278)
femmesh.addFace([545, 842, 407, 2363, 2151, 1945], 1279)
femmesh.addFace([348, 823, 533, 2785, 2786, 1068], 1280)
femmesh.addFace([316, 739, 564, 2787, 2788, 2514], 1281)
femmesh.addFace([532, 807, 418, 2676, 2642, 1934], 1282)
femmesh.addFace([608, 694, 292, 2789, 2620, 2658], 1283)
femmesh.addFace([223, 648, 578, 2770, 2690, 2662], 1284)
femmesh.addFace([314, 716, 542, 1322, 2790, 2366], 1285)
femmesh.addFace([355, 711, 646, 2537, 2791, 888], 1286)
femmesh.addFace([449, 644, 630, 2361, 2792, 2393], 1287)
femmesh.addFace([646, 711, 326, 2791, 1400, 2756], 1288)
femmesh.addFace([811, 835, 415, 2696, 2793, 2268], 1289)
femmesh.addFace([708, 736, 340, 2384, 2794, 2795], 1290)
femmesh.addFace([562, 660, 226, 2661, 1160, 1738], 1291)
femmesh.addFace([325, 802, 516, 2781, 2778, 2796], 1292)
femmesh.addFace([405, 832, 812, 2797, 2798, 1921], 1293)
femmesh.addFace([228, 675, 548, 1115, 2799, 2800], 1294)
femmesh.addFace([60, 761, 59, 2449, 2801, 86], 1295)
femmesh.addFace([223, 801, 516, 2802, 2803, 1059], 1296)
femmesh.addFace([548, 675, 316, 2799, 2291, 2516], 1297)
femmesh.addFace([516, 801, 325, 2803, 1619, 2796], 1298)
femmesh.addFace([554, 771, 319, 2804, 2805, 964], 1299)
femmesh.addFace([340, 736, 666, 2794, 2438, 916], 1300)
femmesh.addFace([517, 786, 219, 2414, 2694, 999], 1301)
femmesh.addFace([513, 844, 367, 1740, 2754, 1561], 1302)
femmesh.addFace([64, 780, 63, 2806, 2232, 90], 1303)
femmesh.addFace([641, 702, 334, 2807, 862, 2149], 1304)
femmesh.addFace([577, 658, 310, 2544, 2065, 2742], 1305)
femmesh.addFace([398, 804, 699, 1474, 2808, 2809], 1306)
femmesh.addFace([414, 834, 774, 2810, 2744, 2121], 1307)
femmesh.addFace([699, 804, 262, 2808, 2811, 2501], 1308)
femmesh.addFace([566, 756, 399, 2812, 2646, 1985], 1309)
femmesh.addFace([45, 787, 44, 2813, 2814, 71], 1310)
femmesh.addFace([269, 822, 518, 2815, 2721, 2746], 1311)
femmesh.addFace([464, 760, 586, 2495, 2816, 2255], 1312)
femmesh.addFace([98, 818, 97, 2672, 2817, 117], 1313)
femmesh.addFace([315, 824, 576, 2818, 2819, 2639], 1314)
femmesh.addFace([329, 668, 617, 1950, 2820, 2546], 1315)
femmesh.addFace([192, 784, 633, 2737, 2821, 1905], 1316)
femmesh.addFace([617, 668, 197, 2820, 2592, 2720], 1317)
femmesh.addFace([308, 774, 519, 2337, 2738, 1772], 1318)
femmesh.addFace([633, 784, 377, 2821, 2709, 1537], 1319)
femmesh.addFace([294, 817, 638, 2822, 2584, 2773], 1320)
femmesh.addFace([528, 738, 280, 2689, 2823, 2824], 1321)
femmesh.addFace([57, 775, 521, 2664, 1867, 2061], 1322)
femmesh.addFace([400, 841, 519, 2109, 2652, 1736], 1323)
femmesh.addFace([294, 652, 604, 1911, 2052, 2825], 1324)
femmesh.addFace([307, 763, 580, 2760, 2826, 2827], 1325)
femmesh.addFace([235, 725, 557, 2679, 2828, 1570], 1326)
femmesh.addFace([573, 703, 370, 2829, 1378, 1399], 1327)
femmesh.addFace([277, 703, 573, 2830, 2829, 1797], 1328)
femmesh.addFace([533, 823, 207, 2786, 2376, 2077], 1329)
femmesh.addFace([542, 716, 230, 2790, 2831, 2832], 1330)
femmesh.addFace([280, 772, 528, 2833, 2250, 2824], 1331)
femmesh.addFace([624, 734, 338, 2655, 2612, 2834], 1332)
femmesh.addFace([406, 694, 608, 2136, 2789, 2100], 1333)
femmesh.addFace([114, 763, 526, 2730, 2759, 1820], 1334)
femmesh.addFace([623, 769, 267, 2835, 2671, 1526], 1335)
femmesh.addFace([317, 719, 613, 2656, 1818, 1701], 1336)
femmesh.addFace([366, 769, 623, 2748, 2835, 1328], 1337)
femmesh.addFace([557, 725, 313, 2828, 2836, 2583], 1338)
femmesh.addFace([296, 659, 595, 2001, 2245, 1221], 1339)
femmesh.addFace([580, 763, 115, 2826, 2729, 1513], 1340)
femmesh.addFace([674, 714, 358, 2837, 2838, 913], 1341)
femmesh.addFace([390, 779, 599, 2408, 2839, 1669], 1342)
femmesh.addFace([314, 714, 674, 2840, 2837, 1143], 1343)
femmesh.addFace([278, 771, 554, 2045, 2804, 2302], 1344)
femmesh.addFace([59, 761, 530, 2801, 2728, 1483], 1345)
femmesh.addFace([194, 715, 649, 2463, 2841, 1659], 1346)
femmesh.addFace([594, 664, 310, 2688, 1099, 2066], 1347)
femmesh.addFace([580, 732, 307, 2758, 2682, 2827], 1348)
femmesh.addFace([535, 787, 45, 2842, 2813, 1556], 1349)
femmesh.addFace([636, 791, 291, 2843, 2715, 2031], 1350)
femmesh.addFace([373, 791, 636, 2248, 2843, 1428], 1351)
femmesh.addFace([630, 644, 293, 2792, 2265, 2400], 1352)
femmesh.addFace([388, 749, 735, 1657, 2844, 2845], 1353)
femmesh.addFace([228, 767, 655, 2846, 1990, 1791], 1354)
femmesh.addFace([375, 787, 535, 2847, 2842, 1927], 1355)
femmesh.addFace([368, 691, 611, 2848, 2849, 1606], 1356)
femmesh.addFace([342, 852, 537, 2751, 2850, 2851], 1357)
femmesh.addFace([537, 852, 276, 2850, 2428, 2311], 1358)
femmesh.addFace([574, 786, 337, 2695, 2852, 2485], 1359)
femmesh.addFace([592, 693, 299, 2706, 2597, 2294], 1360)
femmesh.addFace([320, 777, 541, 2783, 2853, 2604], 1361)
femmesh.addFace([456, 730, 601, 2513, 2854, 2416], 1362)
femmesh.addFace([643, 790, 374, 2009, 1207, 1477], 1363)
femmesh.addFace([735, 749, 235, 2844, 2677, 1468], 1364)
femmesh.addFace([577, 703, 277, 2741, 2830, 2494], 1365)
femmesh.addFace([812, 832, 246, 2798, 2855, 2006], 1366)
femmesh.addFace([368, 764, 691, 2856, 2857, 2848], 1367)
femmesh.addFace([288, 790, 536, 1481, 2027, 2732], 1368)
femmesh.addFace([729, 820, 346, 2619, 2858, 1603], 1369)
femmesh.addFace([532, 811, 309, 2767, 2482, 1763], 1370)
femmesh.addFace([786, 806, 337, 2413, 2859, 2852], 1371)
femmesh.addFace([624, 783, 475, 2860, 2575, 2483], 1372)
femmesh.addFace([708, 743, 384, 2861, 2862, 2863], 1373)
femmesh.addFace([340, 743, 708, 1225, 2861, 2795], 1374)
femmesh.addFace([654, 795, 328, 1490, 1508, 2864], 1375)
femmesh.addFace([665, 701, 339, 2865, 1092, 1516], 1376)
femmesh.addFace([344, 701, 665, 1458, 2865, 1133], 1377)
femmesh.addFace([541, 777, 233, 2853, 1003, 1048], 1378)
femmesh.addFace([604, 817, 294, 2866, 2822, 2825], 1379)
femmesh.addFace([363, 817, 604, 2569, 2866, 891], 1380)
femmesh.addFace([425, 813, 589, 2336, 2867, 2212], 1381)
femmesh.addFace([270, 830, 534, 2705, 1792, 2552], 1382)
femmesh.addFace([327, 747, 704, 2631, 2868, 2239], 1383)
femmesh.addFace([322, 782, 546, 2666, 1743, 1974], 1384)
femmesh.addFace([384, 743, 680, 2862, 2869, 2681], 1385)
femmesh.addFace([680, 743, 231, 2869, 1961, 1022], 1386)
femmesh.addFace([548, 767, 228, 2555, 2846, 2800], 1387)
femmesh.addFace([537, 831, 342, 2595, 2870, 2851], 1388)
femmesh.addFace([245, 766, 610, 2343, 2871, 2374], 1389)
femmesh.addFace([610, 766, 348, 2871, 2872, 2873], 1390)
femmesh.addFace([195, 736, 564, 2439, 2471, 2874], 1391)
femmesh.addFace([559, 755, 442, 2539, 2422, 2167], 1392)
femmesh.addFace([599, 780, 64, 2875, 2806, 2301], 1393)
femmesh.addFace([564, 739, 195, 2788, 2205, 2874], 1394)
femmesh.addFace([349, 780, 599, 2132, 2875, 2876], 1395)
femmesh.addFace([358, 714, 588, 2838, 2684, 1910], 1396)
femmesh.addFace([280, 738, 568, 2823, 2411, 1899], 1397)
femmesh.addFace([611, 691, 221, 2849, 1859, 2464], 1398)
femmesh.addFace([431, 824, 651, 2360, 2877, 2185], 1399)
femmesh.addFace([17, 705, 597, 2510, 2605, 2644], 1400)
femmesh.addFace([650, 806, 518, 2878, 2745, 2710], 1401)
femmesh.addFace([664, 844, 199, 2755, 2507, 2380], 1402)
femmesh.addFace([629, 684, 300, 2633, 2238, 2649], 1403)
femmesh.addFace([598, 832, 405, 2740, 2797, 2079], 1404)
femmesh.addFace([230, 840, 542, 1635, 1962, 2832], 1405)
femmesh.addFace([424, 808, 602, 2322, 2879, 2190], 1406)
femmesh.addFace([290, 708, 614, 2385, 2880, 2287], 1407)
femmesh.addFace([614, 708, 384, 2880, 2863, 2421], 1408)
femmesh.addFace([605, 739, 316, 2561, 2787, 2623], 1409)
femmesh.addFace([370, 838, 546, 1547, 2667, 1126], 1410)
femmesh.addFace([553, 801, 223, 1688, 2802, 2663], 1411)
femmesh.addFace([656, 721, 437, 2881, 2332, 2354], 1412)
femmesh.addFace([198, 847, 639, 2389, 2882, 2883], 1413)
femmesh.addFace([296, 721, 656, 2733, 2881, 1936], 1414)
femmesh.addFace([691, 764, 328, 2857, 2884, 1864], 1415)
femmesh.addFace([593, 843, 239, 2885, 2426, 1372], 1416)
femmesh.addFace([229, 756, 566, 2075, 2812, 2753], 1417)
femmesh.addFace([677, 690, 360, 2886, 1082, 2736], 1418)
femmesh.addFace([651, 824, 315, 2877, 2818, 2650], 1419)
femmesh.addFace([394, 758, 616, 1939, 2887, 1992], 1420)
femmesh.addFace([319, 771, 699, 2805, 2888, 2653], 1421)
femmesh.addFace([626, 714, 314, 2703, 2840, 2365], 1422)
femmesh.addFace([610, 810, 286, 2889, 2558, 2358], 1423)
femmesh.addFace([333, 702, 641, 2340, 2807, 1276], 1424)
femmesh.addFace([618, 720, 197, 2890, 2719, 1838], 1425)
femmesh.addFace([330, 720, 618, 2436, 2890, 2491], 1426)
femmesh.addFace([579, 770, 66, 1714, 2723, 2261], 1427)
femmesh.addFace([583, 772, 280, 2891, 2833, 1467], 1428)
femmesh.addFace([601, 730, 324, 2854, 2574, 2484], 1429)
femmesh.addFace([348, 810, 610, 2587, 2889, 2873], 1430)
femmesh.addFace([654, 764, 257, 2892, 2893, 2014], 1431)
femmesh.addFace([323, 690, 677, 2503, 2886, 2183], 1432)
femmesh.addFace([649, 715, 321, 2841, 2070, 2726], 1433)
femmesh.addFace([586, 760, 335, 2816, 1593, 1298], 1434)
femmesh.addFace([391, 772, 583, 1691, 2891, 1854], 1435)
femmesh.addFace([653, 718, 338, 2894, 2762, 2532], 1436)
femmesh.addFace([224, 718, 653, 2626, 2894, 2700], 1437)
femmesh.addFace([313, 725, 682, 2836, 2895, 2777], 1438)
femmesh.addFace([403, 851, 827, 1752, 2896, 2897], 1439)
femmesh.addFace([669, 758, 237, 2898, 1938, 1567], 1440)
femmesh.addFace([353, 758, 669, 2899, 2898, 885], 1441)
femmesh.addFace([576, 824, 246, 2819, 2359, 2900], 1442)
femmesh.addFace([626, 821, 224, 2901, 2624, 2704], 1443)
femmesh.addFace([678, 819, 312, 2902, 2425, 1825], 1444)
femmesh.addFace([379, 819, 678, 1236, 2902, 2033], 1445)
femmesh.addFace([740, 741, 417, 2903, 2156, 2776], 1446)
femmesh.addFace([322, 741, 740, 1958, 2903, 2024], 1447)
femmesh.addFace([366, 836, 639, 2771, 2904, 2749], 1448)
femmesh.addFace([639, 836, 198, 2904, 2606, 2883], 1449)
femmesh.addFace([246, 832, 576, 2855, 2716, 2900], 1450)
femmesh.addFace([589, 813, 359, 2867, 2905, 1272], 1451)
femmesh.addFace([616, 758, 353, 2887, 2899, 2707], 1452)
femmesh.addFace([599, 779, 349, 2839, 2415, 2876], 1453)
femmesh.addFace([766, 823, 348, 2906, 2785, 2872], 1454)
femmesh.addFace([445, 815, 628, 2431, 2907, 2378], 1455)
femmesh.addFace([628, 815, 342, 2907, 2908, 2909], 1456)
femmesh.addFace([221, 849, 637, 2543, 2910, 2659], 1457)
femmesh.addFace([591, 814, 332, 2209, 2764, 2211], 1458)
femmesh.addFace([650, 798, 337, 2772, 2769, 2911], 1459)
femmesh.addFace([615, 820, 266, 2912, 2782, 2712], 1460)
femmesh.addFace([346, 820, 615, 2858, 2912, 1184], 1461)
femmesh.addFace([682, 725, 401, 2895, 2702, 2093], 1462)
femmesh.addFace([602, 808, 363, 2879, 1588, 1292], 1463)
femmesh.addFace([717, 822, 343, 2722, 2913, 1417], 1464)
femmesh.addFace([689, 826, 512, 2914, 2765, 2724], 1465)
femmesh.addFace([338, 783, 624, 2761, 2860, 2834], 1466)
femmesh.addFace([628, 831, 52, 2915, 2540, 2660], 1467)
femmesh.addFace([321, 843, 593, 2916, 2885, 2727], 1468)
femmesh.addFace([191, 839, 657, 2215, 2917, 2527], 1469)
femmesh.addFace([657, 839, 362, 2917, 2918, 2637], 1470)
femmesh.addFace([209, 845, 645, 2780, 2669, 1683], 1471)
femmesh.addFace([328, 764, 654, 2884, 2892, 2864], 1472)
femmesh.addFace([827, 851, 272, 2896, 2139, 2069], 1473)
femmesh.addFace([796, 818, 396, 2919, 2458, 1668], 1474)
femmesh.addFace([388, 735, 728, 2845, 2920, 1607], 1475)
femmesh.addFace([97, 818, 796, 2817, 2919, 2692], 1476)
femmesh.addFace([687, 803, 378, 2921, 2511, 1539], 1477)
femmesh.addFace([392, 803, 687, 2444, 2921, 1848], 1478)
femmesh.addFace([764, 833, 257, 2922, 2627, 2893], 1479)
femmesh.addFace([329, 821, 626, 2545, 2901, 2368], 1480)
femmesh.addFace([342, 831, 628, 2870, 2915, 2909], 1481)
femmesh.addFace([704, 747, 151, 2868, 2784, 2424], 1482)
femmesh.addFace([709, 815, 241, 2923, 2430, 1940], 1483)
femmesh.addFace([337, 806, 650, 2859, 2878, 2911], 1484)
femmesh.addFace([728, 735, 335, 2920, 1076, 1464], 1485)
femmesh.addFace([681, 822, 269, 2924, 2815, 2557], 1486)
femmesh.addFace([637, 849, 327, 2910, 2925, 2219], 1487)
femmesh.addFace([44, 787, 746, 2814, 2926, 2570], 1488)
femmesh.addFace([699, 771, 398, 2888, 2044, 2809], 1489)
femmesh.addFace([746, 787, 375, 2926, 2847, 2497], 1490)
femmesh.addFace([639, 847, 323, 2882, 2502, 1434], 1491)
femmesh.addFace([262, 804, 776, 2811, 2927, 2614], 1492)
femmesh.addFace([776, 804, 387, 2927, 1366, 1676], 1493)
femmesh.addFace([403, 827, 781, 2897, 2928, 1879], 1494)
femmesh.addFace([343, 822, 681, 2913, 2924, 2526], 1495)
femmesh.addFace([341, 826, 689, 2554, 2914, 894], 1496)
femmesh.addFace([342, 815, 709, 2908, 2923, 2752], 1497)
femmesh.addFace([748, 843, 321, 2929, 2916, 2042], 1498)
femmesh.addFace([441, 843, 748, 2427, 2929, 2402], 1499)
femmesh.addFace([359, 813, 731, 2905, 2930, 2743], 1500)
femmesh.addFace([731, 813, 249, 2930, 2335, 2307], 1501)
femmesh.addFace([368, 833, 764, 2747, 2922, 2856], 1502)
femmesh.addFace([375, 834, 751, 1926, 2931, 2496], 1503)
femmesh.addFace([752, 835, 376, 2932, 1121, 2498], 1504)
femmesh.addFace([716, 850, 230, 2933, 2479, 2831], 1505)
femmesh.addFace([415, 835, 752, 2793, 2932, 2104], 1506)
femmesh.addFace([751, 834, 414, 2931, 2810, 2103], 1507)
femmesh.addFace([372, 850, 716, 1047, 2933, 1321], 1508)
femmesh.addFace([438, 823, 766, 2377, 2906, 2342], 1509)
femmesh.addFace([327, 849, 765, 2925, 2934, 2593], 1510)
femmesh.addFace([781, 827, 352, 2928, 1268, 1995], 1511)
femmesh.addFace([765, 849, 453, 2934, 2542, 2530], 1512)
femmesh.addFace([440, 848, 839, 2243, 2935, 2214], 1513)
femmesh.addFace([839, 848, 362, 2935, 2524, 2918], 1514)
return True
|
ext | turbogears | # ext/turbogears.py
# Copyright (C) 2006-2016 the Mako authors and contributors <see AUTHORS file>
#
# This module is part of Mako and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
from mako import compat
from mako.lookup import TemplateLookup
from mako.template import Template
class TGPlugin(object):
    """TurboGears compatible Template Plugin."""

    def __init__(self, extra_vars_func=None, options=None, extension="mak"):
        """Build the plugin and its TemplateLookup.

        extra_vars_func: optional callable returning a dict that is merged
            into the render variables on every render() call.
        options: TurboGears option dict; ``mako.``-prefixed keys (prefix
            stripped) and a few bare lookup keys are forwarded to the
            TemplateLookup.
        extension: file extension appended when translating dotted template
            names to paths.
        """
        self.extra_vars_func = extra_vars_func
        self.extension = extension
        opts = options or {}
        # Collect lookup configuration: "mako."-prefixed options with the
        # prefix stripped, plus the handful of bare keys TurboGears passes
        # through directly.
        passthrough = ("directories", "filesystem_checks", "module_directory")
        lookup_options = {}
        for key, value in opts.items():
            if key.startswith("mako."):
                lookup_options[key[len("mako."):]] = value
            elif key in passthrough:
                lookup_options[key] = value
        self.lookup = TemplateLookup(**lookup_options)
        # Forward to Template() only the lookup options its constructor
        # actually accepts, discovered via getargspec introspection.
        accepted = compat.inspect_getargspec(Template.__init__)[0]
        self.tmpl_options = {
            kw: lookup_options[kw] for kw in accepted if kw in lookup_options
        }

    def load_template(self, templatename, template_string=None):
        """Loads a template from a file or a string"""
        if template_string is not None:
            return Template(template_string, **self.tmpl_options)
        # TurboGears dotted notation -> "/path/to/name.<extension>"
        if "/" not in templatename:
            templatename = "/%s.%s" % (
                templatename.replace(".", "/"),
                self.extension,
            )
        return self.lookup.get_template(templatename)

    def render(self, info, format="html", fragment=False, template=None):
        """Render *template* (a name or Template object) with *info* vars."""
        if isinstance(template, compat.string_types):
            template = self.load_template(template)
        # Merge in extra variables from the configured callable, if any.
        if self.extra_vars_func:
            info.update(self.extra_vars_func())
        return template.render(**info)
|
saveddata | skill | # ===============================================================================
# Copyright (C) 2010 Diego Duclos
#
# This file is part of eos.
#
# eos is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 2 of the License, or
# (at your option) any later version.
#
# eos is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with eos. If not, see <http://www.gnu.org/licenses/>.
# ===============================================================================
import datetime
from eos.db import saveddata_meta
from eos.saveddata.character import Skill
from sqlalchemy import Column, DateTime, ForeignKey, Integer, Table
from sqlalchemy.orm import mapper
# Association table mapping a saved character to its trained skills.
# The column name "_Skill__level" is the Python name-mangled form of the
# private ``__level`` attribute of eos.saveddata.character.Skill, so the
# classical mapper below can map it straight onto the instance attribute.
skills_table = Table(
    "characterSkills",
    saveddata_meta,
    # Owning character; first half of the composite primary key.
    Column("characterID", ForeignKey("characters.ID"), primary_key=True, index=True),
    # Skill item ID; second half of the composite primary key.
    Column("itemID", Integer, primary_key=True),
    # Trained skill level, stored via the name-mangled attribute.
    Column("_Skill__level", Integer, nullable=True),
    # Row timestamps, filled in automatically on insert / update.
    Column("created", DateTime, nullable=True, default=datetime.datetime.now),
    Column("modified", DateTime, nullable=True, onupdate=datetime.datetime.now),
)
# Classical (non-declarative) SQLAlchemy mapping of Skill onto the table.
mapper(Skill, skills_table)
|
accounts | UpstoreNet | # -*- coding: utf-8 -*-
import re
import time
from ..base.account import BaseAccount
class UpstoreNet(BaseAccount):
    __name__ = "UpstoreNet"
    __type__ = "account"
    __version__ = "0.01"
    __status__ = "testing"

    __description__ = """Upstore.net account plugin"""
    __license__ = "GPLv3"
    __authors__ = [("GammaC0de", "nitzo2001[AT]yahoo[DOT]com")]

    def grab_info(self, user, password, data):
        """Scrape premium status, expiry and remaining traffic.

        Returns a dict with ``validuntil`` (epoch seconds, -1 for eternal
        premium, or None when unknown), ``trafficleft`` and ``premium``.
        """
        validuntil = None
        trafficleft = None
        premium = True

        html = self.load("https://upstore.net/stat/download", get={"lang": "en"})

        m = re.search(r"Downloaded in last 24 hours: ([\d.,]+) of ([\d.,]+) GB", html)
        if m is not None:
            # Remaining traffic = daily quota minus what was already used.
            trafficleft = self.parse_traffic(m.group(2), "GB") - self.parse_traffic(
                m.group(1), "GB"
            )

        if "eternal premium" in html:
            validuntil = -1
        else:
            m = re.search(r"premium till\s*(\d{1,2}/\d{1,2}/\d{2})", html)
            if m is not None:
                validuntil = time.mktime(
                    time.strptime(m.group(1) + " 23:59:59", "%m/%d/%y %H:%M:%S")
                )
            else:
                m = re.search(
                    r"premium till\s*([a-zA-Z.]+\s*\d{1,2}\s*,\s*(\d{4}|\d{2}))", html
                )
                if m is not None:
                    # BUG FIX: the regex accepts both 2- and 4-digit years,
                    # but %y parses only 2 digits, so dates like
                    # "January 12 , 2020" raised an uncaught ValueError.
                    # Pick the directive matching what was captured.
                    year_fmt = "%Y" if len(m.group(2)) == 4 else "%y"
                    try:
                        validuntil = time.mktime(
                            time.strptime(
                                m.group(1) + " 23:59:59",
                                "%B %d , " + year_fmt + " %H:%M:%S",
                            )
                        )
                    except ValueError:
                        # Best-effort scraping: if the site's date format
                        # drifts (e.g. abbreviated month names like "Jan."
                        # do not match %B), report no expiry rather than
                        # crash the whole info grab.
                        validuntil = None

        return {
            "validuntil": validuntil,
            "trafficleft": trafficleft,
            "premium": premium,
        }

    def signin(self, user, password, data):
        """Log in to upstore.net, or reuse an already valid session."""
        login_url = "https://upstore.net/account/login"
        html = self.load(login_url)
        if "/account/logout" in html:
            # Session cookie still valid -> skip the login POST entirely.
            self.skip_login()

        html = self.load(
            login_url,
            post={
                "url": "https://upstore.net",
                "email": user,
                "password": password,
                "send": "Login",
            },
        )
        if "/account/logout" not in html:
            self.fail_login()
|
fitEwarStats | graph | # =============================================================================
# Copyright (C) 2010 Diego Duclos
#
# This file is part of pyfa.
#
# pyfa is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# pyfa is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with pyfa. If not, see <http://www.gnu.org/licenses/>.
# =============================================================================
import wx
from graphs.data.base import FitGraph, Input, XDef, YDef
from .getter import (
Distance2DampStrLockRangeGetter,
Distance2EcmStrMaxGetter,
Distance2GdStrRangeGetter,
Distance2NeutingStrGetter,
Distance2TdStrOptimalGetter,
Distance2TpStrGetter,
Distance2WebbingStrGetter,
)
_t = wx.GetTranslation
class FitEwarStatsGraph(FitGraph):
    """Graph of a fit's electronic-warfare effect strengths vs distance.

    Declarative FitGraph subclass: the class attributes describe the X
    axis, the selectable Y series, the user inputs, and the conversion /
    getter tables the base class uses to produce the plotted data.
    """

    # UI stuff
    internalName = "ewarStatsGraph"
    name = _t("Electronic Warfare Stats")
    # Single X axis: distance to the target, entered in kilometers.
    xDefs = [
        XDef(
            handle="distance",
            unit="km",
            label=_t("Distance"),
            mainInput=("distance", "km"),
        )
    ]
    # One selectable Y series per EWAR category.
    yDefs = [
        YDef(
            handle="neutStr",
            unit=None,
            label=_t("Cap neutralized per second"),
            selectorLabel=_t("Neuts: cap per second"),
        ),
        YDef(
            handle="webStr",
            unit="%",
            label=_t("Speed reduction"),
            selectorLabel=_t("Webs: speed reduction"),
        ),
        YDef(
            handle="ecmStrMax",
            unit=None,
            label=_t("Combined ECM strength"),
            selectorLabel=_t("ECM: combined strength"),
        ),
        YDef(
            handle="dampStrLockRange",
            unit="%",
            label=_t("Lock range reduction"),
            selectorLabel=_t("Damps: lock range reduction"),
        ),
        YDef(
            handle="tdStrOptimal",
            unit="%",
            label=_t("Turret optimal range reduction"),
            selectorLabel=_t("TDs: turret optimal range reduction"),
        ),
        YDef(
            handle="gdStrRange",
            unit="%",
            label=_t("Missile flight range reduction"),
            selectorLabel=_t("GDs: missile flight range reduction"),
        ),
        YDef(
            handle="tpStr",
            unit="%",
            label=_t("Signature radius increase"),
            selectorLabel=_t("TPs: signature radius increase"),
        ),
    ]
    # User-adjustable inputs shown in the graph controls.
    inputs = [
        Input(
            handle="distance",
            unit="km",
            label=_t("Distance"),
            iconID=1391,
            defaultValue=None,
            defaultRange=(0, 100),
        ),
        Input(
            handle="resist",
            unit="%",
            label=_t("Target resistance"),
            iconID=1393,
            defaultValue=0,
            defaultRange=(0, 100),
        ),
    ]
    # Calculation stuff
    # Convert UI units to internal units (km -> m, percent -> fraction).
    _normalizers = {
        ("distance", "km"): lambda v, src, tgt: None if v is None else v * 1000,
        ("resist", "%"): lambda v, src, tgt: None if v is None else v / 100,
    }
    # Resistance fraction is clamped to the 0..1 range.
    _limiters = {"resist": lambda src, tgt: (0, 1)}
    # Maps (x handle, y handle) to the getter that computes that series.
    _getters = {
        ("distance", "neutStr"): Distance2NeutingStrGetter,
        ("distance", "webStr"): Distance2WebbingStrGetter,
        ("distance", "ecmStrMax"): Distance2EcmStrMaxGetter,
        ("distance", "dampStrLockRange"): Distance2DampStrLockRangeGetter,
        ("distance", "tdStrOptimal"): Distance2TdStrOptimalGetter,
        ("distance", "gdStrRange"): Distance2GdStrRangeGetter,
        ("distance", "tpStr"): Distance2TpStrGetter,
    }
    # Convert internal units back to display units (m -> km).
    _denormalizers = {
        ("distance", "km"): lambda v, src, tgt: None if v is None else v / 1000
    }
|
GraphicsBindings | JoystickView | #
# JoystickView.py
# GraphicsBindings
#
# Converted by u.fiedler on feb 2005
# with great help from Bob Ippolito - Thank you Bob!
#
# The original version was written in Objective-C by Malcolm Crawford
# http://homepage.mac.com/mmalc/CocoaExamples/controllers.html
from math import atan2, cos, pi, sin, sqrt
from AppKit import *
from Foundation import *
from objc import ivar
class JoystickView(NSView):
    """Joystick-style view exposing bindable ``angle`` and ``offset``.

    ``angle`` is expected in degrees; ``offset`` is the handle's distance
    from the view center, clamped to ``maxOffset``. Both properties can be
    bound to controllers via Cocoa bindings; KVO keeps them in sync.
    """

    # Arbitrary tokens registered as the KVO context so the observation
    # callback can tell which of the two bindings fired.
    AngleObservationContext = 2091
    OffsetObservationContext = 2092
    # Maximum offset magnitude; used to clamp in updateForMouseEvent_.
    maxOffset = ivar("maxOffset", "d")
    angle = ivar("angle")  # , 'd') # expect angle in degrees
    offset = ivar("offset")  # , 'd')
    # Bookkeeping for the "angle" binding: bound controller, key path,
    # optional value transformer, and selection-state flags.
    observedObjectForAngle = ivar("observedObjectForAngle")
    observedKeyPathForAngle = ivar("observedKeyPathForAngle")
    angleValueTransformerName = ivar("angleValueTransformerName")
    badSelectionForAngle = ivar("badSelectionForAngle")
    multipleSelectionForAngle = ivar("multipleSelectionForAngle")
    allowsMultipleSelectionForAngle = ivar("allowsMultipleSelectionForAngle")
    # Same bookkeeping for the "offset" binding.
    # NOTE(review): offsetValueTransformerName is declared but never set or
    # read in the visible code -- presumably reserved; confirm before use.
    observedObjectForOffset = ivar("observedObjectForOffset")
    observedKeyPathForOffset = ivar("observedKeyPathForOffset")
    offsetValueTransformerName = ivar("offsetValueTransformerName")
    badSelectionForOffset = ivar("badSelectionForOffset")
    multipleSelectionForOffset = ivar("multipleSelectionForOffset")
    allowsMultipleSelectionForOffset = ivar("allowsMultipleSelectionForOffset")
def valueClassForBinding_(cls, binding):
# both require numbers
return NSNumber
valueClassForBinding_ = classmethod(valueClassForBinding_)
def initWithFrame_(self, frameRect):
self = super(JoystickView, self).initWithFrame_(frameRect)
if self is None:
return None
self.maxOffset = 15.0
self.offset = 0.0
self.angle = 28.0
self.multipleSelectionForAngle = False
self.multipleSelectionForOffset = False
return self
def bind_toObject_withKeyPath_options_(
self, bindingName, observableController, keyPath, options
):
if bindingName == "angle":
# observe the controller for changes -- note, pass binding identifier
# as the context, so we get that back in observeValueForKeyPath:...
# that way we can determine what needs to be updated.
observableController.addObserver_forKeyPath_options_context_(
self, keyPath, 0, self.AngleObservationContext
)
# register what controller and what keypath are
# associated with this binding
self.observedObjectForAngle = observableController
self.observedKeyPathForAngle = keyPath
# options
self.angleValueTransformerName = options["NSValueTransformerName"]
self.allowsMultipleSelectionForAngle = False
if options["NSAllowsEditingMultipleValuesSelection"]:
self.allowsMultipleSelectionForAngle = True
if bindingName == "offset":
observableController.addObserver_forKeyPath_options_context_(
self, keyPath, 0, self.OffsetObservationContext
)
self.observedObjectForOffset = observableController
self.observedKeyPathForOffset = keyPath
self.allowsMultipleSelectionForOffset = False
if options["NSAllowsEditingMultipleValuesSelection"]:
self.allowsMultipleSelectionForOffset = True
def unbind_(self, bindingName):
if bindingName == "angle":
if self.observedObjectForAngle is None:
return
self.observedObjectForAngle.removeObserver_forKeyPath_(
self, self.observedKeyPathForAngle
)
self.observedObjectForAngle = None
self.observedKeyPathForAngle = None
self.angleValueTransformerName = None
elif bindingName == "offset":
if self.observedObjectForOffset is None:
return None
self.observedObjectForOffset.removeObserver_forKeyPath_(
self, self.observedKeyPathForOffset
)
self.observedObjectForOffset = None
self.observedKeyPathForOffset = None
    def observeValueForKeyPath_ofObject_change_context_(
        self, keyPath, object, change, context
    ):
        """KVO callback: mirror controller-side value changes into the view.

        The observation *context* is the token registered in
        bind_toObject_withKeyPath_options_, so it identifies whether the
        "angle" or the "offset" binding fired. Placeholder markers
        (no selection / not applicable / multiple values) are translated
        into the bad/multiple selection flags used by drawing.
        """
        # we passed the binding as the context when we added ourselves
        # as an observer -- use that to decide what to update...
        # should ask the dictionary for the value...
        if context == self.AngleObservationContext:
            # angle changed
            # if we got a NSNoSelectionMarker or NSNotApplicableMarker, or
            # if we got a NSMultipleValuesMarker and we don't allow multiple selections
            # then note we have a bad angle
            newAngle = self.observedObjectForAngle.valueForKeyPath_(
                self.observedKeyPathForAngle
            )
            if (
                newAngle == NSNoSelectionMarker
                or newAngle == NSNotApplicableMarker
                or (
                    newAngle == NSMultipleValuesMarker
                    and not self.allowsMultipleSelectionForAngle
                )
            ):
                self.badSelectionForAngle = True
            else:
                # note we have a good selection
                # if we got a NSMultipleValuesMarker, note it but don't update value
                self.badSelectionForAngle = False
                if newAngle == NSMultipleValuesMarker:
                    self.multipleSelectionForAngle = True
                else:
                    self.multipleSelectionForAngle = False
                    # run the controller value through the configured
                    # NSValueTransformer (if any) before storing it
                    if self.angleValueTransformerName is not None:
                        vt = NSValueTransformer.valueTransformerForName_(
                            self.angleValueTransformerName
                        )
                        newAngle = vt.transformedValue_(newAngle)
                    self.setValue_forKey_(newAngle, "angle")
        if context == self.OffsetObservationContext:
            # offset changed
            # if we got a NSNoSelectionMarker or NSNotApplicableMarker, or
            # if we got a NSMultipleValuesMarker and we don't allow multiple selections
            # then note we have a bad selection
            newOffset = self.observedObjectForOffset.valueForKeyPath_(
                self.observedKeyPathForOffset
            )
            if (
                newOffset == NSNoSelectionMarker
                or newOffset == NSNotApplicableMarker
                or (
                    newOffset == NSMultipleValuesMarker
                    and not self.allowsMultipleSelectionForOffset
                )
            ):
                self.badSelectionForOffset = True
            else:
                # note we have a good selection
                # if we got a NSMultipleValuesMarker, note it but don't update value
                self.badSelectionForOffset = False
                if newOffset == NSMultipleValuesMarker:
                    self.multipleSelectionForOffset = True
                else:
                    # NOTE(review): unlike the angle path, no value
                    # transformer is applied here, and the flag is cleared
                    # after the setValue_ call rather than before --
                    # confirm this asymmetry is intentional.
                    self.setValue_forKey_(newOffset, "offset")
                    self.multipleSelectionForOffset = False
        self.setNeedsDisplay_(True)
def updateForMouseEvent_(self, event):
"""
update based on event location and selection state
behavior based on modifier key
"""
if self.badSelectionForAngle or self.badSelectionForOffset:
return # don't do anything
# find out where the event is, offset from the view center
p = self.convertPoint_fromView_(event.locationInWindow(), None)
myBounds = self.bounds()
xOffset = p.x - (myBounds.size.width / 2)
yOffset = p.y - (myBounds.size.height / 2)
newOffset = sqrt(xOffset * xOffset + yOffset * yOffset)
if newOffset > self.maxOffset:
newOffset = self.maxOffset
elif newOffset < -self.maxOffset:
newOffset = -self.maxOffset
# if we have a multiple selection for offset and Shift key is pressed
# then don't update the offset
# this allows offsets to remain constant, but change angle
if not (
self.multipleSelectionForOffset and (event.modifierFlags() & NSShiftKeyMask)
):
self.offset = newOffset
# update observed controller if set
if self.observedObjectForOffset is not None:
self.observedObjectForOffset.setValue_forKeyPath_(
newOffset, self.observedKeyPathForOffset
)
# if we have a multiple selection for angle and Shift key is pressed
# then don't update the angle
# this allows angles to remain constant, but change offset
if not (
self.multipleSelectionForAngle and (event.modifierFlags() & NSShiftKeyMask)
):
newAngle = atan2(xOffset, yOffset)
newAngleDegrees = newAngle / (pi / 180.0)
if newAngleDegrees < 0:
newAngleDegrees += 360
self.angle = newAngleDegrees
# update observed controller if set
if self.observedObjectForAngle is not None:
if self.observedObjectForAngle is not None:
vt = NSValueTransformer.valueTransformerForName_(
self.angleValueTransformerName
)
newControllerAngle = vt.reverseTransformedValue_(newAngleDegrees)
else:
newControllerAngle = angle
self.observedObjectForAngle.setValue_forKeyPath_(
newControllerAngle, self.observedKeyPathForAngle
)
self.setNeedsDisplay_(True)
    def mouseDown_(self, event):
        # Track that a drag has started and apply the click position at once.
        # NOTE(review): self.mouseDown shadows the inherited selector name;
        # it is written here and in mouseUp_ but not read in the code
        # visible here.
        self.mouseDown = True
        self.updateForMouseEvent_(event)
    def mouseDragged_(self, event):
        # Live-update angle/offset while the user drags the handle.
        self.updateForMouseEvent_(event)
    def mouseUp_(self, event):
        # Apply the final position at release and clear the drag flag.
        self.mouseDown = False
        self.updateForMouseEvent_(event)
    def acceptsFirstMouse_(self, event):
        # Accept the click that also activates the window, so the first
        # click already moves the joystick.
        return True
    def acceptsFirstResponder(self):
        # Join the responder chain so the view receives mouse events.
        return True
def drawRect_(self, rect):
"""
Basic goals here:
If either the angle or the offset has a "bad selection":
then draw a gray rectangle, and that's it.
Note: bad selection is set if there's a multiple selection
but the "allows multiple selection" binding is NO.
If there's a multiple selection for either angle or offset:
then what you draw depends on what's multiple.
- First, draw a white background to show all's OK.
- If both are multiple, then draw a special symbol.
- If offset is multiple, draw a line from the center of the view
- to the edge at the shared angle.
- If angle is multiple, draw a circle of radius the shared offset
- centered in the view.
If neither is multiple, draw a cross at the center of the view
and a cross at distance 'offset' from the center at angle 'angle'
"""
myBounds = self.bounds()
if self.badSelectionForAngle or self.badSelectionForOffset:
# "disable" and exit
NSDrawDarkBezel(myBounds, myBounds)
return
# user can do something, so draw white background and
# clip in anticipation of future drawing
NSDrawLightBezel(myBounds, myBounds)
clipRect = NSBezierPath.bezierPathWithRect_(NSInsetRect(myBounds, 2.0, 2.0))
clipRect.addClip()
if self.multipleSelectionForAngle or self.multipleSelectionForOffset:
originOffsetX = myBounds.size.width / 2 + 0.5
originOffsetY = myBounds.size.height / 2 + 0.5
if self.multipleSelectionForAngle and self.multipleSelectionForOffset:
# draw a diagonal line and circle to denote
# multiple selections for angle and offset
NSBezierPath.strokeLineFromPoint_toPoint_(
NSMakePoint(0, 0),
NSMakePoint(myBounds.size.width, myBounds.size.height),
)
circleBounds = NSMakeRect(originOffsetX - 5, originOffsetY - 5, 10, 10)
path = NSBezierPath.bezierPathWithOvalInRect_(circleBounds)
path.stroke()
return
if self.multipleSelectionForOffset:
# draw a line from center to a point outside
# bounds in the direction specified by angle
angleRadians = self.angle * (pi / 180.0)
x = sin(angleRadians) * myBounds.size.width + originOffsetX
y = cos(angleRadians) * myBounds.size.height + originOffsetX
NSBezierPath.strokeLineFromPoint_toPoint_(
NSMakePoint(originOffsetX, originOffsetY), NSMakePoint(x, y)
)
return
if self.multipleSelectionForAngle:
# draw a circle with radius the shared offset
# dont' draw radius < 1.0, else invisible
drawRadius = self.offset
if drawRadius < 1.0:
drawRadius = 1.0
offsetBounds = NSMakeRect(
originOffsetX - drawRadius,
originOffsetY - drawRadius,
drawRadius * 2,
drawRadius * 2,
)
path = NSBezierPath.bezierPathWithOvalInRect_(offsetBounds)
path.stroke()
return
# shouldn't get here
return
trans = NSAffineTransform.transform()
trans.translateXBy_yBy_(
myBounds.size.width / 2 + 0.5, myBounds.size.height / 2 + 0.5
)
trans.concat()
path = NSBezierPath.bezierPath()
# draw + where shadow extends
angleRadians = self.angle * (pi / 180.0)
xOffset = sin(angleRadians) * self.offset
yOffset = cos(angleRadians) * self.offset
path.moveToPoint_(NSMakePoint(xOffset, yOffset - 5))
path.lineToPoint_(NSMakePoint(xOffset, yOffset + 5))
path.moveToPoint_(NSMakePoint(xOffset - 5, yOffset))
path.lineToPoint_(NSMakePoint(xOffset + 5, yOffset))
NSColor.lightGrayColor().set()
path.setLineWidth_(1.5)
path.stroke()
# draw + in center of view
path = NSBezierPath.bezierPath()
path.moveToPoint_(NSMakePoint(0, -5))
path.lineToPoint_(NSMakePoint(0, +5))
path.moveToPoint_(NSMakePoint(-5, 0))
path.lineToPoint_(NSMakePoint(+5, 0))
NSColor.blackColor().set()
path.setLineWidth_(1.0)
path.stroke()
def setNilValueForKey_(self, key):
"We may get passed nil for angle or offset. Just use 0"
self.setValue_forKey_(0, key)
def validateMaxOffset_error(self, ioValue):
if ioValue == None:
# trap this in setNilValueForKey
# alternative might be to create new NSNumber with value 0 here
return True
if ioValue <= 0.0:
errorString = NSLocalizedStringFromTable(
"Maximum Offset must be greater than zero",
"Joystick",
"validation: zero maxOffset error",
)
userInfoDict = {NSLocalizedDescriptionKey: errorString}
error = NSError.alloc().initWithDomain_code_userInfo_(
"JoystickView", 1, userInfoDict
)
outError = error
return False
return True
# Advertise the bindable properties so Cocoa bindings (NSController
# instances / Interface Builder) can bind to them by name.
JoystickView.exposeBinding_("offset")
JoystickView.exposeBinding_("angle")
|
http | exceptions | # -*- coding: utf-8 -*-
import http.client
# Non-standard HTTP status codes (IIS, nginx, CloudFlare, Pantheon, ...)
# that http.client.responses does not know about.
PROPRIETARY_RESPONSES = {
    440: "Login Timeout - The client's session has expired and must log in again.",
    449: "Retry With - The server cannot honour the request because the user has not provided the required information",
    451: "Redirect - Unsupported Redirect Header",
    509: "Bandwidth Limit Exceeded",
    520: "Unknown Error",
    521: "Web Server Is Down - The origin server has refused the connection from CloudFlare",
    522: "Connection Timed Out - CloudFlare could not negotiate a TCP handshake with the origin server",
    523: "Origin Is Unreachable - CloudFlare could not reach the origin server",
    524: "A Timeout Occurred - CloudFlare did not receive a timely HTTP response",
    525: "SSL Handshake Failed - CloudFlare could not negotiate a SSL/TLS handshake with the origin server",
    526: "Invalid SSL Certificate - CloudFlare could not validate the SSL/TLS certificate that the origin server presented",
    527: "Railgun Error - CloudFlare requests timeout or failed after the WAN connection has been established",
    530: "Site Is Frozen - Used by the Pantheon web platform to indicate a site that has been frozen due to inactivity",
}


class BadHeader(Exception):
    """Raised when a server replies with an error status code.

    The message resolves the code's reason phrase from the standard
    http.client table, falling back to PROPRIETARY_RESPONSES and finally
    to a generic placeholder.
    """

    def __init__(self, code, header="", content=""):
        status = int(code)
        reason = http.client.responses.get(
            status, PROPRIETARY_RESPONSES.get(status, "unknown error code")
        )
        super().__init__(f"Bad server response: {status} {reason}")
        self.code = status
        self.header = header
        self.content = content
|
base | cache | # =============================================================================
# Copyright (C) 2010 Diego Duclos
#
# This file is part of pyfa.
#
# pyfa is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# pyfa is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with pyfa. If not, see <http://www.gnu.org/licenses/>.
# =============================================================================
class FitDataCache:
    """Minimal per-fit data cache keyed by fit ID."""

    def __init__(self):
        # fitID -> cached payload
        self._data = {}

    def clearForFit(self, fitID):
        """Drop the cached entry for one fit; silently ignore unknown IDs."""
        self._data.pop(fitID, None)

    def clearAll(self):
        """Drop every cached entry (in place, preserving the dict object)."""
        self._data.clear()
|
generators | sweep | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (C) 2009 Timothée Lecomte
# This file is part of Friture.
#
# Friture is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 3 as published by
# the Free Software Foundation.
#
# Friture is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Friture. If not, see <http://www.gnu.org/licenses/>.
import numpy as np
from PyQt5 import QtWidgets
# Default sweep parameters: full audible band (20 Hz - 22 kHz) over 1 second.
DEFAULT_SWEEP_STARTFREQUENCY = 20.0
DEFAULT_SWEEP_STOPFREQUENCY = 22000.0
DEFAULT_SWEEP_PERIOD_S = 1.0
class SweepGenerator:
    """Exponential (logarithmic) sine-sweep generator.

    Produces x(t) = cos(K * (exp(t/L) - 1)), sweeping from f1 to f2 over one
    period T.  Parameter changes requested from the UI are deferred until the
    waveform reaches a local maximum so the output stays continuous.
    """

    name = "Sweep"

    def __init__(self, parent):
        self.f1 = 20.0
        self.f2 = 22000.0
        # period requested by the user; the effective T is rounded so that
        # the sweep contains an integer number of cycles
        self.Tuser = 1.0
        self.L, self.K, self.T = self.computeParams(self.f1, self.f2, self.Tuser)
        self.timeoffset = 0.0
        # pending [L, K, T] to apply at the next waveform maximum, or None
        self.nextParams = None
        self.settings = SettingsWidget(parent)
        self.settings.spinBox_sweep_startfrequency.valueChanged.connect(self.setf1)
        self.settings.spinBox_sweep_stopfrequency.valueChanged.connect(self.setf2)
        self.settings.spinBox_sweep_period.valueChanged.connect(self.setT)

    def settingsWidget(self):
        """Return the Qt settings widget for this generator."""
        return self.settings

    def computeParams(self, f1, f2, T):
        """Return the sweep constants (L, K, adjusted T) for f1 -> f2 over ~T s.

        T is snapped to the nearest duration holding an integer number of
        periods, so the sweep wraps around without a phase discontinuity:
          phase_max = 2*np.pi*f1*T/np.log(f2/f1)*(f2/f1 - 1.) = N*2*np.pi
          => N = f1*T/np.log(f2/f1)*(f2/f1 - 1.)
        """
        Tmult = np.log(f2 / f1) / (f1 * (f2 / f1 - 1.0))
        if T >= Tmult:
            T = np.round(T / Tmult) * Tmult
        else:
            # requested period shorter than one cycle: round up, never to zero
            T = np.ceil(T / Tmult) * Tmult
        w1 = 2 * np.pi * f1
        w2 = 2 * np.pi * f2
        K = w1 * T / np.log(w2 / w1)
        L = T / np.log(w2 / w1)
        return L, K, T

    def setf1(self, f1):
        """Slot: stage a new start frequency (applied at the next maximum)."""
        if self.f1 != f1:
            self.f1 = f1
            L, K, T = self.computeParams(self.f1, self.f2, self.Tuser)
            self.nextParams = [L, K, T]

    def setf2(self, f2):
        """Slot: stage a new stop frequency (applied at the next maximum)."""
        if self.f2 != f2:
            self.f2 = f2
            L, K, T = self.computeParams(self.f1, self.f2, self.Tuser)
            self.nextParams = [L, K, T]

    def setT(self, T):
        """Slot: stage a new sweep period (applied at the next maximum)."""
        if self.Tuser != T:
            self.Tuser = T
            L, K, T = self.computeParams(self.f1, self.f2, self.Tuser)
            self.nextParams = [L, K, T]

    def signal(self, t):
        """Return the sweep samples for the time array t (seconds).

        See https://ccrma.stanford.edu/realsimple/imp_meas/Sine_Sweep_Measurement_Theory.html
        """
        result = np.cos(
            self.K * (np.exp((t - self.timeoffset) % self.T / self.L) - 1.0)
        )
        if self.nextParams is not None:
            # new params are pending: switch them in at the first local
            # maximum so the waveform stays continuous
            diff = result[1:] - result[:-1]
            maxdetection = 0.0 * diff[:-1] + (diff[1:] < 0.0) * (diff[:-1] > 0.0) * 1.0
            maxdetections = np.argwhere(maxdetection != 0.0)
            if len(maxdetections) > 0:
                # reuse the argwhere result instead of recomputing it
                firstmaxpos = maxdetections[0][0] + 1
                # the first samples use the previous parameters,
                # the samples from the maximum on use the new parameters
                [self.L, self.K, self.T] = self.nextParams
                self.nextParams = None
                self.timeoffset = t[firstmaxpos]
                result[firstmaxpos:] = np.cos(
                    self.K
                    * (
                        np.exp((t[firstmaxpos:] - self.timeoffset) % self.T / self.L)
                        - 1.0
                    )
                )
        return result
class SettingsWidget(QtWidgets.QWidget):
    """Settings form for the sweep: start/stop frequency and period."""

    def __init__(self, parent):
        super().__init__(parent)
        self.spinBox_sweep_startfrequency = self._frequency_spinbox(
            "spinBox_sweep_startfrequency", DEFAULT_SWEEP_STARTFREQUENCY
        )
        self.spinBox_sweep_stopfrequency = self._frequency_spinbox(
            "spinBox_sweep_stopfrequency", DEFAULT_SWEEP_STOPFREQUENCY
        )
        period = QtWidgets.QDoubleSpinBox(self)
        period.setKeyboardTracking(False)
        period.setDecimals(2)
        period.setSingleStep(1)
        period.setMinimum(0.01)
        period.setMaximum(60)
        period.setProperty("value", DEFAULT_SWEEP_PERIOD_S)
        period.setObjectName("spinBox_sweep_period")
        period.setSuffix(" s")
        self.spinBox_sweep_period = period
        layout = QtWidgets.QFormLayout(self)
        layout.addRow("Start frequency:", self.spinBox_sweep_startfrequency)
        layout.addRow("Stop frequency:", self.spinBox_sweep_stopfrequency)
        layout.addRow("Period:", self.spinBox_sweep_period)
        self.formLayout = layout
        self.setLayout(layout)

    def _frequency_spinbox(self, object_name, default):
        """Build one audio-band (20 Hz - 22 kHz) integer spin box."""
        box = QtWidgets.QSpinBox(self)
        box.setKeyboardTracking(False)
        box.setMinimum(20)
        box.setMaximum(22000)
        box.setProperty("value", default)
        box.setObjectName(object_name)
        box.setSuffix(" Hz")
        return box

    def saveState(self, settings):
        """Persist the three controls into a QSettings-like object."""
        settings.setValue(
            "sweep start frequency", self.spinBox_sweep_startfrequency.value()
        )
        settings.setValue(
            "sweep stop frequency", self.spinBox_sweep_stopfrequency.value()
        )
        settings.setValue("sweep period", self.spinBox_sweep_period.value())

    def restoreState(self, settings):
        """Restore the three controls, falling back to the defaults."""
        start = settings.value(
            "sweep start frequency", DEFAULT_SWEEP_STARTFREQUENCY, type=int
        )
        self.spinBox_sweep_startfrequency.setValue(start)
        stop = settings.value(
            "sweep stop frequency", DEFAULT_SWEEP_STOPFREQUENCY, type=int
        )
        self.spinBox_sweep_stopfrequency.setValue(stop)
        period = settings.value("sweep period", DEFAULT_SWEEP_PERIOD_S, type=float)
        self.spinBox_sweep_period.setValue(period)
|
libs | graphicseffects | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
#######################################################################
#
# VidCutter - media cutter & joiner
#
# copyright © 2018 Pete Alexandrou
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
#######################################################################
from PyQt5.QtCore import Qt
from PyQt5.QtGui import QPainter, QTransform
from PyQt5.QtWidgets import QGraphicsEffect
class OpacityEffect(QGraphicsEffect):
    """Dims the source item by painting it over black at reduced opacity."""

    def __init__(self, opacity: float = 0.6):
        super().__init__()
        # blend factor used when repainting the source pixmap
        self.opacity = opacity

    def draw(self, painter: QPainter) -> None:
        """Paint a black backdrop, then the source pixmap at self.opacity."""
        # logical coordinates when the source is a plain pixmap,
        # device coordinates otherwise
        if self.sourceIsPixmap():
            system = Qt.LogicalCoordinates
        else:
            system = Qt.DeviceCoordinates
        pixmap, offset = self.sourcePixmap(
            system, QGraphicsEffect.PadToEffectiveBoundingRect
        )
        painter.setWorldTransform(QTransform())
        painter.setBrush(Qt.black)
        painter.drawRect(pixmap.rect())
        painter.setOpacity(self.opacity)
        painter.drawPixmap(offset, pixmap)
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.