section stringlengths 2 30 | filename stringlengths 1 82 | text stringlengths 783 28M |
|---|---|---|
nfoview | preferences | # -*- coding: utf-8 -*-
# Copyright (C) 2008 Osmo Salomaa
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import nfoview
from gi.repository import GObject, Gtk
from nfoview.i18n import _
def boxwrap(widget):
    """Wrap widget in a plain horizontal box.

    Needed to get widget natural-size left-aligned in grid.
    """
    wrapper = Gtk.Box(orientation=Gtk.Orientation.HORIZONTAL, spacing=0)
    wrapper.append(widget)
    return wrapper
def build_label(text):
    """Return a right-aligned, dimmed label for use as a form caption."""
    caption = Gtk.Label.new(text)
    caption.set_xalign(1)
    caption.add_css_class("dim-label")
    return caption
class PreferencesDialog(Gtk.Dialog):

    """Dialog for editing font, line-spacing and color preferences.

    Every change is applied immediately: the corresponding value in
    ``nfoview.conf`` is updated and all open windows are restyled.
    There is no separate apply/cancel step.
    """

    def __init__(self, parent):
        """Build the preference grid and show the dialog over *parent*."""
        GObject.GObject.__init__(self, use_header_bar=True)
        self.set_resizable(False)
        self.set_title(_("Preferences"))
        self.set_transient_for(parent)
        grid = Gtk.Grid()
        grid.set_column_spacing(18)
        grid.set_margin_bottom(18)
        grid.set_margin_end(18)
        grid.set_margin_start(18)
        grid.set_margin_top(18)
        grid.set_row_homogeneous(True)
        grid.set_row_spacing(12)
        # Font
        self._font_label = build_label(_("Font"))
        self._font_button = Gtk.FontButton()
        def monospace(family, *args, **kwargs):
            # Only offer fixed-width fonts in the chooser.
            return family.is_monospace()
        self._font_button.set_filter_func(monospace, None)
        self._font_button.set_font(nfoview.conf.font)
        self._font_button.connect("font-set", self._on_font_button_font_set)
        grid.attach(self._font_label, 0, 0, 1, 1)
        grid.attach(boxwrap(self._font_button), 1, 0, 1, 1)
        # Line-spacing
        self._line_spacing_label = build_label(_("Line-spacing"))
        self._line_spacing_spin = Gtk.SpinButton.new_with_range(-99, 99, 1)
        self._line_spacing_spin.set_value(nfoview.conf.pixels_above_lines)
        self._line_spacing_spin.connect(
            "value-changed", self._on_line_spacing_spin_value_changed
        )
        grid.attach(self._line_spacing_label, 0, 1, 1, 1)
        grid.attach(boxwrap(self._line_spacing_spin), 1, 1, 1, 1)
        # Color scheme
        self._scheme_label = build_label(_("Color scheme"))
        self._scheme_combo = Gtk.ComboBoxText.new()
        for i, scheme in enumerate(nfoview.schemes.get_all()):
            self._scheme_combo.append_text(scheme.label)
            if scheme.name == nfoview.conf.color_scheme:
                self._scheme_combo.set_active(i)
        self._scheme_combo.connect("changed", self._on_scheme_combo_changed)
        grid.attach(self._scheme_label, 0, 2, 1, 1)
        grid.attach(boxwrap(self._scheme_combo), 1, 2, 1, 1)
        # Foreground
        self._fg_color_label = build_label(_("Foreground"))
        self._fg_color_button = Gtk.ColorButton()
        color = nfoview.util.hex_to_rgba(nfoview.conf.foreground_color)
        self._fg_color_button.set_rgba(color)
        self._fg_color_button.connect("color-set", self._on_fg_color_button_color_set)
        grid.attach(self._fg_color_label, 0, 3, 1, 1)
        grid.attach(boxwrap(self._fg_color_button), 1, 3, 1, 1)
        # Background
        self._bg_color_label = build_label(_("Background"))
        self._bg_color_button = Gtk.ColorButton()
        color = nfoview.util.hex_to_rgba(nfoview.conf.background_color)
        self._bg_color_button.set_rgba(color)
        self._bg_color_button.connect("color-set", self._on_bg_color_button_color_set)
        grid.attach(self._bg_color_label, 0, 4, 1, 1)
        grid.attach(boxwrap(self._bg_color_button), 1, 4, 1, 1)
        # Link
        self._link_color_label = build_label(_("Link"))
        self._link_color_button = Gtk.ColorButton()
        color = nfoview.util.hex_to_rgba(nfoview.conf.link_color)
        self._link_color_button.set_rgba(color)
        self._link_color_button.connect(
            "color-set", self._on_link_color_button_color_set
        )
        grid.attach(self._link_color_label, 0, 5, 1, 1)
        grid.attach(boxwrap(self._link_color_button), 1, 5, 1, 1)
        # Visited link
        self._vlink_color_label = build_label(_("Visited link"))
        self._vlink_color_button = Gtk.ColorButton()
        color = nfoview.util.hex_to_rgba(nfoview.conf.visited_link_color)
        self._vlink_color_button.set_rgba(color)
        self._vlink_color_button.connect(
            "color-set", self._on_vlink_color_button_color_set
        )
        grid.attach(self._vlink_color_label, 0, 6, 1, 1)
        grid.attach(boxwrap(self._vlink_color_button), 1, 6, 1, 1)
        self._update_sensitivities()
        self.set_child(grid)
        self.show()

    def _get_windows(self):
        """Return all open application windows, or [] if no app is running."""
        return nfoview.app.get_windows() if hasattr(nfoview, "app") else []

    def _on_bg_color_button_color_set(self, color_button):
        """Save the chosen background color and restyle all views."""
        color = color_button.get_rgba()
        color = nfoview.util.rgba_to_hex(color)
        nfoview.conf.background_color = color
        # Custom colors always edit the "custom" scheme (see _update_sensitivities).
        scheme = nfoview.schemes.get("custom")
        scheme.background = color
        for window in self._get_windows():
            window.view.update_style()

    def _on_font_button_font_set(self, font_button):
        """Save the chosen font and restyle all views."""
        nfoview.conf.font = font_button.get_font()
        for window in self._get_windows():
            window.view.update_style()

    def _on_fg_color_button_color_set(self, color_button):
        """Save the chosen foreground color and restyle all views."""
        color = color_button.get_rgba()
        color = nfoview.util.rgba_to_hex(color)
        nfoview.conf.foreground_color = color
        scheme = nfoview.schemes.get("custom")
        scheme.foreground = color
        for window in self._get_windows():
            window.view.update_style()

    def _on_line_spacing_spin_value_changed(self, spin_button):
        """Save the chosen line-spacing and apply it to all views."""
        pixels = spin_button.get_value_as_int()
        nfoview.conf.pixels_above_lines = pixels
        for window in self._get_windows():
            window.view.set_pixels_above_lines(pixels)

    def _on_link_color_button_color_set(self, color_button):
        """Save the chosen link color and restyle all views."""
        color = color_button.get_rgba()
        color = nfoview.util.rgba_to_hex(color)
        nfoview.conf.link_color = color
        scheme = nfoview.schemes.get("custom")
        scheme.link = color
        for window in self._get_windows():
            window.view.update_style()

    def _on_scheme_combo_changed(self, combo_box):
        """Switch the active color scheme and update dependent widgets."""
        index = combo_box.get_active()
        # Relies on combo rows being appended in nfoview.schemes.get_all() order.
        scheme = nfoview.schemes.get_all()[index]
        nfoview.conf.color_scheme = scheme.name
        self._update_color_buttons(scheme)
        for window in self._get_windows():
            window.view.update_style()
        self._update_sensitivities()

    def _on_vlink_color_button_color_set(self, color_button):
        """Save the chosen visited-link color and restyle all views."""
        color = color_button.get_rgba()
        color = nfoview.util.rgba_to_hex(color)
        nfoview.conf.visited_link_color = color
        scheme = nfoview.schemes.get("custom")
        scheme.vlink = color
        for window in self._get_windows():
            window.view.update_style()

    def _update_color_buttons(self, scheme):
        """Sync all color buttons with *scheme*'s colors."""
        rgba = nfoview.util.hex_to_rgba
        self._bg_color_button.set_rgba(rgba(scheme.background))
        self._fg_color_button.set_rgba(rgba(scheme.foreground))
        self._link_color_button.set_rgba(rgba(scheme.link))
        self._vlink_color_button.set_rgba(rgba(scheme.visited_link))

    def _update_sensitivities(self):
        """Enable color buttons only for the editable "custom" scheme."""
        sensitive = nfoview.conf.color_scheme == "custom"
        self._bg_color_button.set_sensitive(sensitive)
        self._fg_color_button.set_sensitive(sensitive)
        self._link_color_button.set_sensitive(sensitive)
        self._vlink_color_button.set_sensitive(sensitive)
|
serializers | channel_filter | import typing
from apps.alerts.models import AlertReceiveChannel, ChannelFilter, EscalationChain
from apps.api.serializers.alert_receive_channel import (
valid_jinja_template_for_serializer_method_field,
)
from apps.base.messaging import get_messaging_backend_from_id
from apps.telegram.models import TelegramToOrganizationConnector
from common.api_helpers.custom_fields import OrganizationFilteredPrimaryKeyRelatedField
from common.api_helpers.exceptions import BadRequest
from common.api_helpers.mixins import EagerLoadingMixin
from common.jinja_templater.apply_jinja_template import JinjaTemplateError
from common.utils import is_regex_valid
from rest_framework import serializers
class ChannelFilterSerializer(EagerLoadingMixin, serializers.ModelSerializer):
    """Serializer for a ChannelFilter (an integration "route").

    Validates the filtering term against its declared type (jinja2 or
    regex) and exposes denormalized Slack/Telegram channel details.
    """

    id = serializers.CharField(read_only=True, source="public_primary_key")
    alert_receive_channel = OrganizationFilteredPrimaryKeyRelatedField(
        queryset=AlertReceiveChannel.objects
    )
    escalation_chain = OrganizationFilteredPrimaryKeyRelatedField(
        queryset=EscalationChain.objects,
        filter_field="organization",
        allow_null=True,
        required=False,
    )
    slack_channel = serializers.SerializerMethodField()
    # Duplicated telegram channel and telegram_channel_details field for backwards compatibility for old integration page
    telegram_channel = OrganizationFilteredPrimaryKeyRelatedField(
        queryset=TelegramToOrganizationConnector.objects,
        filter_field="organization",
        allow_null=True,
        required=False,
    )
    telegram_channel_details = serializers.SerializerMethodField()
    filtering_term_as_jinja2 = serializers.SerializerMethodField()
    filtering_term = serializers.CharField(
        required=False, allow_null=True, allow_blank=True
    )

    # Relations prefetched by EagerLoadingMixin to avoid N+1 queries.
    SELECT_RELATED = ["escalation_chain", "alert_receive_channel"]

    class Meta:
        model = ChannelFilter
        fields = [
            "id",
            "alert_receive_channel",
            "escalation_chain",
            "slack_channel",
            "created_at",
            "filtering_term",
            "filtering_term_type",
            "telegram_channel",
            "is_default",
            "notify_in_slack",
            "notify_in_telegram",
            "notification_backends",
            "filtering_term_as_jinja2",
            "telegram_channel_details",
        ]
        read_only_fields = [
            "created_at",
            "is_default",
            "telegram_channel_details",
        ]

    def validate(self, data):
        """Check that filtering_term is valid for its filtering_term_type.

        A missing/None type is treated as regex for backwards compatibility.
        """
        filtering_term = data.get("filtering_term")
        filtering_term_type = data.get("filtering_term_type")
        if filtering_term_type == ChannelFilter.FILTERING_TERM_TYPE_JINJA2:
            try:
                valid_jinja_template_for_serializer_method_field(
                    {"route_template": filtering_term}
                )
            except JinjaTemplateError:
                raise serializers.ValidationError(["Jinja template is incorrect"])
        elif (
            filtering_term_type == ChannelFilter.FILTERING_TERM_TYPE_REGEX
            or filtering_term_type is None
        ):
            if filtering_term is not None:
                if not is_regex_valid(filtering_term):
                    raise serializers.ValidationError(
                        ["Regular expression is incorrect"]
                    )
        else:
            raise serializers.ValidationError(["Expression type is incorrect"])
        return data

    def get_slack_channel(self, obj):
        """Return the route's Slack channel info, or None if not set."""
        if obj.slack_channel_id is None:
            return None
        # display_name and id appears via annotate in ChannelFilterView.get_queryset()
        return {
            "display_name": obj.slack_channel_name,
            "slack_id": obj.slack_channel_id,
            "id": obj.slack_channel_pk,
        }

    def get_telegram_channel_details(self, obj) -> dict[str, typing.Any] | None:
        """Return the route's Telegram channel info, or None if not set/gone."""
        if obj.telegram_channel_id is None:
            return None
        try:
            telegram_channel = TelegramToOrganizationConnector.objects.get(
                pk=obj.telegram_channel_id
            )
            return {
                "display_name": telegram_channel.channel_name,
                "id": telegram_channel.channel_chat_id,
            }
        except TelegramToOrganizationConnector.DoesNotExist:
            return None

    def validate_slack_channel(self, slack_channel_id):
        """Check the (uppercased) Slack channel id exists for this org."""
        from apps.slack.models import SlackChannel

        if slack_channel_id is not None:
            slack_channel_id = slack_channel_id.upper()
            organization = self.context["request"].auth.organization
            try:
                organization.slack_team_identity.get_cached_channels().get(
                    slack_id=slack_channel_id
                )
            except SlackChannel.DoesNotExist:
                raise serializers.ValidationError(["Slack channel does not exist"])
        return slack_channel_id

    def validate_notification_backends(self, notification_backends):
        """Validate and merge per-backend notification settings."""
        # NOTE: updates the whole field, handling dict updates per backend
        if notification_backends is not None:
            organization = self.context["request"].auth.organization
            if not isinstance(notification_backends, dict):
                raise serializers.ValidationError(["Invalid messaging backend data"])
            # FIX: self.instance is None on create (this validator is inherited
            # by the create serializer); start from an empty dict instead of
            # raising AttributeError.
            updated = (self.instance.notification_backends if self.instance else None) or {}
            for backend_id in notification_backends:
                backend = get_messaging_backend_from_id(backend_id)
                if backend is None:
                    raise serializers.ValidationError(["Invalid messaging backend"])
                updated_data = backend.validate_channel_filter_data(
                    organization,
                    notification_backends[backend_id],
                )
                # update existing backend data
                updated[backend_id] = updated.get(backend_id, {}) | updated_data
            notification_backends = updated
        return notification_backends

    def get_filtering_term_as_jinja2(self, obj):
        """
        Returns the regex filtering term as a jinja2, for the preview before migration from regex to jinja2
        """
        if obj.filtering_term_type == ChannelFilter.FILTERING_TERM_TYPE_JINJA2:
            return obj.filtering_term
        elif obj.filtering_term_type == ChannelFilter.FILTERING_TERM_TYPE_REGEX:
            # Four curly braces will result in two curly braces in the final string
            # rf"..." is a raw f string, to keep original filtering_term
            return rf'{{{{ payload | json_dumps | regex_search("{obj.filtering_term}") }}}}'
class ChannelFilterCreateSerializer(ChannelFilterSerializer):
    """Write serializer for routes.

    Replaces the base class's read-only ``slack_channel`` method field
    with a writable id field and drops the detail-only read fields.
    """

    alert_receive_channel = OrganizationFilteredPrimaryKeyRelatedField(
        queryset=AlertReceiveChannel.objects
    )
    # Writable counterpart of the base SerializerMethodField.
    slack_channel = serializers.CharField(
        allow_null=True, required=False, source="slack_channel_id"
    )
    filtering_term = serializers.CharField(
        required=False, allow_null=True, allow_blank=True
    )

    class Meta:
        model = ChannelFilter
        fields = [
            "id",
            "alert_receive_channel",
            "escalation_chain",
            "slack_channel",
            "created_at",
            "filtering_term",
            "filtering_term_type",
            "telegram_channel",
            "is_default",
            "notify_in_slack",
            "notify_in_telegram",
            "notification_backends",
        ]
        read_only_fields = ["created_at", "is_default"]

    def to_representation(self, obj):
        """add correct slack channel data to result after instance creation/update"""
        result = super().to_representation(obj)
        if obj.slack_channel_id is None:
            result["slack_channel"] = None
        else:
            slack_team_identity = self.context[
                "request"
            ].auth.organization.slack_team_identity
            if slack_team_identity is not None:
                slack_channel = slack_team_identity.get_cached_channels(
                    slack_id=obj.slack_channel_id
                ).first()
                if slack_channel:
                    result["slack_channel"] = {
                        "display_name": slack_channel.name,
                        "slack_id": obj.slack_channel_id,
                        "id": slack_channel.public_primary_key,
                    }
        return result

    def create(self, validated_data):
        """Create the route and move it to the top of the route ordering."""
        instance = super().create(validated_data)
        instance.to_index(0)  # the new route should be the first one
        return instance
class ChannelFilterUpdateSerializer(ChannelFilterCreateSerializer):
    """Update serializer: the owning integration cannot be reassigned."""

    alert_receive_channel = OrganizationFilteredPrimaryKeyRelatedField(read_only=True)

    class Meta(ChannelFilterCreateSerializer.Meta):
        read_only_fields = ChannelFilterCreateSerializer.Meta.read_only_fields + [
            "alert_receive_channel",
        ]
        extra_kwargs = {"filtering_term": {"required": False}}

    def update(self, instance, validated_data):
        """Update a route; the default route's filtering term is immutable."""
        new_term = validated_data.get("filtering_term")
        if new_term is not None and instance.is_default:
            raise BadRequest(
                detail="Filtering term of default channel filter cannot be changed"
            )
        return super().update(instance, validated_data)
|
nfoview | view | # -*- coding: utf-8 -*-
# Copyright (C) 2005 Osmo Salomaa
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import re
import nfoview
from gi.repository import Gdk, GObject, Gtk, Pango
class TextView(Gtk.TextView):

    """Non-editable text view that renders NFO text with clickable URLs."""

    def __init__(self):
        """Initialize margins, event controllers and the initial style."""
        GObject.GObject.__init__(self)
        # Text tags of unvisited and visited links, colored in update_style.
        self._link_tags = []
        self._visited_link_tags = []
        self.set_bottom_margin(6)
        self.set_cursor_visible(False)
        self.set_editable(False)
        self.set_left_margin(6)
        self.set_pixels_above_lines(nfoview.conf.pixels_above_lines)
        self.set_pixels_below_lines(nfoview.conf.pixels_below_lines)
        self.set_right_margin(6)
        self.set_top_margin(6)
        self.set_wrap_mode(Gtk.WrapMode.NONE)
        controller = Gtk.EventControllerMotion()
        controller.set_propagation_phase(Gtk.PropagationPhase.CAPTURE)
        self.add_controller(controller)
        controller.connect("motion", self._on_motion)
        gesture = Gtk.GestureClick()
        self.add_controller(gesture)
        gesture.connect("pressed", self._on_pressed)
        self.update_style()

    def get_text(self):
        """Return all text in the buffer, without hidden characters."""
        text_buffer = self.get_buffer()
        start, end = text_buffer.get_bounds()
        return text_buffer.get_text(start, end, False)

    def _insert_text(self, text):
        """Append plain *text* to the end of the buffer."""
        text_buffer = self.get_buffer()
        itr = text_buffer.get_end_iter()
        text_buffer.insert(itr, text)

    def _insert_url(self, url):
        """Append *url* as an underlined, clickable link."""
        text_buffer = self.get_buffer()
        tag = text_buffer.create_tag(None)
        tag.props.underline = Pango.Underline.SINGLE
        # Stash the target URL on the tag itself for the click handler.
        tag.nfoview_url = url
        itr = text_buffer.get_end_iter()
        text_buffer.insert_with_tags(itr, url, tag)
        self._link_tags.append(tag)

    def _on_motion(self, controller, x, y, user_data=None):
        """Show a pointer cursor when hovering over a link."""
        window = Gtk.TextWindowType.WIDGET
        x, y = self.window_to_buffer_coords(window, x, y)
        if iter := self.get_iter_at_location(x, y):
            # get_iter_at_location returns a (found, iter) pair; .iter is the
            # text iterator at the given buffer coordinates.
            for tag in iter.iter.get_tags():
                if hasattr(tag, "nfoview_url"):
                    return self.set_cursor(Gdk.Cursor.new_from_name("pointer"))
        self.set_cursor(Gdk.Cursor.new_from_name("default"))

    def _on_pressed(self, gesture, n_press, x, y, user_data=None):
        """Open the clicked link and mark it as visited."""
        text_buffer = self.get_buffer()
        if text_buffer.get_selection_bounds():
            # Don't open links while the user is selecting text.
            return
        window = Gtk.TextWindowType.WIDGET
        x, y = self.window_to_buffer_coords(window, x, y)
        if iter := self.get_iter_at_location(x, y):
            for tag in iter.iter.get_tags():
                if hasattr(tag, "nfoview_url"):
                    nfoview.util.show_uri(tag.nfoview_url)
                    if tag in self._link_tags:
                        # Move the tag to the visited list and recolor it.
                        self._link_tags.remove(tag)
                        self._visited_link_tags.append(tag)
                        self.update_style()

    def set_text(self, text):
        """Replace the buffer contents with *text*, linkifying URLs."""
        URL = r"(\w+://(\S+\.)?\S+|www\.\S+)\.[\w\-.~:/?#\[\]@!$&'()*+,;=%]+"
        text_buffer = self.get_buffer()
        bounds = text_buffer.get_bounds()
        text_buffer.delete(*bounds)
        self._link_tags = []
        self._visited_link_tags = []
        for line in text.splitlines():
            i = 0
            # Interleave plain text and URL spans on each line.
            for match in re.finditer(URL, line):
                a, z = match.span()
                self._insert_text(line[i:a])
                self._insert_url(line[a:z])
                i = z
            self._insert_text(line[i:])
            self._insert_text("\n")
        self.update_style()

    def update_style(self):
        """Apply the configured color scheme to the view and link tags."""
        nfoview.util.apply_style(self)
        name = nfoview.conf.color_scheme
        scheme = nfoview.schemes.get(name, "default")
        for tag in self._link_tags:
            color = nfoview.util.hex_to_rgba(scheme.link)
            tag.props.foreground_rgba = color
        for tag in self._visited_link_tags:
            color = nfoview.util.hex_to_rgba(scheme.visited_link)
            tag.props.foreground_rgba = color
|
gui | Bars | """
Copyright 2007, 2008, 2009, 2015, 2016 Free Software Foundation, Inc.
This file is part of GNU Radio
SPDX-License-Identifier: GPL-2.0-or-later
"""
import logging
from gi.repository import Gio, GLib, GObject, Gtk
from . import Actions
log = logging.getLogger(__name__)
"""
# Menu/Toolbar Lists:
#
# Sub items can be 1 of 3 types
# - List Creates a section within the current menu
# - Tuple Creates a submenu using a string or action as the parent. The child
# can be another menu list or an identifier used to call a helper function.
# - Action Appends a new menu item to the current menu
#
LIST_NAME = [
[Action1, Action2], # New section
(Action3, [Action4, Action5]), # Submenu with action as parent
("Label", [Action6, Action7]), # Submenu with string as parent
("Label2", "helper") # Submenu with helper function. Calls 'create_helper()'
]
"""
# The list of actions for the toolbar.
# Each inner list is a group separated by a toolbar separator; tuples
# create buttons with dynamic submenus (see SubMenuHelper helpers).
TOOLBAR_LIST = [
    [
        (Actions.FLOW_GRAPH_NEW, "flow_graph_new_type"),
        (Actions.FLOW_GRAPH_OPEN, "flow_graph_recent"),
        Actions.FLOW_GRAPH_SAVE,
        Actions.FLOW_GRAPH_CLOSE,
    ],
    [Actions.TOGGLE_FLOW_GRAPH_VAR_EDITOR, Actions.FLOW_GRAPH_SCREEN_CAPTURE],
    [
        Actions.BLOCK_CUT,
        Actions.BLOCK_COPY,
        Actions.BLOCK_PASTE,
        Actions.ELEMENT_DELETE,
    ],
    [Actions.FLOW_GRAPH_UNDO, Actions.FLOW_GRAPH_REDO],
    [
        Actions.ERRORS_WINDOW_DISPLAY,
        Actions.FLOW_GRAPH_GEN,
        Actions.FLOW_GRAPH_EXEC,
        Actions.FLOW_GRAPH_KILL,
    ],
    [Actions.BLOCK_ROTATE_CCW, Actions.BLOCK_ROTATE_CW],
    [
        Actions.BLOCK_ENABLE,
        Actions.BLOCK_DISABLE,
        Actions.BLOCK_BYPASS,
        Actions.TOGGLE_HIDE_DISABLED_BLOCKS,
    ],
    [Actions.FIND_BLOCKS, Actions.RELOAD_BLOCKS, Actions.OPEN_HIER],
]
# The list of actions and categories for the menu bar.
# Top level is (label, contents) tuples, one per menu; inner lists are
# sections; nested tuples are submenus (see module docstring above).
MENU_BAR_LIST = [
    (
        "_File",
        [
            [
                (Actions.FLOW_GRAPH_NEW, "flow_graph_new_type"),
                Actions.FLOW_GRAPH_DUPLICATE,
                Actions.FLOW_GRAPH_OPEN,
                (Actions.FLOW_GRAPH_OPEN_RECENT, "flow_graph_recent"),
            ],
            [
                Actions.FLOW_GRAPH_SAVE,
                Actions.FLOW_GRAPH_SAVE_AS,
                Actions.FLOW_GRAPH_SAVE_COPY,
            ],
            [Actions.FLOW_GRAPH_SCREEN_CAPTURE],
            [Actions.FLOW_GRAPH_CLOSE, Actions.APPLICATION_QUIT],
        ],
    ),
    (
        "_Edit",
        [
            [Actions.FLOW_GRAPH_UNDO, Actions.FLOW_GRAPH_REDO],
            [
                Actions.BLOCK_CUT,
                Actions.BLOCK_COPY,
                Actions.BLOCK_PASTE,
                Actions.ELEMENT_DELETE,
                Actions.SELECT_ALL,
            ],
            [
                Actions.BLOCK_ROTATE_CCW,
                Actions.BLOCK_ROTATE_CW,
                ("_Align", Actions.BLOCK_ALIGNMENTS),
            ],
            [Actions.BLOCK_ENABLE, Actions.BLOCK_DISABLE, Actions.BLOCK_BYPASS],
            [Actions.BLOCK_PARAM_MODIFY],
        ],
    ),
    (
        "_View",
        [
            [Actions.TOGGLE_BLOCKS_WINDOW],
            [
                Actions.TOGGLE_CONSOLE_WINDOW,
                Actions.TOGGLE_SCROLL_LOCK,
                Actions.SAVE_CONSOLE,
                Actions.CLEAR_CONSOLE,
            ],
            [
                Actions.TOGGLE_HIDE_VARIABLES,
                Actions.TOGGLE_FLOW_GRAPH_VAR_EDITOR,
                Actions.TOGGLE_FLOW_GRAPH_VAR_EDITOR_SIDEBAR,
                Actions.TOGGLE_SHOW_PARAMETER_EXPRESSION,
                Actions.TOGGLE_SHOW_PARAMETER_EVALUATION,
            ],
            [
                Actions.TOGGLE_HIDE_DISABLED_BLOCKS,
                Actions.TOGGLE_AUTO_HIDE_PORT_LABELS,
                Actions.TOGGLE_SNAP_TO_GRID,
                Actions.TOGGLE_SHOW_BLOCK_COMMENTS,
                Actions.TOGGLE_SHOW_BLOCK_IDS,
            ],
            [Actions.TOGGLE_SHOW_CODE_PREVIEW_TAB],
            [Actions.ZOOM_IN],
            [Actions.ZOOM_OUT],
            [Actions.ZOOM_RESET],
            [Actions.ERRORS_WINDOW_DISPLAY, Actions.FIND_BLOCKS],
        ],
    ),
    (
        "_Run",
        [Actions.FLOW_GRAPH_GEN, Actions.FLOW_GRAPH_EXEC, Actions.FLOW_GRAPH_KILL],
    ),
    (
        "_Tools",
        [
            [Actions.TOOLS_RUN_FDESIGN, Actions.FLOW_GRAPH_OPEN_QSS_THEME],
            [Actions.TOGGLE_SHOW_FLOWGRAPH_COMPLEXITY],
        ],
    ),
    (
        "_Help",
        [
            [
                Actions.HELP_WINDOW_DISPLAY,
                Actions.TYPES_WINDOW_DISPLAY,
                Actions.KEYBOARD_SHORTCUTS_WINDOW_DISPLAY,
                Actions.XML_PARSER_ERRORS_DISPLAY,
            ],
            [Actions.GET_INVOLVED_WINDOW_DISPLAY, Actions.ABOUT_WINDOW_DISPLAY],
        ],
    ),
]
# The list of actions for the context menu.
# Same structure conventions as MENU_BAR_LIST: lists are sections,
# tuples are submenus.
CONTEXT_MENU_LIST = [
    [
        Actions.BLOCK_CUT,
        Actions.BLOCK_COPY,
        Actions.BLOCK_PASTE,
        Actions.ELEMENT_DELETE,
    ],
    [
        Actions.BLOCK_ROTATE_CCW,
        Actions.BLOCK_ROTATE_CW,
        Actions.BLOCK_ENABLE,
        Actions.BLOCK_DISABLE,
        Actions.BLOCK_BYPASS,
    ],
    [
        (
            "_More",
            [
                [Actions.BLOCK_CREATE_HIER, Actions.OPEN_HIER],
                [Actions.BUSSIFY_SOURCES, Actions.BUSSIFY_SINKS],
            ],
        )
    ],
    [Actions.BLOCK_PARAM_MODIFY],
]
class SubMenuHelper(object):
    """Generates custom submenus for the main menu or toolbar."""

    def __init__(self):
        # name -> (create_func, parent_obj, obj_idx, obj, set_func);
        # recorded so refresh_submenus() can rebuild dynamic submenus later.
        self.submenus = {}

    def build_submenu(self, name, parent_obj, obj_idx, obj, set_func):
        """Build the submenu *name* via its create_<name>() helper and
        attach it to *obj* using *set_func*."""
        # Get the correct helper function
        create_func = getattr(self, "create_{}".format(name))
        # Save the helper functions for rebuilding the menu later
        self.submenus[name] = (create_func, parent_obj, obj_idx, obj, set_func)
        # Actually build the menu
        set_func(obj, create_func())

    def create_flow_graph_new_type(self):
        """Different flowgraph types"""
        menu = Gio.Menu()
        platform = Gtk.Application.get_default().platform
        generate_modes = platform.get_generate_options()
        for key, name, default in generate_modes:
            # Detail string selects the flowgraph type for the action.
            target = "app.flowgraph.new_type::{}".format(key)
            menu.append(name, target)
        return menu

    def create_flow_graph_recent(self):
        """Recent flow graphs"""
        config = Gtk.Application.get_default().config
        recent_files = config.get_recent_files()
        menu = Gio.Menu()
        if len(recent_files) > 0:
            files = Gio.Menu()
            for i, file_name in enumerate(recent_files):
                target = "app.flowgraph.open_recent::{}".format(file_name)
                # Double the underscores so they are not parsed as mnemonics.
                files.append(file_name.replace("_", "__"), target)
            menu.append_section(None, files)
            # clear = Gio.Menu()
            # clear.append("Clear recent files", "app.flowgraph.clear_recent")
            # menu.append_section(None, clear)
        else:
            # Show an empty menu
            menuitem = Gio.MenuItem.new("No items found", "app.none")
            menu.append_item(menuitem)
        return menu
class MenuHelper(SubMenuHelper):
    """
    Recursively builds a menu from a given list of actions.

    Args:
    - actions: List of actions to build the menu
    - menu: Current menu being built

    Notes:
    - Tuple: Create a new submenu from the parent (1st) and child (2nd) elements
    - Action: Append to current menu
    - List: Start a new section
    """

    def __init__(self):
        SubMenuHelper.__init__(self)

    def build_menu(self, actions, menu):
        """Recursively append items for *actions* to the Gio.Menu *menu*."""
        for idx, item in enumerate(actions):
            log.debug("build_menu idx, action: %s, %s", idx, item)
            if isinstance(item, tuple):
                # Create a new submenu
                parent, child = (item[0], item[1])
                # Create the parent
                label, target = (parent, None)
                if isinstance(parent, Actions.Action):
                    label = parent.label
                    # NOTE(review): target is computed here but the menu item
                    # is created with action=None below — confirm if intended.
                    target = "{}.{}".format(parent.prefix, parent.name)
                menuitem = Gio.MenuItem.new(label, None)
                if hasattr(parent, "icon_name"):
                    menuitem.set_icon(Gio.Icon.new_for_string(parent.icon_name))
                # Create the new submenu
                if isinstance(child, list):
                    submenu = Gio.Menu()
                    self.build_menu(child, submenu)
                    menuitem.set_submenu(submenu)
                elif isinstance(child, str):
                    # Child is the name of the submenu to create
                    def set_func(obj, menu):
                        obj.set_submenu(menu)
                    self.build_submenu(child, menu, idx, menuitem, set_func)
                menu.append_item(menuitem)
            elif isinstance(item, list):
                # Create a new section
                section = Gio.Menu()
                self.build_menu(item, section)
                menu.append_section(None, section)
            elif isinstance(item, Actions.Action):
                # Append a new menuitem
                target = "{}.{}".format(item.prefix, item.name)
                menuitem = Gio.MenuItem.new(item.label, target)
                if item.icon_name:
                    menuitem.set_icon(Gio.Icon.new_for_string(item.icon_name))
                menu.append_item(menuitem)

    def refresh_submenus(self):
        """Rebuild every registered dynamic submenu in place."""
        for name in self.submenus:
            create_func, parent_obj, obj_idx, obj, set_func = self.submenus[name]
            set_func(obj, create_func())
            # Re-insert the item so the menu model picks up the new submenu.
            parent_obj.remove(obj_idx)
            parent_obj.insert_item(obj_idx, obj)
class ToolbarHelper(SubMenuHelper):
    """
    Builds a toolbar from a given list of actions.

    Args:
    - actions: List of actions to build the menu
    - item: Current menu being built

    Notes:
    - Tuple: Create a new submenu from the parent (1st) and child (2nd) elements
    - Action: Append to current menu
    - List: Start a new section
    """

    def __init__(self):
        SubMenuHelper.__init__(self)

    def build_toolbar(self, actions, current):
        """Append buttons/separators for *actions* to the toolbar *current*."""
        for idx, item in enumerate(actions):
            if isinstance(item, list):
                # Toolbar's don't have sections like menus, so call this function
                # recursively with the "section" and just append a separator.
                # NOTE(review): recursion passes self instead of current; the
                # only caller is the Toolbar itself, so in practice both name
                # the same object — confirm before relying on nesting.
                self.build_toolbar(item, self)
                current.insert(Gtk.SeparatorToolItem.new(), -1)
            elif isinstance(item, tuple):
                parent, child = (item[0], item[1])
                # Create an item with a submenu
                # Generate the submenu and add to the item.
                # Add the item to the toolbar
                button = Gtk.MenuToolButton.new()
                # The tuple should be made up of an Action and something.
                button.set_label(parent.label)
                button.set_tooltip_text(parent.tooltip)
                button.set_icon_name(parent.icon_name)
                target = "{}.{}".format(parent.prefix, parent.name)
                button.set_action_name(target)
                def set_func(obj, menu):
                    obj.set_menu(Gtk.Menu.new_from_model(menu))
                self.build_submenu(child, current, idx, button, set_func)
                current.insert(button, -1)
            elif isinstance(item, Actions.Action):
                button = Gtk.ToolButton.new()
                button.set_label(item.label)
                button.set_tooltip_text(item.tooltip)
                button.set_icon_name(item.icon_name)
                target = "{}.{}".format(item.prefix, item.name)
                button.set_action_name(target)
                current.insert(button, -1)

    def refresh_submenus(self):
        """Rebuild every registered dynamic submenu."""
        for name in self.submenus:
            create_func, parent_obj, _, obj, set_func = self.submenus[name]
            set_func(obj, create_func())
class Menu(Gio.Menu, MenuHelper):
    """Main Menu"""

    def __init__(self):
        """Build the main menu bar from MENU_BAR_LIST."""
        GObject.GObject.__init__(self)
        MenuHelper.__init__(self)
        log.debug("Building the main menu")
        self.build_menu(MENU_BAR_LIST, self)
class ContextMenu(Gio.Menu, MenuHelper):
    """Context menu for the drawing area"""

    def __init__(self):
        """Build the drawing-area context menu from CONTEXT_MENU_LIST."""
        GObject.GObject.__init__(self)
        # FIX: initialize MenuHelper/SubMenuHelper state (self.submenus), as
        # Menu does; without it refresh_submenus() and any string-named
        # submenu entry would raise AttributeError.
        MenuHelper.__init__(self)
        log.debug("Building the context menu")
        self.build_menu(CONTEXT_MENU_LIST, self)
class Toolbar(Gtk.Toolbar, ToolbarHelper):
    """The gtk toolbar with actions added from the toolbar list."""

    def __init__(self):
        """
        Parse the list of action names in the toolbar list.
        Look up the action for each name in the action list and add it to the
        toolbar.
        """
        GObject.GObject.__init__(self)
        ToolbarHelper.__init__(self)
        # Icons only; labels/tooltips are still set for accessibility.
        self.set_style(Gtk.ToolbarStyle.ICONS)
        # self.get_style_context().add_class(Gtk.STYLE_CLASS_PRIMARY_TOOLBAR)
        # SubMenuCreator.__init__(self)
        self.build_toolbar(TOOLBAR_LIST, self)
|
extractor | vrv | # coding: utf-8
from __future__ import unicode_literals
import base64
import hashlib
import hmac
import json
import random
import string
import time
from ..compat import (
compat_HTTPError,
compat_urllib_parse,
compat_urllib_parse_urlencode,
)
from ..utils import ExtractorError, float_or_none, int_or_none
from .common import InfoExtractor
class VRVBaseIE(InfoExtractor):
    """Shared VRV plumbing: OAuth 1.0-signed core API calls and CMS
    resource downloads."""

    _API_DOMAIN = None
    _API_PARAMS = {}
    # Cached query params required to sign CMS requests.
    _CMS_SIGNING = {}
    _TOKEN = None
    _TOKEN_SECRET = ""

    def _call_api(self, path, video_id, note, data=None):
        """Call the core API at *path* with an OAuth 1.0 HMAC-SHA1 signature.

        When *data* is given it is JSON-encoded and sent as a POST body.
        Raises ExtractorError with the server-provided message on HTTP 401.
        """
        # https://tools.ietf.org/html/rfc5849#section-3
        base_url = self._API_DOMAIN + "/core/" + path
        query = [
            ("oauth_consumer_key", self._API_PARAMS["oAuthKey"]),
            (
                "oauth_nonce",
                "".join([random.choice(string.ascii_letters) for _ in range(32)]),
            ),
            ("oauth_signature_method", "HMAC-SHA1"),
            ("oauth_timestamp", int(time.time())),
        ]
        if self._TOKEN:
            query.append(("oauth_token", self._TOKEN))
        encoded_query = compat_urllib_parse_urlencode(query)
        headers = self.geo_verification_headers()
        if data:
            data = json.dumps(data).encode()
            headers["Content-Type"] = "application/json"
        # Signature base string: METHOD & percent-encoded URL & encoded query.
        base_string = "&".join(
            [
                "POST" if data else "GET",
                compat_urllib_parse.quote(base_url, ""),
                compat_urllib_parse.quote(encoded_query, ""),
            ]
        )
        oauth_signature = base64.b64encode(
            hmac.new(
                (self._API_PARAMS["oAuthSecret"] + "&" + self._TOKEN_SECRET).encode(
                    "ascii"
                ),
                base_string.encode(),
                hashlib.sha1,
            ).digest()
        ).decode()
        encoded_query += "&oauth_signature=" + compat_urllib_parse.quote(
            oauth_signature, ""
        )
        try:
            return self._download_json(
                "?".join([base_url, encoded_query]),
                video_id,
                note="Downloading %s JSON metadata" % note,
                headers=headers,
                data=data,
            )
        except ExtractorError as e:
            if isinstance(e.cause, compat_HTTPError) and e.cause.code == 401:
                raise ExtractorError(
                    json.loads(e.cause.read().decode())["message"], expected=True
                )
            raise

    def _call_cms(self, path, video_id, note):
        """Download JSON from CMS *path*, attaching cached signing params.

        Signing params come from the index endpoint's "cms_signing" dict or,
        failing that, from the "/cms/"-scoped entries of "signing_policies".
        """
        if not self._CMS_SIGNING:
            index = self._call_api("index", video_id, "CMS Signing")
            self._CMS_SIGNING = index.get("cms_signing") or {}
            if not self._CMS_SIGNING:
                for signing_policy in index.get("signing_policies", []):
                    signing_path = signing_policy.get("path")
                    if signing_path and signing_path.startswith("/cms/"):
                        name, value = (
                            signing_policy.get("name"),
                            signing_policy.get("value"),
                        )
                        if name and value:
                            self._CMS_SIGNING[name] = value
        return self._download_json(
            self._API_DOMAIN + path,
            video_id,
            query=self._CMS_SIGNING,
            note="Downloading %s JSON metadata" % note,
            headers=self.geo_verification_headers(),
        )

    def _get_cms_resource(self, resource_key, video_id):
        """Resolve *resource_key* to its CMS href via the core API."""
        return self._call_api(
            "cms_resource",
            video_id,
            "resource path",
            data={
                "resource_key": resource_key,
            },
        )["__links__"]["cms_resource"]["href"]

    def _real_initialize(self):
        """Scrape API connection parameters from the VRV front page."""
        webpage = self._download_webpage(
            "https://vrv.co/", None, headers=self.geo_verification_headers()
        )
        self._API_PARAMS = self._parse_json(
            self._search_regex(
                [
                    r"window\.__APP_CONFIG__\s*=\s*({.+?})(?:</script>|;)",
                    r"window\.__APP_CONFIG__\s*=\s*({.+})",
                ],
                webpage,
                "app config",
            ),
            None,
        )["cxApiParams"]
        self._API_DOMAIN = self._API_PARAMS.get("apiDomain", "https://api.vrv.co")
class VRVIE(VRVBaseIE):
    """Extractor for single VRV watch pages.

    A watch URL may resolve to a single video or to a "movie listing"
    containing several movies; multi-item listings are returned as playlists.
    """

    IE_NAME = "vrv"
    _VALID_URL = r"https?://(?:www\.)?vrv\.co/watch/(?P<id>[A-Z0-9]+)"
    _TESTS = [
        {
            "url": "https://vrv.co/watch/GR9PNZ396/Hidden-America-with-Jonah-Ray:BOSTON-WHERE-THE-PAST-IS-THE-PRESENT",
            "info_dict": {
                "id": "GR9PNZ396",
                "ext": "mp4",
                "title": "BOSTON: WHERE THE PAST IS THE PRESENT",
                "description": "md5:4ec8844ac262ca2df9e67c0983c6b83f",
                "uploader_id": "seeso",
            },
            "params": {
                # m3u8 download
                "skip_download": True,
            },
        },
        {
            # movie listing
            "url": "https://vrv.co/watch/G6NQXZ1J6/Lily-CAT",
            "info_dict": {
                "id": "G6NQXZ1J6",
                "title": "Lily C.A.T",
                "description": "md5:988b031e7809a6aeb60968be4af7db07",
            },
            "playlist_count": 2,
        },
    ]
    _NETRC_MACHINE = "vrv"

    def _real_initialize(self):
        """Run the base initialization, then log in if credentials are set."""
        super(VRVIE, self)._real_initialize()
        email, password = self._get_login_info()
        if email is None:
            # no credentials configured -- continue anonymously
            return
        token_credentials = self._call_api(
            "authenticate/by:credentials",
            None,
            "Token Credentials",
            data={
                "email": email,
                "password": password,
            },
        )
        # OAuth token pair for subsequent API calls (presumably consumed by
        # _call_api's request signing, which is outside this view -- confirm)
        self._TOKEN = token_credentials["oauth_token"]
        self._TOKEN_SECRET = token_credentials["oauth_token_secret"]

    def _extract_vrv_formats(
        self, url, video_id, stream_format, audio_lang, hardsub_lang
    ):
        """Extract HLS/DASH formats for one stream variant.

        Returns an empty list for missing URLs or unsupported stream formats.
        The format id encodes the audio and hardsub languages when present.
        """
        if not url or stream_format not in ("hls", "dash", "adaptive_hls"):
            return []
        stream_id_list = []
        if audio_lang:
            stream_id_list.append("audio-%s" % audio_lang)
        if hardsub_lang:
            stream_id_list.append("hardsub-%s" % hardsub_lang)
        format_id = stream_format
        if stream_id_list:
            format_id += "-" + "-".join(stream_id_list)
        # the guard above guarantees stream_format matches exactly one of
        # these two branches, so adaptive_formats is always bound
        if "hls" in stream_format:
            adaptive_formats = self._extract_m3u8_formats(
                url,
                video_id,
                "mp4",
                m3u8_id=format_id,
                note="Downloading %s information" % format_id,
                fatal=False,
            )
        elif stream_format == "dash":
            adaptive_formats = self._extract_mpd_formats(
                url,
                video_id,
                mpd_id=format_id,
                note="Downloading %s information" % format_id,
                fatal=False,
            )
        if audio_lang:
            # tag audio-carrying formats with the audio language
            for f in adaptive_formats:
                if f.get("acodec") != "none":
                    f["language"] = audio_lang
        return adaptive_formats

    def _real_extract(self, url):
        """Extract a single video, or a playlist for multi-movie listings."""
        video_id = self._match_id(url)
        object_data = self._call_cms(
            self._get_cms_resource("cms:/objects/" + video_id, video_id),
            video_id,
            "object",
        )["items"][0]
        resource_path = object_data["__links__"]["resource"]["href"]
        video_data = self._call_cms(resource_path, video_id, "video")
        title = video_data["title"]
        description = video_data.get("description")
        if video_data.get("__class__") == "movie_listing":
            items = (
                self._call_cms(
                    video_data["__links__"]["movie_listing/movies"]["href"],
                    video_id,
                    "movie listing",
                ).get("items")
                or []
            )
            if len(items) != 1:
                # zero or multiple movies: hand back a playlist of watch URLs
                entries = []
                for item in items:
                    item_id = item.get("id")
                    if not item_id:
                        continue
                    entries.append(
                        self.url_result(
                            "https://vrv.co/watch/" + item_id,
                            self.ie_key(),
                            item_id,
                            item.get("title"),
                        )
                    )
                return self.playlist_result(entries, video_id, title, description)
            # exactly one movie: treat it as the video itself
            video_data = items[0]
        streams_path = video_data["__links__"].get("streams", {}).get("href")
        if not streams_path:
            # no streams link usually means the account lacks access
            self.raise_login_required()
        streams_json = self._call_cms(streams_path, video_id, "streams")
        audio_locale = streams_json.get("audio_locale")
        formats = []
        for stream_type, streams in streams_json.get("streams", {}).items():
            if stream_type in ("adaptive_hls", "adaptive_dash"):
                for stream in streams.values():
                    formats.extend(
                        self._extract_vrv_formats(
                            stream.get("url"),
                            video_id,
                            # "adaptive_hls" -> "hls", "adaptive_dash" -> "dash"
                            stream_type.split("_")[1],
                            audio_locale,
                            stream.get("hardsub_locale"),
                        )
                    )
        self._sort_formats(formats)
        subtitles = {}
        for k in ("captions", "subtitles"):
            for subtitle in streams_json.get(k, {}).values():
                subtitle_url = subtitle.get("url")
                if not subtitle_url:
                    continue
                subtitles.setdefault(subtitle.get("locale", "en-US"), []).append(
                    {
                        "url": subtitle_url,
                        "ext": subtitle.get("format", "ass"),
                    }
                )
        thumbnails = []
        for thumbnail in video_data.get("images", {}).get("thumbnails", []):
            thumbnail_url = thumbnail.get("source")
            if not thumbnail_url:
                continue
            thumbnails.append(
                {
                    "url": thumbnail_url,
                    "width": int_or_none(thumbnail.get("width")),
                    "height": int_or_none(thumbnail.get("height")),
                }
            )
        return {
            "id": video_id,
            "title": title,
            "formats": formats,
            "subtitles": subtitles,
            "thumbnails": thumbnails,
            "description": description,
            "duration": float_or_none(video_data.get("duration_ms"), 1000),
            "uploader_id": video_data.get("channel_id"),
            "series": video_data.get("series_title"),
            "season": video_data.get("season_title"),
            "season_number": int_or_none(video_data.get("season_number")),
            "season_id": video_data.get("season_id"),
            "episode": title,
            "episode_number": int_or_none(video_data.get("episode_number")),
            "episode_id": video_data.get("production_episode_id"),
        }
class VRVSeriesIE(VRVBaseIE):
    """Extractor that expands a VRV series page into per-episode entries."""

    IE_NAME = "vrv:series"
    _VALID_URL = r"https?://(?:www\.)?vrv\.co/series/(?P<id>[A-Z0-9]+)"
    _TEST = {
        "url": "https://vrv.co/series/G68VXG3G6/The-Perfect-Insider",
        "info_dict": {
            "id": "G68VXG3G6",
        },
        "playlist_mincount": 11,
    }

    def _real_extract(self, url):
        """Walk every season of the series and collect its episode URLs."""
        series_id = self._match_id(url)
        seasons_path = self._get_cms_resource(
            "cms:/seasons?series_id=" + series_id, series_id
        )
        seasons_data = self._call_cms(seasons_path, series_id, "seasons")
        entries = [
            self.url_result(
                "https://vrv.co/watch/" + episode["id"],
                "VRV",
                episode["id"],
                episode.get("title"),
            )
            for season in seasons_data.get("items", [])
            for episode in self._call_cms(
                season["__links__"]["season/episodes"]["href"],
                series_id,
                "episodes",
            ).get("items", [])
        ]
        return self.playlist_result(entries, series_id)
|
gui | projectedChangeProjectionRange | import eos.db
import gui.mainFrame
import wx
from eos.saveddata.drone import Drone as EosDrone
from eos.saveddata.fighter import Fighter as EosFighter
from eos.saveddata.fit import Fit as EosFit
from eos.saveddata.module import Module as EosModule
from gui import globalEvents as GE
from gui.fitCommands.calc.drone.projectedChangeProjectionRange import (
CalcChangeProjectedDroneProjectionRangeCommand,
)
from gui.fitCommands.calc.fighter.projectedChangeProjectionRange import (
CalcChangeProjectedFighterProjectionRangeCommand,
)
from gui.fitCommands.calc.module.projectedChangeProjectionRange import (
CalcChangeProjectedModuleProjectionRangeCommand,
)
from gui.fitCommands.calc.projectedFit.changeProjectionRange import (
CalcChangeProjectedFitProjectionRangeCommand,
)
from gui.fitCommands.helpers import InternalCommandHistory
from service.fit import Fit
class GuiChangeProjectedItemsProjectionRangeCommand(wx.Command):
    """Undoable GUI command that changes the projection range of a batch of
    projected items (modules, drones, fighters and projected fits) on a fit.

    Items are recorded as stable handles (list positions / item IDs / fit IDs)
    rather than object references so Do/Undo survive recalculations.
    """

    def __init__(self, fitID, items, projectionRange):
        wx.Command.__init__(self, True, "Change Projected Items Projection Range")
        self.internalHistory = InternalCommandHistory()
        self.fitID = fitID
        self.projectionRange = projectionRange
        self.pModPositions = []
        self.pDroneItemIDs = []
        self.pFighterPositions = []
        self.pFitIDs = []
        fit = Fit.getInstance().getFit(fitID)
        for item in items:
            if isinstance(item, EosModule):
                # modules with an exclusive system effect are skipped
                if item in fit.projectedModules and not getattr(
                    item, "isExclusiveSystemEffect", False
                ):
                    self.pModPositions.append(fit.projectedModules.index(item))
            elif isinstance(item, EosDrone):
                self.pDroneItemIDs.append(item.itemID)
            elif isinstance(item, EosFighter):
                if item in fit.projectedFighters:
                    self.pFighterPositions.append(fit.projectedFighters.index(item))
            elif isinstance(item, EosFit):
                self.pFitIDs.append(item.ID)

    def Do(self):
        """Apply the range change to every recorded item.

        Returns True if at least one internal calc command succeeded.
        """
        results = []
        needRecalc = True
        for pModPosition in self.pModPositions:
            cmd = CalcChangeProjectedModuleProjectionRangeCommand(
                fitID=self.fitID,
                position=pModPosition,
                projectionRange=self.projectionRange,
            )
            results.append(self.internalHistory.submit(cmd))
            # NOTE(review): only the *last* command's needsGuiRecalc survives
            # each of these loops, while drone/fighter changes force a recalc
            # unconditionally -- presumably intentional, but worth confirming
            # against the calc command implementations.
            needRecalc = cmd.needsGuiRecalc
        for pDroneItemID in self.pDroneItemIDs:
            cmd = CalcChangeProjectedDroneProjectionRangeCommand(
                fitID=self.fitID,
                itemID=pDroneItemID,
                projectionRange=self.projectionRange,
            )
            results.append(self.internalHistory.submit(cmd))
            needRecalc = True
        for pFighterPosition in self.pFighterPositions:
            cmd = CalcChangeProjectedFighterProjectionRangeCommand(
                fitID=self.fitID,
                position=pFighterPosition,
                projectionRange=self.projectionRange,
            )
            results.append(self.internalHistory.submit(cmd))
            needRecalc = True
        for pFitID in self.pFitIDs:
            cmd = CalcChangeProjectedFitProjectionRangeCommand(
                fitID=self.fitID,
                projectedFitID=pFitID,
                projectionRange=self.projectionRange,
            )
            results.append(self.internalHistory.submit(cmd))
            needRecalc = cmd.needsGuiRecalc
        success = any(results)
        sFit = Fit.getInstance()
        if needRecalc:
            # flush pending DB state before recalculating fit attributes
            eos.db.flush()
            sFit.recalc(self.fitID)
            sFit.fill(self.fitID)
        eos.db.commit()
        # notify the GUI that the fit changed so views refresh
        wx.PostEvent(
            gui.mainFrame.MainFrame.getInstance(), GE.FitChanged(fitIDs=(self.fitID,))
        )
        return success

    def Undo(self):
        """Revert every submitted calc command and refresh the fit/GUI."""
        success = self.internalHistory.undoAll()
        eos.db.flush()
        sFit = Fit.getInstance()
        sFit.recalc(self.fitID)
        sFit.fill(self.fitID)
        eos.db.commit()
        wx.PostEvent(
            gui.mainFrame.MainFrame.getInstance(), GE.FitChanged(fitIDs=(self.fitID,))
        )
        return success
|
sendgrid | sendgrid | """
This library allows you to quickly and easily use the SendGrid Web API v3 via
Python.
For more information on this library, see the README on Github.
http://github.com/sendgrid/sendgrid-python
For more information on the SendGrid v3 API, see the v3 docs:
http://sendgrid.com/docs/API_Reference/api_v3.html
For the user guide, code examples, and more, visit the main docs page:
http://sendgrid.com/docs/index.html
This file provides the SendGrid API Client.
"""
import os
import warnings
import python_http_client
from .version import __version__
class SendGridAPIClient(object):
    """The SendGrid API Client.

    Use this object to interact with the v3 API. For example::

        sg = sendgrid.SendGridAPIClient(apikey=os.environ.get('SENDGRID_API_KEY'))
        ...
        mail = Mail(from_email, subject, to_email, content)
        response = sg.client.mail.send.post(request_body=mail.get())

    For examples and detailed use instructions, see
    https://github.com/sendgrid/sendgrid-python
    """

    def __init__(
        self,
        apikey=None,
        api_key=None,
        impersonate_subuser=None,
        host="https://api.sendgrid.com",
        **opts,
    ):  # TODO: remove **opts for 6.x release
        """Construct a SendGrid v3 API object.

        The underlying HTTP client is configured once during construction, so
        changing attributes afterwards does not affect its behaviour.

        :param apikey: SendGrid API key; falls back to the SENDGRID_API_KEY
            environment variable when not given
        :param api_key: deprecated alias for ``apikey`` (since 5.3)
        :param impersonate_subuser: subuser to impersonate; sent via the
            "On-Behalf-Of" header by the underlying client. See
            https://sendgrid.com/docs/User_Guide/Settings/subusers.html
        :param host: base URL for API calls
        :param opts: dispatcher for deprecated arguments (e.g. ``path``);
            anything passed here only triggers a DeprecationWarning
        """
        if opts:
            warnings.warn(
                "Unsupported argument(s) provided: {}".format(list(opts.keys())),
                DeprecationWarning,
            )
        self.apikey = apikey or api_key or os.environ.get("SENDGRID_API_KEY")
        self.impersonate_subuser = impersonate_subuser
        self.host = host
        self.version = __version__
        self.useragent = "sendgrid/{0};python".format(__version__)
        self.client = python_http_client.Client(
            host=self.host, request_headers=self._default_headers, version=3
        )

    @property
    def _default_headers(self):
        """Request headers derived from the current attribute values."""
        headers = {
            "Authorization": "Bearer {0}".format(self.apikey),
            "Accept": "application/json",
        }
        headers["User-agent"] = self.useragent
        if self.impersonate_subuser:
            headers["On-Behalf-Of"] = self.impersonate_subuser
        return headers

    def reset_request_headers(self):
        """Re-sync the underlying client's headers with current attributes."""
        self.client.request_headers = self._default_headers

    @property
    def api_key(self):
        """Alias for reading the API key.

        .. deprecated:: 5.3
            Use :attr:`apikey` instead.
        """
        return self.apikey

    @api_key.setter
    def api_key(self, value):
        self.apikey = value
|
util | tornado | __author__ = "Gina Häußge <osd@foosel.net>"
__license__ = "GNU Affero General Public License http://www.gnu.org/licenses/agpl.html"
__copyright__ = "Copyright (C) 2014 The OctoPrint Project - Released under terms of the AGPLv3 License"
import logging
import mimetypes
import os
import re
import sys
from urllib.parse import urlparse
import octoprint.util
import tornado
import tornado.escape
import tornado.gen
import tornado.http1connection
import tornado.httpclient
import tornado.httpserver
import tornado.httputil
import tornado.iostream
import tornado.tcpserver
import tornado.util
import tornado.web
from zipstream.ng import ZIP_DEFLATED, ZipStream
def fix_json_encode():
    """Monkeypatch ``tornado.escape.json_encode``.

    The replacement delegates unknown types to
    ``octoprint.util.json.JsonEncoding.encode`` so globally registered types
    (e.g. frozendict) serialize, while keeping the original's ``</`` escaping.
    """
    import json

    import tornado.escape

    from octoprint.util.json import JsonEncoding

    def patched_json_encode(value):
        encoded = json.dumps(value, default=JsonEncoding.encode, allow_nan=False)
        return encoded.replace("</", "<\\/")

    tornado.escape.json_encode = patched_json_encode
def enable_per_message_deflate_extension():
    """
    This configures tornado.websocket.WebSocketHandler.get_compression_options to support the permessage-deflate extension
    to the websocket protocol, minimizing data bandwidth if clients support the extension as well
    """
    # Import the submodule locally (as fix_websocket_check_origin does): the
    # module-level imports never import tornado.websocket, so the attribute
    # access below could otherwise raise AttributeError depending on which
    # tornado submodules happen to have been imported first.
    import tornado.websocket

    def get_compression_options(self):
        # low compression level and memory footprint: bandwidth win without a
        # noticeable per-connection CPU/RAM cost
        return {"compression_level": 1, "mem_level": 1}

    tornado.websocket.WebSocketHandler.get_compression_options = get_compression_options
def fix_websocket_check_origin():
    """Patch ``WebSocketHandler.check_origin`` for case-insensitive matching.

    Performs the same-origin check against the Host header case-insensitively,
    as defined in RFC 6454, Section 4, item 5.
    """
    scheme_translation = {"wss": "https", "ws": "http"}
    default_ports = {"http": 80, "https": 443}

    def patched_check_origin(self, origin):
        def check_tuple(urlstring):
            parsed = urlparse(urlstring)
            scheme = scheme_translation.get(parsed.scheme, parsed.scheme)
            # fall back to the scheme's default port when none is given
            port = parsed.port if parsed.port else default_ports.get(scheme)
            return (scheme, parsed.hostname, port)

        return check_tuple(origin) == check_tuple(self.request.full_url())

    import tornado.websocket

    tornado.websocket.WebSocketHandler.check_origin = patched_check_origin
# ~~ More sensible logging
class RequestlessExceptionLoggingMixin(tornado.web.RequestHandler):
    """Request handler mixin that logs uncaught exceptions without dumping
    the whole request object.

    Set ``LOG_REQUEST = True`` on a subclass to include the request ``repr``
    in the error log.
    """

    LOG_REQUEST = False

    def log_exception(self, typ, value, tb, *args, **kwargs):
        if isinstance(value, tornado.web.HTTPError):
            # expected HTTP errors: warn only when they carry a log message
            if value.log_message:
                fmt = "%d %s: " + value.log_message
                fmt_args = [value.status_code, self._request_summary()]
                fmt_args += list(value.args)
                tornado.web.gen_log.warning(fmt, *fmt_args)
            return

        if self.LOG_REQUEST:
            tornado.web.app_log.error(
                "Uncaught exception %s\n%r",
                self._request_summary(),
                self.request,
                exc_info=(typ, value, tb),
            )
        else:
            tornado.web.app_log.error(
                "Uncaught exception %s",
                self._request_summary(),
                exc_info=(typ, value, tb),
            )
# ~~ CORS support
class CorsSupportMixin(tornado.web.RequestHandler):
    """
    `tornado.web.RequestHandler <http://tornado.readthedocs.org/en/branch4.0/web.html#request-handlers>`_ mixin that
    makes sure to set CORS headers similarly to the Flask backed API endpoints.
    """

    ENABLE_CORS = False

    def set_default_headers(self):
        # reflect the requesting origin on non-preflight requests when enabled
        if not self.ENABLE_CORS or self.request.method == "OPTIONS":
            return
        origin = self.request.headers.get("Origin")
        if origin:
            self.set_header("Access-Control-Allow-Origin", origin)

    @tornado.gen.coroutine
    def options(self, *args, **kwargs):
        if self.ENABLE_CORS:
            headers = self.request.headers
            # allow the origin which made the XHR and the actual method
            self.set_header("Access-Control-Allow-Origin", headers.get("Origin"))
            self.set_header(
                "Access-Control-Allow-Methods",
                headers.get("Access-Control-Request-Method"),
            )
            # allow caching of the preflight response for 10 seconds
            self.set_header("Access-Control-Max-Age", "10")
            # preflight lists the non-standard headers the real request will
            # carry (like X-Api-Key)
            requested_headers = headers.get("Access-Control-Request-Headers")
            if requested_headers is not None:
                self.set_header("Access-Control-Allow-Headers", requested_headers)
        self.set_status(204)
        self.finish()
# ~~ WSGI middleware
@tornado.web.stream_request_body
class UploadStorageFallbackHandler(RequestlessExceptionLoggingMixin, CorsSupportMixin):
    """
    A ``RequestHandler`` similar to ``tornado.web.FallbackHandler`` which fetches any files contained in the request bodies
    of content type ``multipart``, stores them in temporary files and supplies the ``fallback`` with the file's ``name``,
    ``content_type``, ``path`` and ``size`` instead via a rewritten body.

    Basically similar to what the nginx upload module does.

    Basic request body example:

    .. code-block:: none

        ------WebKitFormBoundarypYiSUx63abAmhT5C
        Content-Disposition: form-data; name="file"; filename="test.gcode"
        Content-Type: application/octet-stream

        ...
        ------WebKitFormBoundarypYiSUx63abAmhT5C
        Content-Disposition: form-data; name="apikey"

        my_funny_apikey
        ------WebKitFormBoundarypYiSUx63abAmhT5C
        Content-Disposition: form-data; name="select"

        true
        ------WebKitFormBoundarypYiSUx63abAmhT5C--

    That would get turned into:

    .. code-block:: none

        ------WebKitFormBoundarypYiSUx63abAmhT5C
        Content-Disposition: form-data; name="apikey"

        my_funny_apikey
        ------WebKitFormBoundarypYiSUx63abAmhT5C
        Content-Disposition: form-data; name="select"

        true
        ------WebKitFormBoundarypYiSUx63abAmhT5C
        Content-Disposition: form-data; name="file.path"
        Content-Type: text/plain; charset=utf-8

        /tmp/tmpzupkro
        ------WebKitFormBoundarypYiSUx63abAmhT5C
        Content-Disposition: form-data; name="file.name"
        Content-Type: text/plain; charset=utf-8

        test.gcode
        ------WebKitFormBoundarypYiSUx63abAmhT5C
        Content-Disposition: form-data; name="file.content_type"
        Content-Type: text/plain; charset=utf-8

        application/octet-stream
        ------WebKitFormBoundarypYiSUx63abAmhT5C
        Content-Disposition: form-data; name="file.size"
        Content-Type: text/plain; charset=utf-8

        349182
        ------WebKitFormBoundarypYiSUx63abAmhT5C--

    The underlying application can then access the contained files via their respective paths and just move them
    where necessary.
    """

    BODY_METHODS = ("POST", "PATCH", "PUT")
    """ The request methods that may contain a request body. """

    def initialize(
        self, fallback, file_prefix="tmp", file_suffix="", path=None, suffixes=None
    ):
        """
        Initializes the handler state.

        :param fallback: callable invoked with ``(request, body)`` once the
            request body has been processed
        :param file_prefix: prefix for temporary files written to disk
        :param file_suffix: suffix for temporary files written to disk
        :param path: directory for temporary files (system default if ``None``)
        :param suffixes: optional overrides for the generated form field
            suffixes (``name``, ``path``, ``content_type``, ``size``)
        """
        if not suffixes:
            suffixes = {}
        self._fallback = fallback
        self._file_prefix = file_prefix
        self._file_suffix = file_suffix
        self._path = path
        # default suffixes map each key to itself; entries may be overridden
        self._suffixes = {key: key for key in ("name", "path", "content_type", "size")}
        for suffix_type, suffix in suffixes.items():
            if suffix_type in self._suffixes and suffix is not None:
                self._suffixes[suffix_type] = suffix
        # multipart boundary
        self._multipart_boundary = None
        # Parts, files and values will be stored here
        self._parts = {}
        self._files = []
        # Part currently being processed
        self._current_part = None
        # content type of request body
        self._content_type = None
        # bytes left to read according to content_length of request body
        self._bytes_left = 0
        # buffer needed for identifying form data parts
        self._buffer = b""
        # buffer for new body
        self._new_body = b""
        # logger
        self._logger = logging.getLogger(__name__)

    def prepare(self):
        """
        Prepares the processing of the request. If it's a request that may contain a request body (as defined in
        :attr:`UploadStorageFallbackHandler.BODY_METHODS`) prepares the multipart parsing if content type fits. If it's a
        body-less request, just calls the ``fallback`` with an empty body and finishes the request.
        """
        if self.request.method in UploadStorageFallbackHandler.BODY_METHODS:
            # NOTE(review): when the header is present this is the *string*
            # header value, not an int -- it is only used for truthiness below,
            # so that works, but confirm before using it arithmetically
            self._bytes_left = self.request.headers.get("Content-Length", 0)
            self._content_type = self.request.headers.get("Content-Type", None)

            # request might contain a body
            if self.is_multipart():
                if not self._bytes_left:
                    # we don't support requests without a content-length
                    raise tornado.web.HTTPError(
                        411, log_message="No Content-Length supplied"
                    )

                # extract the multipart boundary
                fields = self._content_type.split(";")
                for field in fields:
                    k, sep, v = field.strip().partition("=")
                    if k == "boundary" and v:
                        if v.startswith('"') and v.endswith('"'):
                            self._multipart_boundary = tornado.escape.utf8(v[1:-1])
                        else:
                            self._multipart_boundary = tornado.escape.utf8(v)
                        break
                else:
                    # RFC2046 section 5.1 (as referred to from RFC 7578) defines the boundary
                    # parameter as mandatory for multipart requests:
                    #
                    #    The only mandatory global parameter for the "multipart" media type is
                    #    the boundary parameter, which consists of 1 to 70 characters [...]
                    #
                    # So no boundary? 400 Bad Request
                    raise tornado.web.HTTPError(
                        400, log_message="No multipart boundary supplied"
                    )
        else:
            # body-less request: hand over immediately with an empty body
            self._fallback(self.request, b"")
            self._finished = True

    def data_received(self, chunk):
        """
        Called by Tornado on receiving a chunk of the request body. If request is a multipart request, takes care of
        processing the multipart data structure via :func:`_process_multipart_data`. If not, just adds the chunk to
        internal in-memory buffer.

        :param chunk: chunk of data received from Tornado
        """
        data = self._buffer + chunk
        if self.is_multipart():
            self._process_multipart_data(data)
        else:
            self._buffer = data

    def is_multipart(self):
        """Checks whether this request is a ``multipart`` request"""
        return self._content_type is not None and self._content_type.startswith(
            "multipart"
        )

    def _process_multipart_data(self, data):
        """
        Processes the given data, parsing it for multipart definitions and calling the appropriate methods.

        :param data: the data to process as a string
        """
        # check for boundary
        delimiter = b"--%s" % self._multipart_boundary
        delimiter_loc = data.find(delimiter)
        delimiter_len = len(delimiter)
        end_of_header = -1
        if delimiter_loc != -1:
            # found the delimiter in the currently available data
            # (the -2 strips the CRLF that precedes any non-leading delimiter)
            delimiter_data_end = 0 if delimiter_loc == 0 else delimiter_loc - 2
            data, self._buffer = data[0:delimiter_data_end], data[delimiter_loc:]
            end_of_header = self._buffer.find(b"\r\n\r\n")
        else:
            # make sure any boundary (with single or double ==) contained at the end of chunk does not get
            # truncated by this processing round => save it to the buffer for next round
            endlen = len(self._multipart_boundary) + 4
            data, self._buffer = data[0:-endlen], data[-endlen:]

        # stream data to part handler
        if data and self._current_part:
            self._on_part_data(self._current_part, data)

        if end_of_header >= 0:
            # +2 skips the CRLF after the delimiter, +4 the blank header line
            self._on_part_header(self._buffer[delimiter_len + 2 : end_of_header])
            self._buffer = self._buffer[end_of_header + 4 :]

        if delimiter_loc != -1 and self._buffer.strip() == delimiter + b"--":
            # we saw the last boundary and are at the end of our request
            if self._current_part:
                self._on_part_finish(self._current_part)
                self._current_part = None
            self._buffer = b""
            self._on_request_body_finish()

    def _on_part_header(self, header):
        """
        Called for a new multipart header, takes care of parsing the header and calling :func:`_on_part` with the
        relevant data, setting the current part in the process.

        :param header: header to parse
        """
        # close any open parts
        if self._current_part:
            self._on_part_finish(self._current_part)
            self._current_part = None

        header_check = header.find(self._multipart_boundary)
        if header_check != -1:
            self._logger.warning(
                "Header still contained multipart boundary, stripping it..."
            )
            header = header[header_check:]

        # convert to dict
        try:
            header = tornado.httputil.HTTPHeaders.parse(header.decode("utf-8"))
        except UnicodeDecodeError:
            try:
                header = tornado.httputil.HTTPHeaders.parse(header.decode("iso-8859-1"))
            except Exception:
                # looks like we couldn't decode something here neither as UTF-8 nor ISO-8859-1
                self._logger.warning(
                    "Could not decode multipart headers in request, should be either UTF-8 or ISO-8859-1"
                )
                self.send_error(400)
                return

        disp_header = header.get("Content-Disposition", "")
        disposition, disp_params = _parse_header(disp_header, strip_quotes=False)

        if disposition != "form-data":
            self._logger.warning(
                "Got a multipart header without form-data content disposition, ignoring that one"
            )
            return
        if not disp_params.get("name"):
            self._logger.warning(
                "Got a multipart header without name, ignoring that one"
            )
            return

        filename = disp_params.get("filename*", None)  # RFC 5987 header present?
        if filename is not None:
            try:
                filename = _extended_header_value(filename)
            except Exception:
                # parse error, this is not RFC 5987 compliant after all
                self._logger.warning(
                    "extended filename* value {!r} is not RFC 5987 compliant".format(
                        filename
                    )
                )
                self.send_error(400)
                return
        else:
            # no filename* header, just strip quotes from filename header then and be done
            filename = _strip_value_quotes(disp_params.get("filename", None))

        self._current_part = self._on_part_start(
            _strip_value_quotes(disp_params["name"]),
            header.get("Content-Type", None),
            filename=filename,
        )

    def _on_part_start(self, name, content_type, filename=None):
        """
        Called for new parts in the multipart stream. If ``filename`` is given creates new ``file`` part (which leads
        to storage of the data as temporary file on disk), if not creates a new ``data`` part (which stores
        incoming data in memory).

        Structure of ``file`` parts:

        * ``name``: name of the part
        * ``filename``: filename associated with the part
        * ``path``: path to the temporary file storing the file's data
        * ``content_type``: content type of the part
        * ``file``: file handle for the temporary file (mode "wb", not deleted on close, will be deleted however after
          handling of the request has finished in :func:`_handle_method`)

        Structure of ``data`` parts:

        * ``name``: name of the part
        * ``content_type``: content type of the part
        * ``data``: bytes of the part (initialized to an empty string)

        :param name: name of the part
        :param content_type: content type of the part
        :param filename: filename associated with the part.
        :return: dict describing the new part
        """
        if filename is not None:
            # this is a file
            import tempfile

            handle = tempfile.NamedTemporaryFile(
                mode="wb",
                prefix=self._file_prefix,
                suffix=self._file_suffix,
                dir=self._path,
                delete=False,
            )
            return {
                "name": tornado.escape.utf8(name),
                "filename": tornado.escape.utf8(filename),
                "path": tornado.escape.utf8(handle.name),
                "content_type": tornado.escape.utf8(content_type),
                "file": handle,
            }
        else:
            return {
                "name": tornado.escape.utf8(name),
                "content_type": tornado.escape.utf8(content_type),
                "data": b"",
            }

    def _on_part_data(self, part, data):
        """
        Called when new bytes are received for the given ``part``, takes care of writing them to their storage.

        :param part: part for which data was received
        :param data: data chunk which was received
        """
        if "file" in part:
            part["file"].write(data)
        else:
            part["data"] += data

    def _on_part_finish(self, part):
        """
        Called when a part gets closed, takes care of storing the finished part in the internal parts storage and for
        ``file`` parts closing the temporary file and storing the part in the internal files storage.

        :param part: part which was closed
        """
        name = part["name"]
        self._parts[name] = part
        if "file" in part:
            self._files.append(part["path"])
            part["file"].close()
            del part["file"]

    def _on_request_body_finish(self):
        """
        Called when the request body has been read completely. Takes care of creating the replacement body out of the
        logged parts, turning ``file`` parts into new ``data`` parts.
        """
        self._new_body = b""
        for name, part in self._parts.items():
            if "filename" in part:
                # add form fields for filename, path, size and content_type for all files contained in the request
                if "path" not in part:
                    continue

                parameters = {
                    "name": part["filename"],
                    "path": part["path"],
                    "size": str(os.stat(part["path"]).st_size),
                }
                if "content_type" in part:
                    parameters["content_type"] = part["content_type"]

                # rename the generated fields according to the configured suffixes
                fields = {
                    self._suffixes[key]: value for (key, value) in parameters.items()
                }
                for n, p in fields.items():
                    if n is None or p is None:
                        continue
                    key = name + b"." + octoprint.util.to_bytes(n)
                    self._new_body += b"--%s\r\n" % self._multipart_boundary
                    self._new_body += (
                        b'Content-Disposition: form-data; name="%s"\r\n' % key
                    )
                    self._new_body += b"Content-Type: text/plain; charset=utf-8\r\n"
                    self._new_body += b"\r\n"
                    self._new_body += octoprint.util.to_bytes(p) + b"\r\n"
            elif "data" in part:
                # plain data parts get passed through unchanged
                self._new_body += b"--%s\r\n" % self._multipart_boundary
                value = part["data"]
                self._new_body += (
                    b'Content-Disposition: form-data; name="%s"\r\n' % name
                )
                if "content_type" in part and part["content_type"] is not None:
                    self._new_body += b"Content-Type: %s\r\n" % part["content_type"]
                self._new_body += b"\r\n"
                self._new_body += value + b"\r\n"
        self._new_body += b"--%s--\r\n" % self._multipart_boundary

    def _handle_method(self, *args, **kwargs):
        """
        Takes care of defining the new request body if necessary and forwarding
        the current request and changed body to the ``fallback``.
        """
        # determine which body to supply
        body = b""
        if self.is_multipart():
            # make sure we really processed all data in the buffer
            while len(self._buffer):
                self._process_multipart_data(self._buffer)

            # use rewritten body
            body = self._new_body
        elif self.request.method in UploadStorageFallbackHandler.BODY_METHODS:
            # directly use data from buffer
            body = self._buffer

        # rewrite content length
        self.request.headers["Content-Length"] = len(body)

        try:
            # call the configured fallback with request and body to use
            self._fallback(self.request, body)
            self._headers_written = True
        finally:
            # make sure the temporary files are removed again
            for f in self._files:
                octoprint.util.silent_remove(f)

    # make all http methods trigger _handle_method
    get = _handle_method
    post = _handle_method
    put = _handle_method
    patch = _handle_method
    delete = _handle_method
    head = _handle_method
    options = _handle_method
def _parse_header(line, strip_quotes=True):
    """Parse a header line like ``form-data; name="x"`` into key and params.

    Returns the main value and a dict of the ``;``-separated parameters,
    optionally with surrounding quotes stripped from the parameter values.
    """
    params = tornado.httputil._parseparam(";" + line)
    key = next(params)
    pdict = {}
    for param in params:
        eq = param.find("=")
        if eq < 0:
            continue
        name = param[:eq].strip().lower()
        value = param[eq + 1 :].strip()
        pdict[name] = _strip_value_quotes(value) if strip_quotes else value
    return key, pdict
def _strip_value_quotes(value):
if not value:
return value
if len(value) >= 2 and value[0] == value[-1] == '"':
value = value[1:-1]
value = value.replace("\\\\", "\\").replace('\\"', '"')
return value
def _extended_header_value(value):
if not value:
return value
if value.lower().startswith("iso-8859-1'") or value.lower().startswith("utf-8'"):
# RFC 5987 section 3.2
from urllib.parse import unquote
encoding, _, value = value.split("'", 2)
return unquote(value, encoding=encoding)
else:
# no encoding provided, strip potentially present quotes and call it a day
return octoprint.util.to_unicode(_strip_value_quotes(value), encoding="utf-8")
class WsgiInputContainer:
"""
A WSGI container for use with Tornado that allows supplying the request body to be used for ``wsgi.input`` in the
generated WSGI environment upon call.
A ``RequestHandler`` can thus provide the WSGI application with a stream for the request body, or a modified body.
Example usage:
.. code-block:: python
wsgi_app = octoprint.server.util.WsgiInputContainer(octoprint_app)
application = tornado.web.Application([
(r".*", UploadStorageFallbackHandler, dict(fallback=wsgi_app),
])
The implementation logic is basically the same as ``tornado.wsgi.WSGIContainer`` but the ``__call__`` and ``environ``
methods have been adjusted to allow for an optionally supplied ``body`` argument which is then used for ``wsgi.input``.
"""
def __init__(
self, wsgi_application, headers=None, forced_headers=None, removed_headers=None
):
self.wsgi_application = wsgi_application
if headers is None:
headers = {}
if forced_headers is None:
forced_headers = {}
if removed_headers is None:
removed_headers = []
self.headers = headers
self.forced_headers = forced_headers
self.removed_headers = removed_headers
    def __call__(self, request, body=None):
        """
        Wraps the call against the WSGI app, deriving the WSGI environment from the supplied Tornado ``HTTPServerRequest``.

        :param request: the ``tornado.httpserver.HTTPServerRequest`` to derive the WSGI environment from
        :param body: an optional body to use as ``wsgi.input`` instead of ``request.body``, can be a string or a stream
        """
        data = {}
        response = []

        def start_response(status, response_headers, exc_info=None):
            # record status/headers for later; the returned callable is the
            # WSGI ``write`` function, which appends chunks to ``response``
            data["status"] = status
            data["headers"] = response_headers
            return response.append

        app_response = self.wsgi_application(
            WsgiInputContainer.environ(request, body), start_response
        )
        try:
            response.extend(app_response)
            body = b"".join(response)
        finally:
            # close the WSGI iterable if it supports it (PEP 3333 requirement)
            if hasattr(app_response, "close"):
                app_response.close()
        if not data:
            raise Exception("WSGI app did not call start_response")

        status_code, reason = data["status"].split(" ", 1)
        status_code = int(status_code)
        headers = data["headers"]
        header_set = {k.lower() for (k, v) in headers}
        body = tornado.escape.utf8(body)
        # 304 Not Modified must not carry entity headers
        if status_code != 304:
            if "content-length" not in header_set:
                headers.append(("Content-Length", str(len(body))))
            if "content-type" not in header_set:
                headers.append(("Content-Type", "text/html; charset=UTF-8"))
        header_set = {k.lower() for (k, v) in headers}
        # add configured default headers only when absent, forced ones always
        for header, value in self.headers.items():
            if header.lower() not in header_set:
                headers.append((header, value))
        for header, value in self.forced_headers.items():
            headers.append((header, value))
        # drop headers configured for removal (entries presumably given in
        # lowercase by the caller -- confirm against instantiation sites)
        headers = [
            (header, value)
            for header, value in headers
            if header.lower() not in self.removed_headers
        ]

        start_line = tornado.httputil.ResponseStartLine("HTTP/1.1", status_code, reason)
        header_obj = tornado.httputil.HTTPHeaders()
        for key, value in headers:
            header_obj.add(key, value)
        # write the response directly on the connection, bypassing the
        # RequestHandler machinery
        request.connection.write_headers(start_line, header_obj, chunk=body)
        request.connection.finish()
        self._log(status_code, request)
    @staticmethod
    def environ(request, body=None):
        """
        Converts a ``tornado.httputil.HTTPServerRequest`` to a WSGI environment.

        An optional ``body`` to be used for populating ``wsgi.input`` can be supplied (either a string or a stream). If not
        supplied, ``request.body`` will be wrapped into a ``io.BytesIO`` stream and used instead.

        NOTE(review): this pops ``Content-Type``, ``Content-Length`` and (for chunked requests)
        ``Transfer-Encoding`` off ``request.headers``, i.e. it mutates the passed request object —
        confirm callers do not rely on those headers afterwards.

        :param request: the ``tornado.httpserver.HTTPServerRequest`` to derive the WSGI environment from
        :param body: an optional body to use as ``wsgi.input`` instead of ``request.body``, can be a string or a stream
        """
        import io
        from tornado.wsgi import to_wsgi_str
        # determine the request_body to supply as wsgi.input
        if body is not None:
            if isinstance(body, (bytes, str)):
                # strings are utf-8 encoded and wrapped into a byte stream
                request_body = io.BytesIO(tornado.escape.utf8(body))
            else:
                # anything else is assumed to be a readable stream, used as-is
                request_body = body
        else:
            request_body = io.BytesIO(tornado.escape.utf8(request.body))
        # split "host:port"; fall back to the protocol default port if absent
        hostport = request.host.split(":")
        if len(hostport) == 2:
            host = hostport[0]
            port = int(hostport[1])
        else:
            host = request.host
            port = 443 if request.protocol == "https" else 80
        environ = {
            "REQUEST_METHOD": request.method,
            "SCRIPT_NAME": "",
            "PATH_INFO": to_wsgi_str(
                tornado.escape.url_unescape(request.path, encoding=None, plus=False)
            ),
            "QUERY_STRING": request.query,
            "REMOTE_ADDR": request.remote_ip,
            "SERVER_NAME": host,
            "SERVER_PORT": str(port),
            "SERVER_PROTOCOL": request.version,
            "wsgi.version": (1, 0),
            "wsgi.url_scheme": request.protocol,
            "wsgi.input": request_body,
            "wsgi.errors": sys.stderr,
            "wsgi.multithread": False,
            "wsgi.multiprocess": True,
            "wsgi.run_once": False,
        }
        # content type/length live in dedicated CGI-style keys, not HTTP_*
        if "Content-Type" in request.headers:
            environ["CONTENT_TYPE"] = request.headers.pop("Content-Type")
        if "Content-Length" in request.headers:
            environ["CONTENT_LENGTH"] = request.headers.pop("Content-Length")
        # remove transfer encoding header if chunked, otherwise flask wsgi entrypoint makes input empty
        if (
            "Transfer-Encoding" in request.headers
            and request.headers.get("Transfer-Encoding") == "chunked"
        ):
            request.headers.pop("Transfer-Encoding")
        # remaining headers are exposed WSGI-style as HTTP_<NAME>
        for key, value in request.headers.items():
            environ["HTTP_" + key.replace("-", "_").upper()] = value
        return environ
def _log(self, status_code, request):
access_log = logging.getLogger("tornado.access")
if status_code < 400:
log_method = access_log.info
elif status_code < 500:
log_method = access_log.warning
else:
log_method = access_log.error
request_time = 1000 * request.request_time()
summary = request.method + " " + request.uri + " (" + request.remote_ip + ")"
log_method("%d %s %.2fms", status_code, summary, request_time)
# ~~ customized HTTP1Connection implementation
class CustomHTTPServer(tornado.httpserver.HTTPServer):
    """
    Custom implementation of ``tornado.httpserver.HTTPServer`` that allows defining max body sizes depending on path and
    method.

    Behaves like the stock server except that it builds ``CustomHTTP1ConnectionParameters`` (carrying the two
    additional constructor arguments ``max_body_sizes`` and ``default_max_body_size``) and serves each client
    through a ``CustomHTTP1ServerConnection`` instead of the stock connection class.

    ``max_body_sizes`` is expected to be an iterable containing tuples of the form (method, path regex, maximum body size),
    with method and path regex having to match in order for maximum body size to take affect.

    ``default_max_body_size`` is the default maximum body size to apply if no specific one from ``max_body_sizes`` matches.
    """

    def __init__(self, *args, **kwargs):
        # intentionally empty: tornado routes construction through
        # ``initialize`` via its Configurable machinery
        pass

    def initialize(self, *args, **kwargs):
        """Extract our two extra parameters, then merge them into the connection parameters."""
        overrides = {
            "default_max_body_size": kwargs.pop("default_max_body_size", None),
            "max_body_sizes": kwargs.pop("max_body_sizes", None),
        }
        tornado.httpserver.HTTPServer.initialize(self, *args, **kwargs)
        self.conn_params = CustomHTTP1ConnectionParameters.from_stock_params(
            self.conn_params, **overrides
        )

    def handle_stream(self, stream, address):
        """Accept a new client stream and serve it through our custom connection class."""
        request_context = tornado.httpserver._HTTPRequestContext(
            stream, address, self.protocol, self.trusted_downstream
        )
        connection = CustomHTTP1ServerConnection(
            stream, self.conn_params, request_context
        )
        self._connections.add(connection)
        connection.start_serving(self)
class CustomHTTP1ServerConnection(tornado.http1connection.HTTP1ServerConnection):
    """
    A custom implementation of ``tornado.http1connection.HTTP1ServerConnection`` which utilizes a ``CustomHTTP1Connection``
    instead of a ``tornado.http1connection.HTTP1Connection`` in ``_server_request_loop``. The implementation logic is
    otherwise the same as ``tornado.http1connection.HTTP1ServerConnection``.
    """
    @tornado.gen.coroutine
    def _server_request_loop(self, delegate):
        # Serve requests from this stream until the connection closes.
        # Copied from upstream tornado; the only deviation is the connection class.
        try:
            while True:
                # use the size-limit-aware connection implementation
                conn = CustomHTTP1Connection(
                    self.stream, False, self.params, self.context
                )
                request_delegate = delegate.start_request(self, conn)
                try:
                    ret = yield conn.read_response(request_delegate)
                except (
                    tornado.iostream.StreamClosedError,
                    tornado.iostream.UnsatisfiableReadError,
                ):
                    # client went away or request was unreadable; stop serving
                    return
                except tornado.http1connection._QuietException:
                    # This exception was already logged.
                    conn.close()
                    return
                except Exception:
                    tornado.http1connection.gen_log.error(
                        "Uncaught exception", exc_info=True
                    )
                    conn.close()
                    return
                if not ret:
                    # no keep-alive: end the loop after this request
                    return
                # yield control to the IOLoop between requests for fairness
                yield tornado.gen.moment
        finally:
            delegate.on_close(self)
class CustomHTTP1Connection(tornado.http1connection.HTTP1Connection):
    """
    A custom implementation of ``tornado.http1connection.HTTP1Connection`` which upon checking the ``Content-Length`` of
    the request against the configured maximum utilizes ``max_body_sizes`` and ``default_max_body_size`` as a fallback.
    """
    def __init__(self, stream, is_client, params=None, context=None):
        if params is None:
            params = CustomHTTP1ConnectionParameters()
        tornado.http1connection.HTTP1Connection.__init__(
            self, stream, is_client, params=params, context=context
        )
        import re
        # pre-compile the configured (method, path regex, max size) rules once
        self._max_body_sizes = list(
            map(
                lambda x: (x[0], re.compile(x[1]), x[2]),
                self.params.max_body_sizes or list(),
            )
        )
        # fall back to the stream's buffer size when no explicit default is set
        self._default_max_body_size = (
            self.params.default_max_body_size or self.stream.max_buffer_size
        )
    def _read_body(self, code, headers, delegate):
        """
        Basically the same as ``tornado.http1connection.HTTP1Connection._read_body``, but determines the maximum
        content length individually for the request (utilizing ``._get_max_content_length``).

        If the individual max content length is 0 or smaller no content length is checked. If the content length of the
        current request exceeds the individual max content length, the request processing is aborted and an
        ``HTTPInputError`` is raised.
        """
        if "Content-Length" in headers:
            if "Transfer-Encoding" in headers:
                # Response cannot contain both Content-Length and
                # Transfer-Encoding headers.
                # http://tools.ietf.org/html/rfc7230#section-3.3.3
                raise tornado.httputil.HTTPInputError(
                    "Response with both Transfer-Encoding and Content-Length"
                )
            if "," in headers["Content-Length"]:
                # Proxies sometimes cause Content-Length headers to get
                # duplicated. If all the values are identical then we can
                # use them but if they differ it's an error.
                # NOTE(review): ``re`` here relies on a module-level import;
                # the ``import re`` above is local to ``__init__`` — confirm
                # the file imports ``re`` at the top.
                pieces = re.split(r",\s*", headers["Content-Length"])
                if any(i != pieces[0] for i in pieces):
                    raise tornado.httputil.HTTPInputError(
                        "Multiple unequal Content-Lengths: %r"
                        % headers["Content-Length"]
                    )
                headers["Content-Length"] = pieces[0]
            try:
                content_length = int(headers["Content-Length"])
            except ValueError:
                # Handles non-integer Content-Length value.
                raise tornado.httputil.HTTPInputError(
                    "Only integer Content-Length is allowed: %s"
                    % headers["Content-Length"]
                )
            # the customization: resolve the limit per method/path instead of
            # using one global maximum
            max_content_length = self._get_max_content_length(
                self._request_start_line.method, self._request_start_line.path
            )
            if (
                max_content_length is not None
                and 0 <= max_content_length < content_length
            ):
                raise tornado.httputil.HTTPInputError("Content-Length too long")
        else:
            content_length = None
        if code == 204:
            # This response code is not allowed to have a non-empty body,
            # and has an implicit length of zero instead of read-until-close.
            # http://www.w3.org/Protocols/rfc2616/rfc2616-sec4.html#sec4.3
            if "Transfer-Encoding" in headers or content_length not in (None, 0):
                raise tornado.httputil.HTTPInputError(
                    "Response with code %d should not have body" % code
                )
            content_length = 0
        if content_length is not None:
            return self._read_fixed_body(content_length, delegate)
        if headers.get("Transfer-Encoding") == "chunked":
            return self._read_chunked_body(delegate)
        if self.is_client:
            return self._read_body_until_close(delegate)
        return None
    def _get_max_content_length(self, method, path):
        """
        Gets the max content length for the given method and path. Checks whether method and path match against any
        of the specific maximum content lengths supplied in ``max_body_sizes`` and returns that as the maximum content
        length if available, otherwise returns ``default_max_body_size``.

        :param method: method of the request to match against
        :param path: path of the request to match against
        :return: determine maximum content length to apply to this request, max return 0 for unlimited allowed content
            length
        """
        # first matching specific rule wins
        for m, p, s in self._max_body_sizes:
            if method == m and p.match(path):
                return s
        return self._default_max_body_size
class CustomHTTP1ConnectionParameters(
    tornado.http1connection.HTTP1ConnectionParameters
):
    """
    An implementation of ``tornado.http1connection.HTTP1ConnectionParameters`` that adds two new parameters
    ``max_body_sizes`` and ``default_max_body_size``.

    For a description of these please see the documentation of ``CustomHTTPServer`` above.
    """

    def __init__(self, *args, **kwargs):
        # strip our two additions before delegating; the stock constructor
        # does not know about them
        size_rules = kwargs.pop("max_body_sizes", list())
        fallback_size = kwargs.pop("default_max_body_size", None)
        tornado.http1connection.HTTP1ConnectionParameters.__init__(
            self, *args, **kwargs
        )
        self.max_body_sizes = size_rules
        self.default_max_body_size = fallback_size

    @classmethod
    def from_stock_params(cls, other, **additional):
        """Clone *other*'s parameters, overlaid with any *additional* keyword arguments."""
        merged = dict(other.__dict__)
        merged.update(additional)
        return cls(**merged)
# ~~ customized large response handler
class LargeResponseHandler(
    RequestlessExceptionLoggingMixin, CorsSupportMixin, tornado.web.StaticFileHandler
):
    """
    Customized `tornado.web.StaticFileHandler <http://tornado.readthedocs.org/en/branch4.0/web.html#tornado.web.StaticFileHandler>`_
    that allows delivery of the requested resource as attachment and access and request path validation through
    optional callbacks. Note that access validation takes place before path validation.

    Arguments:
        path (str): The system path from which to serve files (this will be forwarded to the ``initialize`` method of
            :class:``~tornado.web.StaticFileHandler``)
        default_filename (str): The default filename to serve if none is explicitly specified and the request references
            a subdirectory of the served path (this will be forwarded to the ``initialize`` method of
            :class:``~tornado.web.StaticFileHandler`` as the ``default_filename`` keyword parameter). Defaults to ``None``.
        as_attachment (bool): Whether to serve requested files with ``Content-Disposition: attachment`` header (``True``)
            or not. Defaults to ``False``.
        allow_client_caching (bool): Whether to allow the client to cache (by not setting any ``Cache-Control`` or
            ``Expires`` headers on the response) or not.
        access_validation (function): Callback to call in the ``get`` method to validate access to the resource. Will
            be called with ``self.request`` as parameter which contains the full tornado request object. Should raise
            a ``tornado.web.HTTPError`` if access is not allowed in which case the request will not be further processed.
            Defaults to ``None`` and hence no access validation being performed.
        path_validation (function): Callback to call in the ``get`` method to validate the requested path. Will be called
            with the requested path as parameter. Should raise a ``tornado.web.HTTPError`` (e.g. an 404) if the requested
            path does not pass validation in which case the request will not be further processed.
            Defaults to ``None`` and hence no path validation being performed.
        etag_generator (function): Callback to call for generating the value of the ETag response header. Will be
            called with the response handler as parameter. May return ``None`` to prevent the ETag response header
            from being set. If not provided the last modified time of the file in question will be used as returned
            by ``get_content_version``.
        name_generator (function): Callback to call for generating the value of the attachment file name header. Will be
            called with the requested path as parameter.
        mime_type_guesser (function): Callback to guess the mime type to use for the content type encoding of the
            response. Will be called with the requested path on disk as parameter.
        is_pre_compressed (bool): if the file is expected to be pre-compressed, i.e, if there is a file in the same
            directory with the same name, but with '.gz' appended and gzip-encoded
    """

    def initialize(
        self,
        path,
        default_filename=None,
        as_attachment=False,
        allow_client_caching=True,
        access_validation=None,
        path_validation=None,
        etag_generator=None,
        name_generator=None,
        mime_type_guesser=None,
        is_pre_compressed=False,
        stream_body=False,
    ):
        tornado.web.StaticFileHandler.initialize(
            self, os.path.abspath(path), default_filename
        )
        self._as_attachment = as_attachment
        self._allow_client_caching = allow_client_caching
        self._access_validation = access_validation
        self._path_validation = path_validation
        self._etag_generator = etag_generator
        self._name_generator = name_generator
        self._mime_type_guesser = mime_type_guesser
        self._is_pre_compressed = is_pre_compressed
        self._stream_body = stream_body

    def should_use_precompressed(self):
        """Whether to serve the ``.gz`` sibling: requires both configuration and client support."""
        return self._is_pre_compressed and "gzip" in self.request.headers.get(
            "Accept-Encoding", ""
        )

    def get(self, path, include_body=True):
        # access validation deliberately runs before path validation (see class docstring)
        if self._access_validation is not None:
            self._access_validation(self.request)
        if self._path_validation is not None:
            self._path_validation(path)
        if "cookie" in self.request.arguments:
            self.set_cookie(self.request.arguments["cookie"][0], "true", path="/")
        if self.should_use_precompressed():
            if os.path.exists(os.path.join(self.root, path + ".gz")):
                self.set_header("Content-Encoding", "gzip")
                path = path + ".gz"
            else:
                logging.getLogger(__name__).warning(
                    "Precompressed assets expected but {}.gz does not exist "
                    "in {}, using plain file instead.".format(path, self.root)
                )
        if self._stream_body:
            return self.streamed_get(path, include_body=include_body)
        else:
            return tornado.web.StaticFileHandler.get(
                self, path, include_body=include_body
            )

    @tornado.gen.coroutine
    def streamed_get(self, path, include_body=True):
        """
        Version of StaticFileHandler.get that doesn't support ranges or ETag but streams the content. Helpful for files
        that might still change while being transmitted (e.g. log files)
        """
        # Set up our path instance variables.
        self.path = self.parse_url_path(path)
        del path  # make sure we don't refer to path instead of self.path again
        absolute_path = self.get_absolute_path(self.root, self.path)
        self.absolute_path = self.validate_absolute_path(self.root, absolute_path)
        if self.absolute_path is None:
            return
        content_type = self.get_content_type()
        if content_type:
            self.set_header("Content-Type", content_type)
        self.set_extra_headers(self.path)
        if include_body:
            content = self.get_content(self.absolute_path)
            if isinstance(content, bytes):
                content = [content]
            for chunk in content:
                try:
                    self.write(chunk)
                    yield self.flush()
                except tornado.iostream.StreamClosedError:
                    # client disconnected mid-stream, nothing more to do
                    return
        else:
            assert self.request.method == "HEAD"

    def set_extra_headers(self, path):
        if self._as_attachment:
            filename = None
            if callable(self._name_generator):
                filename = self._name_generator(path)
            if filename is None:
                filename = os.path.basename(path)
            filename = tornado.escape.url_escape(filename, plus=False)
            # plain filename= for legacy clients, RFC 5987 filename*= for the rest
            self.set_header(
                "Content-Disposition",
                "attachment; filename=\"{}\"; filename*=UTF-8''{}".format(
                    filename, filename
                ),
            )
        if not self._allow_client_caching:
            self.set_header("Cache-Control", "max-age=0, must-revalidate, private")
            self.set_header("Expires", "-1")
        self.set_header("X-Original-Content-Length", str(self.get_content_size()))

    @property
    def original_absolute_path(self):
        """The path of the uncompressed file corresponding to the compressed file"""
        if self._is_pre_compressed and self.absolute_path.endswith(".gz"):
            # Note: ``rstrip(".gz")`` must NOT be used here — it strips any
            # trailing '.', 'g' or 'z' characters (e.g. "config.gz" -> "confi"),
            # so remove the suffix explicitly instead.
            return self.absolute_path[: -len(".gz")]
        return self.absolute_path

    def compute_etag(self):
        """
        Compute the ETag via the configured generator, falling back to the file's content version.

        Returns ``None`` (suppressing the header) if the generator returns ``None``, as documented
        on the class; the result is double-quoted if not already.
        """
        if self._etag_generator is not None:
            etag = self._etag_generator(self)
            if etag is None:
                # documented contract: a generator may return None to prevent
                # the ETag header from being set; without this guard the
                # endswith() below would raise an AttributeError
                return None
        else:
            etag = str(self.get_content_version(self.absolute_path))
        if not etag.endswith('"'):
            etag = f'"{etag}"'
        return etag

    # noinspection PyAttributeOutsideInit
    def get_content_type(self):
        """Guess the content type, via the configured guesser first, then tornado's default logic."""
        if self._mime_type_guesser is not None:
            guessed = self._mime_type_guesser(self.original_absolute_path)
            if guessed is not None:
                return guessed
        correct_absolute_path = None
        try:
            # temporarily point self.absolute_path at the uncompressed file so
            # tornado guesses from the real extension, not ".gz"
            if self.should_use_precompressed():
                correct_absolute_path = self.absolute_path
                self.absolute_path = self.original_absolute_path
            return tornado.web.StaticFileHandler.get_content_type(self)
        finally:
            # restore self.absolute_path
            if self.should_use_precompressed() and correct_absolute_path is not None:
                self.absolute_path = correct_absolute_path

    @classmethod
    def get_content_version(cls, abspath):
        """Version the content by its last modified time."""
        import os
        import stat
        return os.stat(abspath)[stat.ST_MTIME]
##~~ URL Forward Handler for forwarding requests to a preconfigured static URL
class UrlProxyHandler(
    RequestlessExceptionLoggingMixin, CorsSupportMixin, tornado.web.RequestHandler
):
    """
    `tornado.web.RequestHandler <http://tornado.readthedocs.org/en/branch4.0/web.html#request-handlers>`_ that proxies
    requests to a preconfigured url and returns the response. Allows delivery of the requested content as attachment
    and access validation through an optional callback.

    This will use `tornado.httpclient.AsyncHTTPClient <http://tornado.readthedocs.org/en/branch4.0/httpclient.html#tornado.httpclient.AsyncHTTPClient>`_
    for making the request to the configured endpoint and return the body of the client response with the status code
    from the client response and the following headers:

    * ``Date``, ``Cache-Control``, ``Expires``, ``ETag``, ``Server``, ``Content-Type`` and ``Location`` will be copied over.
    * If ``as_attachment`` is set to True, ``Content-Disposition`` will be set to ``attachment``. If ``basename`` is
      set including the attachment's ``filename`` attribute will be set to the base name followed by the extension
      guessed based on the MIME type from the ``Content-Type`` header of the response. If no extension can be guessed
      no ``filename`` attribute will be set.

    Arguments:
        url (str): URL to forward any requests to. A 404 response will be returned if this is not set. Defaults to ``None``.
        as_attachment (bool): Whether to serve files with ``Content-Disposition: attachment`` header (``True``)
            or not. Defaults to ``False``.
        basename (str): base name of file names to return as part of the attachment header, see above. Defaults to ``None``.
        access_validation (function): Callback to call in the ``get`` method to validate access to the resource. Will
            be called with ``self.request`` as parameter which contains the full tornado request object. Should raise
            a ``tornado.web.HTTPError`` if access is not allowed in which case the request will not be further processed.
            Defaults to ``None`` and hence no access validation being performed.
    """

    def initialize(
        self, url=None, as_attachment=False, basename=None, access_validation=None
    ):
        tornado.web.RequestHandler.initialize(self)
        self._url = url
        self._as_attachment = as_attachment
        self._basename = basename
        self._access_validation = access_validation

    @tornado.gen.coroutine
    def get(self, *args, **kwargs):
        if self._access_validation is not None:
            self._access_validation(self.request)
        if self._url is None:
            raise tornado.web.HTTPError(404)
        client = tornado.httpclient.AsyncHTTPClient()
        # forward the incoming request as-is (method, body, headers) without
        # following redirects, so Location headers reach the client untouched
        upstream_request = tornado.httpclient.HTTPRequest(
            url=self._url,
            method=self.request.method,
            body=self.request.body,
            headers=self.request.headers,
            follow_redirects=False,
            allow_nonstandard_methods=True,
        )
        try:
            return client.fetch(upstream_request, self.handle_response)
        except tornado.web.HTTPError as err:
            if hasattr(err, "response") and err.response:
                self.handle_response(err.response)
            else:
                raise tornado.web.HTTPError(500)

    def handle_response(self, response):
        """Relay the upstream response: status, whitelisted headers, attachment disposition and body."""
        if response.error and not isinstance(response.error, tornado.web.HTTPError):
            raise tornado.web.HTTPError(500)
        filename = None
        self.set_status(response.code)
        # copy over the whitelisted upstream headers; Content-Type doubles as
        # the source for guessing an attachment file name
        for name in (
            "Date",
            "Cache-Control",
            "Server",
            "Content-Type",
            "Location",
            "Expires",
            "ETag",
        ):
            header_value = response.headers.get(name)
            if not header_value:
                continue
            self.set_header(name, header_value)
            if name == "Content-Type":
                filename = self.get_filename(header_value)
        if self._as_attachment:
            if filename is not None:
                self.set_header(
                    "Content-Disposition", "attachment; filename=%s" % filename
                )
            else:
                self.set_header("Content-Disposition", "attachment")
        if response.body:
            self.write(response.body)
        self.finish()

    def get_filename(self, content_type):
        """Derive an attachment name from the configured basename plus an extension guessed from *content_type*."""
        if not self._basename:
            return None
        parts = [fragment.strip() for fragment in content_type.split(";")]
        if not parts:
            return None
        extension = mimetypes.guess_extension(parts[0])
        return f"{self._basename}{extension}" if extension else None
class StaticDataHandler(
    RequestlessExceptionLoggingMixin, CorsSupportMixin, tornado.web.RequestHandler
):
    """
    `tornado.web.RequestHandler <http://tornado.readthedocs.org/en/branch4.0/web.html#request-handlers>`_ that answers
    every GET with a fixed blob of ``data`` served under a configurable ``content_type``.

    Arguments:
        data (str): The data with which to respond
        content_type (str): The content type with which to respond. Defaults to ``text/plain``
    """

    def initialize(self, data="", content_type="text/plain"):
        # payload and advertised content type, fixed at route registration
        self.data = data
        self.content_type = content_type

    def get(self, *args, **kwargs):
        self.set_status(200)
        self.set_header("Content-Type", self.content_type)
        self.write(self.data)
        self.flush()
        self.finish()
class DeprecatedEndpointHandler(CorsSupportMixin, tornado.web.RequestHandler):
    """
    `tornado.web.RequestHandler <http://tornado.readthedocs.org/en/branch4.0/web.html#request-handlers>`_ that answers
    any HTTP method with a permanent redirect to another ``url``, logging a deprecation notice on each hit.

    Arguments:
        url (str): URL to which to redirect
    """

    def initialize(self, url):
        self._url = url
        self._logger = logging.getLogger(__name__)

    def _handle_method(self, *args, **kwargs):
        # positional captures from the route are substituted into the target URL
        to_url = self._url.format(*args)
        self._logger.info(
            f"Redirecting deprecated endpoint {self.request.path} to {to_url}"
        )
        self.redirect(to_url, permanent=True)

    # every HTTP verb funnels into the same redirect logic
    get = _handle_method
    post = _handle_method
    put = _handle_method
    patch = _handle_method
    delete = _handle_method
    head = _handle_method
    options = _handle_method
class StaticZipBundleHandler(CorsSupportMixin, tornado.web.RequestHandler):
    """Streams a preconfigured list of files to the client as a (optionally compressed) ZIP bundle."""

    def initialize(
        self,
        files=None,
        as_attachment=True,
        attachment_name=None,
        access_validation=None,
        compress=False,
    ):
        if files is None:
            files = []
        if as_attachment and not attachment_name:
            raise ValueError("attachment name must be set if as_attachment is True")
        self._files = files
        self._as_attachment = as_attachment
        self._attachment_name = attachment_name
        self._access_validator = access_validation
        self._compress = compress

    def get(self, *args, **kwargs):
        if self._access_validator is not None:
            self._access_validator(self.request)
        return self.stream_zip(self._files)

    def get_attachment_name(self):
        return self._attachment_name

    def normalize_files(self, files):
        """Normalize entries to dicts; plain strings become ``{"path": ...}``, unusable entries are dropped."""
        normalized = []
        for entry in files:
            if isinstance(entry, str):
                normalized.append({"path": entry})
            elif isinstance(entry, dict) and (
                "path" in entry or "iter" in entry or "content" in entry
            ):
                normalized.append(entry)
        return normalized

    @tornado.gen.coroutine
    def stream_zip(self, files):
        """Build the ZIP stream from *files* and write it out chunk by chunk."""
        self.set_header("Content-Type", "application/zip")
        if self._as_attachment:
            self.set_header(
                "Content-Disposition",
                f'attachment; filename="{self.get_attachment_name()}"',
            )
        bundle = ZipStream(sized=True)
        if self._compress:
            try:
                bundle = ZipStream(compress_type=ZIP_DEFLATED)
            except RuntimeError:
                # no zlib support
                pass
        for entry in self.normalize_files(files):
            arcname = entry.get("name")
            source_path = entry.get("path")
            payload = entry.get("iter") or entry.get("content")
            if source_path:
                bundle.add_path(source_path, arcname=arcname)
            elif payload and arcname:
                bundle.add(payload, arcname=arcname)
        if bundle.sized:
            # only the uncompressed stream knows its size up front
            self.set_header("Content-Length", len(bundle))
        self.set_header("Last-Modified", bundle.last_modified)
        for part in bundle:
            try:
                self.write(part)
                yield self.flush()
            except tornado.iostream.StreamClosedError:
                # client disconnected mid-stream
                return
class DynamicZipBundleHandler(StaticZipBundleHandler):
    """
    Variant of ``StaticZipBundleHandler`` that derives the list of files to bundle from the request
    (``files`` query parameters on GET, a JSON or form body on POST) instead of static configuration.

    Arguments:
        path_validation (function): optional callback called with each resolved path; should raise a
            ``tornado.web.HTTPError`` for paths that must not be served
        path_processor (function): optional callback mapping a requested path to either a new path or
            a ``(name, path)`` tuple
        as_attachment (bool): whether to serve the bundle with a ``Content-Disposition: attachment`` header
        attachment_name (str): file name for the attachment header; required if ``as_attachment`` is set
        access_validation (function): optional access validation callback, called with the request
        compress (bool): whether to DEFLATE-compress the bundle if zlib is available
    """

    # noinspection PyMethodOverriding
    def initialize(
        self,
        path_validation=None,
        path_processor=None,
        as_attachment=True,
        attachment_name=None,
        access_validation=None,
        compress=False,
    ):
        if as_attachment and not attachment_name:
            raise ValueError("attachment name must be set if as_attachment is True")
        self._path_validator = path_validation
        self._path_processor = path_processor
        self._as_attachment = as_attachment
        self._attachment_name = attachment_name
        self._access_validator = access_validation
        self._compress = compress

    def get(self, *args, **kwargs):
        if self._access_validator is not None:
            self._access_validator(self.request)
        # query arguments arrive as bytes; normalize to text
        files = list(
            map(
                octoprint.util.to_unicode, self.request.query_arguments.get("files", [])
            )
        )
        return self._get_files_zip(files)

    def post(self, *args, **kwargs):
        if self._access_validator is not None:
            self._access_validator(self.request)
        import json
        content_type = self.request.headers.get("Content-Type", "")
        try:
            if "application/json" in content_type:
                data = json.loads(self.request.body)
            else:
                data = self.request.body_arguments
        except Exception:
            # unparseable body -> client error
            raise tornado.web.HTTPError(400)
        return self._get_files_zip(
            list(map(octoprint.util.to_unicode, data.get("files", [])))
        )

    def _get_files_zip(self, files):
        """Process & validate the requested paths, then stream them as a ZIP bundle."""
        files = self.normalize_files(files)
        if not files:
            raise tornado.web.HTTPError(400)
        for f in files:
            if "path" in f:
                if callable(self._path_processor):
                    path = self._path_processor(f["path"])
                    if isinstance(path, tuple):
                        f["name"], f["path"] = path
                    else:
                        f["path"] = path
                # guard like _path_processor above: path_validation defaults to
                # None, and calling None would raise a TypeError instead of a
                # proper HTTP error
                if callable(self._path_validator):
                    self._path_validator(f["path"])
        return self.stream_zip(files)
class SystemInfoBundleHandler(CorsSupportMixin, tornado.web.RequestHandler):
    """Generates an OctoPrint systeminfo bundle on the fly and streams it to the client as a ZIP attachment."""

    # noinspection PyMethodOverriding
    def initialize(self, access_validation=None):
        self._access_validator = access_validation

    @tornado.gen.coroutine
    def get(self, *args, **kwargs):
        if self._access_validator is not None:
            self._access_validator(self.request)
        from octoprint.cli.systeminfo import (
            get_systeminfo,
            get_systeminfo_bundle,
            get_systeminfo_bundle_name,
        )
        from octoprint.server import (
            connectivityChecker,
            environmentDetector,
            pluginManager,
            printer,
            safe_mode,
        )
        from octoprint.settings import settings
        # collect the systeminfo fields, annotated with request-specific extras
        info = get_systeminfo(
            environmentDetector,
            connectivityChecker,
            settings(),
            {
                "browser.user_agent": self.request.headers.get("User-Agent"),
                "octoprint.safe_mode": safe_mode is not None,
                "systeminfo.generator": "zipapi",
            },
        )
        bundle = get_systeminfo_bundle(
            info,
            settings().getBaseFolder("logs"),
            printer=printer,
            plugin_manager=pluginManager,
        )
        self.set_header("Content-Type", "application/zip")
        self.set_header(
            "Content-Disposition",
            f'attachment; filename="{get_systeminfo_bundle_name()}"',
        )
        if bundle.sized:
            self.set_header("Content-Length", len(bundle))
        self.set_header("Last-Modified", bundle.last_modified)
        for part in bundle:
            try:
                self.write(part)
                yield self.flush()
            except tornado.iostream.StreamClosedError:
                # client disconnected mid-stream
                return

    def get_attachment_name(self):
        import time
        # timestamped name so successive bundles don't collide on disk
        return "octoprint-systeminfo-{}.zip".format(time.strftime("%Y%m%d%H%M%S"))
class GlobalHeaderTransform(tornado.web.OutputTransform):
    """
    Tornado output transform applying a global header policy to every response: entries from
    ``HEADERS`` are added when absent, ``FORCED_HEADERS`` always overwrite, and
    ``REMOVED_HEADERS`` are stripped if present.
    """

    # defaults; concrete policies are baked into subclasses via for_headers()
    HEADERS = {}
    FORCED_HEADERS = {}
    REMOVED_HEADERS = []

    @classmethod
    def for_headers(cls, name, headers=None, forced_headers=None, removed_headers=None):
        """
        Create a ``GlobalHeaderTransform`` subclass named *name* configured with the given header policy.

        :param name: name of the generated subclass
        :param headers: dict of headers to add if not already present on a response
        :param forced_headers: dict of headers to always set, overwriting existing values
        :param removed_headers: list of header names to strip from responses
        :return: the configured subclass
        """
        if headers is None:
            headers = {}
        if forced_headers is None:
            forced_headers = {}
        if removed_headers is None:
            removed_headers = []
        return type(
            name,
            (GlobalHeaderTransform,),
            {
                "HEADERS": headers,
                "FORCED_HEADERS": forced_headers,
                "REMOVED_HEADERS": removed_headers,
            },
        )

    def __init__(self, request):
        tornado.web.OutputTransform.__init__(self, request)

    def transform_first_chunk(self, status_code, headers, chunk, finishing):
        """Apply the configured header policy to the response's first chunk."""
        for header, value in self.HEADERS.items():
            if header not in headers:
                headers[header] = value
        for header, value in self.FORCED_HEADERS.items():
            headers[header] = value
        for header in self.REMOVED_HEADERS:
            # guard the deletion: deleting a header that is not present on this
            # response would raise a KeyError and break the response mid-flight
            if header in headers:
                del headers[header]
        return status_code, headers, chunk
# ~~ Factory method for creating Flask access validation wrappers from the Tornado request context
def access_validation_factory(app, validator, *args):
    """
    Creates an access validation wrapper using the supplied validator.

    :param validator: the access validator to use inside the validation wrapper
    :return: an access validator taking a request as parameter and performing the request validation
    """

    # noinspection PyProtectedMember
    def validate(request):
        """
        Creates a custom wsgi and Flask request context in order to be able to process user information
        stored in the current session.

        :param request: The Tornado request for which to create the environment and context
        """
        import flask

        wsgi_environ = WsgiInputContainer.environ(request)
        with app.request_context(wsgi_environ):
            session = app.session_interface.open_session(app, flask.request)
            user = None
            user_id = session.get("_user_id")
            # flask-login hid these helpers behind underscores in a minor
            # release without offering public replacements, so we knowingly
            # keep using the protected API here
            if user_id is not None and app.login_manager._user_callback is not None:
                user = app.login_manager._user_callback(user_id)
            app.login_manager._update_request_context_with_user(user)
            validator(flask.request, *args)

    return validate
def path_validation_factory(path_filter, status_code=404):
    """
    Creates a request path validation wrapper returning the defined status code if the supplied path_filter returns False.

    :param path_filter: the path filter to use on the requested path, should return False for requests that should
        be responded with the provided error code.
    :return: a request path validator taking a request path as parameter and performing the request validation
    """

    def validate_path(path):
        if path_filter(path):
            return
        raise tornado.web.HTTPError(status_code)

    return validate_path
def validation_chain(*validators):
    """Combine several request validators into one that runs them in the given order."""

    def chained(request):
        for validate in validators:
            validate(request)

    return chained
|
extractor | lego | # coding: utf-8
from __future__ import unicode_literals
import re
import uuid
from ..compat import compat_HTTPError
from ..utils import ExtractorError, int_or_none, qualities
from .common import InfoExtractor
class LEGOIE(InfoExtractor):
    """Extractor for videos on lego.com locale-specific video pages."""

    _VALID_URL = r"https?://(?:www\.)?lego\.com/(?P<locale>[a-z]{2}-[a-z]{2})/(?:[^/]+/)*videos/(?:[^/]+/)*[^/?#]+-(?P<id>[0-9a-f]{32})"
    _TESTS = [
        {
            "url": "http://www.lego.com/en-us/videos/themes/club/blocumentary-kawaguchi-55492d823b1b4d5e985787fa8c2973b1",
            "md5": "f34468f176cfd76488767fc162c405fa",
            "info_dict": {
                "id": "55492d82-3b1b-4d5e-9857-87fa8c2973b1_en-US",
                "ext": "mp4",
                "title": "Blocumentary Great Creations: Akiyuki Kawaguchi",
                "description": "Blocumentary Great Creations: Akiyuki Kawaguchi",
            },
        },
        {
            # geo-restricted but the contentUrl contain a valid url
            "url": "http://www.lego.com/nl-nl/videos/themes/nexoknights/episode-20-kingdom-of-heroes-13bdc2299ab24d9685701a915b3d71e7##sp=399",
            "md5": "c7420221f7ffd03ff056f9db7f8d807c",
            "info_dict": {
                "id": "13bdc229-9ab2-4d96-8570-1a915b3d71e7_nl-NL",
                "ext": "mp4",
                "title": "Aflevering 20: Helden van het koninkrijk",
                "description": "md5:8ee499aac26d7fa8bcb0cedb7f9c3941",
                "age_limit": 5,
            },
        },
        {
            # with subtitle
            "url": "https://www.lego.com/nl-nl/kids/videos/classic/creative-storytelling-the-little-puppy-aa24f27c7d5242bc86102ebdc0f24cba",
            "info_dict": {
                "id": "aa24f27c-7d52-42bc-8610-2ebdc0f24cba_nl-NL",
                "ext": "mp4",
                "title": "De kleine puppy",
                "description": "md5:5b725471f849348ac73f2e12cfb4be06",
                "age_limit": 1,
                "subtitles": {
                    "nl": [
                        {
                            "ext": "srt",
                            "url": r"re:^https://.+\.srt$",
                        }
                    ],
                },
            },
            "params": {
                "skip_download": True,
            },
        },
    ]
    # Named quality level -> (abr, height, width).
    _QUALITIES = {
        "Lowest": (64, 180, 320),
        "Low": (64, 270, 480),
        "Medium": (96, 360, 640),
        "High": (128, 540, 960),
        "Highest": (128, 720, 1280),
    }

    def _real_extract(self, url):
        locale, video_id = re.match(self._VALID_URL, url).groups()
        # Locale looks like "en-us"; its country part drives geo bypass.
        countries = [locale.split("-")[1].upper()]
        self._initialize_geo_bypass(
            {
                "countries": countries,
            }
        )

        try:
            item = self._download_json(
                # https://contentfeed.services.lego.com/api/v2/item/[VIDEO_ID]?culture=[LOCALE]&contentType=Video
                "https://services.slingshot.lego.com/mediaplayer/v2",
                video_id,
                query={
                    "videoId": "%s_%s" % (uuid.UUID(video_id), locale),
                },
                headers=self.geo_verification_headers(),
            )
        except ExtractorError as e:
            # HTTP 451 ("unavailable for legal reasons") signals geo restriction.
            if isinstance(e.cause, compat_HTTPError) and e.cause.code == 451:
                self.raise_geo_restricted(countries=countries)
            raise

        video = item["Video"]
        video_id = video["Id"]
        title = video["Title"]

        q = qualities(["Lowest", "Low", "Medium", "High", "Highest"])
        formats = []
        for video_source in item.get("VideoFormats", []):
            video_source_url = video_source.get("Url")
            if not video_source_url:
                continue
            video_source_format = video_source.get("Format")
            if video_source_format == "F4M":
                formats.extend(
                    self._extract_f4m_formats(
                        video_source_url,
                        video_id,
                        f4m_id=video_source_format,
                        fatal=False,
                    )
                )
            elif video_source_format == "M3U8":
                formats.extend(
                    self._extract_m3u8_formats(
                        video_source_url,
                        video_id,
                        "mp4",
                        "m3u8_native",
                        m3u8_id=video_source_format,
                        fatal=False,
                    )
                )
            else:
                video_source_quality = video_source.get("Quality")
                # Join whichever of format/quality are present, e.g. "MP4-High".
                format_id = [
                    v for v in (video_source_format, video_source_quality) if v
                ]
                f = {
                    "format_id": "-".join(format_id),
                    "quality": q(video_source_quality),
                    "url": video_source_url,
                }
                quality = self._QUALITIES.get(video_source_quality)
                if quality:
                    f.update(
                        {
                            "abr": quality[0],
                            "height": quality[1],
                            "width": quality[2],
                        }
                    )
                formats.append(f)
        self._sort_formats(formats)

        subtitles = {}
        sub_file_id = video.get("SubFileId")
        # An all-zero GUID means "no subtitle file".
        if sub_file_id and sub_file_id != "00000000-0000-0000-0000-000000000000":
            net_storage_path = video.get("NetstoragePath")
            invariant_id = video.get("InvariantId")
            video_file_id = video.get("VideoFileId")
            video_version = video.get("VideoVersion")
            if net_storage_path and invariant_id and video_file_id and video_version:
                subtitles.setdefault(locale[:2], []).append(
                    {
                        "url": "https://lc-mediaplayerns-live-s.legocdn.com/public/%s/%s_%s_%s_%s_sub.srt"
                        % (
                            net_storage_path,
                            invariant_id,
                            video_file_id,
                            locale,
                            video_version,
                        ),
                    }
                )

        return {
            "id": video_id,
            "title": title,
            "description": video.get("Description"),
            "thumbnail": video.get("GeneratedCoverImage")
            or video.get("GeneratedThumbnail"),
            "duration": int_or_none(video.get("Length")),
            "formats": formats,
            "subtitles": subtitles,
            "age_limit": int_or_none(video.get("AgeFrom")),
            "season": video.get("SeasonTitle"),
            "season_number": int_or_none(video.get("Season")) or None,
            "episode_number": int_or_none(video.get("Episode")) or None,
        }
|
deluge-webui | core | #
# Copyright (C) 2009 Damien Churchill <damoxc@gmail.com>
#
# Basic plugin template created by:
# Copyright (C) 2008 Martijn Voncken <mvoncken@gmail.com>
# Copyright (C) 2007-2009 Andrew Resch <andrewresch@gmail.com>
#
# This file is part of Deluge and is licensed under GNU General Public License 3.0, or later, with
# the additional special exception to link portions of this program with the OpenSSL library.
# See LICENSE for more details.
#
import logging
import deluge.component as component
from deluge import configmanager
from deluge.core.rpcserver import export
from deluge.plugins.pluginbase import CorePluginBase
from twisted.internet import defer
from twisted.internet.error import CannotListenError
try:
from deluge.ui.web import server
except ImportError:
server = False
log = logging.getLogger(__name__)

# Default WebUI plugin settings: server disabled, plain HTTP, deluge-web's
# standard port 8112.
DEFAULT_PREFS = {"enabled": False, "ssl": False, "port": 8112}
class Core(CorePluginBase):
    """WebUI plugin core: runs the deluge-web server inside the daemon."""

    # Shared DelugeWeb instance; None until a server has been created.
    server = None

    def enable(self):
        """Load the plugin config and start the server if enabled."""
        self.config = configmanager.ConfigManager("web_plugin.conf", DEFAULT_PREFS)
        if self.config["enabled"]:
            self.start_server()

    def disable(self):
        """Shut the server down when the plugin is disabled."""
        self.stop_server()

    def update(self):
        pass

    def _on_stop(self, *args):
        # Deferred callback used by restart_server(): start again after stop.
        return self.start_server()

    @export
    def got_deluge_web(self):
        """Status of deluge-web module installation.

        Check if the deluge.ui.web.server module is installed and has been
        successfully imported.

        Returns:
            bool: True if deluge-web is installed and available, otherwise False.
        """
        return bool(server)

    def start_server(self):
        """Start the web server, creating it on first use.

        Returns:
            bool: True if the server was started, False if deluge-web is
                not available.

        Raises:
            CannotListenError: if the configured port cannot be bound.
        """
        if not self.server:
            if not self.got_deluge_web():
                return False
            try:
                # Reuse an already-registered DelugeWeb component if present.
                self.server = component.get("DelugeWeb")
            except KeyError:
                self.server = server.DelugeWeb(daemon=False)
        # Apply the current configuration before (re)starting.
        self.server.port = self.config["port"]
        self.server.https = self.config["ssl"]
        try:
            self.server.start()
        except CannotListenError as ex:
            log.warning("Failed to start WebUI server: %s", ex)
            raise
        return True

    def stop_server(self):
        """Stop the server if it is running; always returns a Deferred."""
        if self.server:
            return self.server.stop()
        return defer.succeed(True)

    def restart_server(self):
        return self.stop_server().addCallback(self._on_stop)

    @export
    def set_config(self, config):
        """Sets the config dictionary and applies the change to the server."""
        action = None
        if "enabled" in config:
            if config["enabled"] != self.config["enabled"]:
                # Conditional expression instead of the fragile `and/or` idiom.
                action = "start" if config["enabled"] else "stop"
        if "ssl" in config:
            if not action:
                action = "restart"
        for key in config:
            self.config[key] = config[key]
        self.config.save()
        if action == "start":
            return self.start_server()
        elif action == "stop":
            return self.stop_server()
        elif action == "restart":
            return self.restart_server()

    @export
    def get_config(self):
        """Returns the config dictionary."""
        return self.config.config
|
saveddata | mutatedMixin | # ===============================================================================
# Copyright (C) 2010 Diego Duclos
#
# This file is part of eos.
#
# eos is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 2 of the License, or
# (at your option) any later version.
#
# eos is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with eos. If not, see <http://www.gnu.org/licenses/>.
# ===============================================================================
import eos.db
from logbook import Logger
pyfalog = Logger(__name__)
class MutaError(Exception):
    """Raised when mutated-item state cannot be reconstructed (e.g. the
    referenced base item no longer exists)."""

    pass
class MutatedMixin:
    """Mixin adding mutaplasmid (mutation) support to fit items.

    A mutated item references a base item plus a mutaplasmid; the mixin keeps
    the item's attributes synchronized with the base item and manages the
    per-attribute mutator objects.
    """

    @property
    def isMutated(self):
        # Mutated only when both the base item and the mutaplasmid are set.
        return bool(self.baseItemID and self.mutaplasmidID)

    @property
    def baseItem(self):
        return self.__baseItem

    @property
    def mutaplasmid(self):
        return self.__mutaplasmid

    @property
    def fullName(self):
        """Display name, prefixed with the mutaplasmid short name when mutated."""
        if self.isMutated:
            mutaShortName = self.mutaplasmid.shortName
            mutaFullName = self.mutaplasmid.item.customName
            # Short name can be unavailable for non-english language
            if mutaShortName != mutaFullName:
                return f"{mutaShortName} {self.baseItem.customName}"
        return self.item.customName

    def _mutaInit(self, baseItem, mutaplasmid):
        """Record the mutation source and swap in the item variant that
        carries the base item's attributes."""
        self.baseItemID = baseItem.ID if baseItem is not None else None
        self.mutaplasmidID = mutaplasmid.ID if mutaplasmid is not None else None
        if baseItem is not None:
            # we're working with a mutated module, need to get abyssal module loaded with the base attributes
            # Note: there may be a better way of doing this, such as a method on this class to convert(mutaplasmid).
            # This will require a bit more research though, considering there has never been a need to "swap" out
            # the item of a Module before, and there may be assumptions taken with regards to the item never
            # changing (pre-calculated / cached results, for example)
            self._item = eos.db.getItemWithBaseItemAttribute(
                self._item.ID, self.baseItemID
            )
            self.__baseItem = baseItem
            self.__mutaplasmid = mutaplasmid
        else:
            self.__baseItem = None
            self.__mutaplasmid = None

    def _mutaReconstruct(self):
        """Re-resolve the base item and mutaplasmid after loading from the DB.

        Raises:
            MutaError: if the referenced base item does not exist anymore.
        """
        self.__baseItem = None
        self.__mutaplasmid = None
        if self.baseItemID:
            self._item = eos.db.getItemWithBaseItemAttribute(
                self.itemID, self.baseItemID
            )
            self.__baseItem = eos.db.getItem(self.baseItemID)
            self.__mutaplasmid = eos.db.getMutaplasmid(self.mutaplasmidID)
            if self.__baseItem is None:
                # Log the missing base item's ID (previously logged itemID).
                pyfalog.error("Base Item (id: {0}) does not exist", self.baseItemID)
                raise MutaError

    def _mutaLoadMutators(self, mutatorClass):
        # Instantiate / remove mutators if this is a mutated module
        if self.__baseItem:
            for mutaAttr in self.mutaplasmid.attributes:
                attr = self.item.attributes[mutaAttr.name]
                if attr.ID not in self.mutators:
                    # Instantiating the mutator registers it on this item.
                    mutatorClass(self, attr, attr.value)
            # @todo: remove attributes that are no longer part of the mutaplasmid.

    @property
    def _mutaIsInvalid(self):
        """True when the mutation state is internally inconsistent."""
        if self.item.isAbyssal and not self.isMutated:
            return True
        if self.isMutated and not self.__mutaplasmid:
            return True
        return False

    def _mutaApplyMutators(self, mutatorClass, targetInstance=None):
        """Re-create mutators, on *targetInstance* if given (e.g. when copying)."""
        if targetInstance is None:
            targetInstance = self
        for mutator in self.mutators.values():
            mutatorClass(targetInstance, mutator.attribute, mutator.value)
|
views | goal | """ non-interactive pages """
from bookwyrm import forms, models
from bookwyrm.status import create_generated_note
from django.contrib.auth.decorators import login_required
from django.http import HttpResponseNotFound
from django.shortcuts import redirect
from django.template.loader import get_template
from django.template.response import TemplateResponse
from django.utils import timezone
from django.utils.decorators import method_decorator
from django.views import View
from django.views.decorators.http import require_POST
from .helpers import get_user_from_username
# pylint: disable= no-self-use
@method_decorator(login_required, name="dispatch")
class Goal(View):
    """track books for the year"""

    def get(self, request, username, year):
        """reading goal page"""
        user = get_user_from_username(request.user, username)
        year = int(year)
        goal = models.AnnualGoal.objects.filter(year=year, user=user).first()
        # Only the goal's owner may see a page with no (public) goal.
        if not goal and user != request.user:
            return HttpResponseNotFound()

        current_year = timezone.now().year
        # Reuse current_year (previously recomputed timezone.now().year,
        # which could race across a year boundary).
        if not goal and year != current_year:
            return redirect("user-goal", username, current_year)

        if goal:
            goal.raise_visible_to_user(request.user)

        data = {
            "goal_form": forms.GoalForm(instance=goal),
            "goal": goal,
            "user": user,
            "year": year,
            "is_self": request.user == user,
        }
        return TemplateResponse(request, "user/goal.html", data)

    def post(self, request, username, year):
        """update or create an annual goal"""
        year = int(year)
        user = get_user_from_username(request.user, username)
        goal = models.AnnualGoal.objects.filter(year=year, user=user).first()
        form = forms.GoalForm(request.POST, instance=goal)
        if not form.is_valid():
            # Re-render the page with the invalid form so errors are shown.
            data = {
                "goal_form": form,
                "goal": goal,
                "year": year,
            }
            return TemplateResponse(request, "user/goal.html", data)
        goal = form.save(request)

        if request.POST.get("post-status"):
            # create status, if appropriate
            template = get_template("snippets/generated_status/goal.html")
            create_generated_note(
                request.user,
                template.render({"goal": goal, "user": user}).strip(),
                privacy=goal.privacy,
            )

        return redirect("user-goal", request.user.localname, year)
@require_POST
@login_required
def hide_goal(request):
    """don't keep bugging people to set a goal"""
    user = request.user
    user.show_goal = False
    user.save(broadcast=False, update_fields=["show_goal"])
    return redirect("/")
|
PyObjCTest | test_archive_python | """
Testcases for NSArchive-ing python objects.
(Implementation is incomplete)
"""
import os
import pickle
import sys
if sys.version_info[0] == 3:
import copyreg
else:
import copy_reg as copyreg
import objc._pycoder as pycoder
from PyObjCTest.fnd import (
NSArchiver,
NSArray,
NSData,
NSDictionary,
NSKeyedArchiver,
NSKeyedUnarchiver,
NSMutableArray,
NSMutableDictionary,
NSUnarchiver,
)
from PyObjCTools.TestSupport import *
#
# First set of tests: the stdlib tests for pickling, this
# should test everything but mixed Python/Objective-C
# object-graphs.
#
# Python 2/3 compatibility aliases: on Python 3 the removed ``unicode`` and
# ``long`` builtins are mapped onto ``str`` and ``int``.
if sys.version_info[0] == 3:
    unicode = str
    long = int

import test.pickletester

# Shorthand for the list subclass used by the extension-code tests below.
MyList = test.pickletester.MyList
class reduce_global(object):
    def __reduce__(self):
        # Returning a plain string means "pickle me as a reference to this
        # module-global name".
        return "reduce_global"


# Deliberately shadow the class with an instance of itself: archiving the
# instance must resolve back to this module-global object.
reduce_global = reduce_global()
# Quick hack to add a proper __repr__ to class C in
# pickletester, makes it a lot easier to debug.
def C__repr__(self):
    """Debug-friendly repr showing class name, identity and instance dict."""
    return "<%s instance at %#x: %r>" % (
        self.__class__.__name__,
        id(self),
        self.__dict__,
    )


# Monkey-patch the repr onto pickletester.C, then drop the helper name.
test.pickletester.C.__repr__ = C__repr__
del C__repr__
class myobject:
    # Deliberately inconsistent: __getinitargs__ reports two arguments while
    # __init__ accepts none, so unarchiving must raise TypeError (Python 2).
    def __init__(self):
        pass

    def __getinitargs__(self):
        return (1, 2)
class state_obj_1:
    # __getstate__ returns a (dict-state, slot-state) pair.
    def __getstate__(self):
        return ({"a": 1, 42: 3}, {"b": 2})
class mystr(str):
    """str subclass without extra state, for subclass round-trip tests."""

    __slots__ = ()
class myint(int):
    """int subclass without extra state, for subclass round-trip tests."""

    __slots__ = ()
def a_function():
    """Module-level function; archives as a reference to its global name."""
    pass
class a_classic_class:
    # Classic (old-style) class on Python 2; a plain class on Python 3.
    pass
class a_classic_class_with_state:
    """Fixture exercising the __getstate__/__setstate__ protocol."""

    def __getstate__(self):
        # Fixed state so tests can assert the unarchived attribute values.
        return {"a": 1}

    def __setstate__(self, state):
        # Restore every entry of *state* as an instance attribute.
        self.__dict__.update(state)
class a_newstyle_class(object):
    # Explicit new-style class (relevant on Python 2).
    pass
class newstyle_with_slots(object):
    # __slots__ including __dict__: both slotted and dynamic attributes
    # must survive the archive round-trip.
    __slots__ = ("a", "b", "__dict__")
class newstyle_with_setstate(object):
    # Stores the state it is given verbatim, so tests can inspect it.
    def __setstate__(self, state):
        self.state = state
def make_instance(state):
    """Reconstruct an a_reducing_class instance from its attribute dict."""
    obj = a_reducing_class()
    obj.__dict__.update(state)
    return obj
class a_reducing_class(object):
    # Pickles via __reduce__: rebuilt by make_instance from its __dict__.
    def __reduce__(self):
        return make_instance, (self.__dict__,)
class TestKeyedArchiveSimple(TestCase):
    """Round-trip plain Python objects through NSKeyedArchiver/NSKeyedUnarchiver."""

    def setUp(self):
        self.archiverClass = NSKeyedArchiver
        self.unarchiverClass = NSKeyedUnarchiver

    def _roundtrip(self, o):
        """Archive *o*, assert the archive is NSData, return the unarchived copy.

        Factored out of the tests below, which all repeated this
        archive / assertIsInstance / unarchive pattern inline.
        """
        buf = self.archiverClass.archivedDataWithRootObject_(o)
        self.assertIsInstance(buf, NSData)
        return self.unarchiverClass.unarchiveObjectWithData_(buf)

    def test_unknown_type(self):
        # Removing the decoder for global references must make unarchiving fail.
        try:
            orig = pycoder.decode_dispatch[pycoder.kOP_GLOBAL]
            del pycoder.decode_dispatch[pycoder.kOP_GLOBAL]

            o = TestKeyedArchiveSimple
            buf = self.archiverClass.archivedDataWithRootObject_(o)
            self.assertRaises(
                pickle.UnpicklingError,
                self.unarchiverClass.unarchiveObjectWithData_,
                buf,
            )
        finally:
            pycoder.decode_dispatch[pycoder.kOP_GLOBAL] = orig

    def test_reducing_issues(self):
        class Error1(object):
            def __reduce__(self):
                # Invalid: second element is not a tuple of arguments.
                return dir, "foo"

        object1 = Error1()
        self.assertRaises(
            pickle.PicklingError,
            self.archiverClass.archivedDataWithRootObject_,
            object1,
        )

        class Error2(object):
            def __reduce__(self):
                # Invalid: first element is not callable.
                return "foo", (1, 2)

        object2 = Error2()
        self.assertRaises(
            pickle.PicklingError,
            self.archiverClass.archivedDataWithRootObject_,
            object2,
        )

    def test_various_objects(self):
        o = a_newstyle_class()
        o.attr1 = False
        o.attr2 = None
        o.__dict__[42] = 3

        v = self._roundtrip(o)
        self.assertIsInstance(v, a_newstyle_class)
        self.assertEqual(v.__dict__, o.__dict__)

    def test_misc_globals(self):
        global mystr
        orig = mystr

        # Global name deleted: archiving an instance must fail.
        try:
            del mystr
            o = orig("hello")
            self.assertRaises(
                pickle.PicklingError, self.archiverClass.archivedDataWithRootObject_, o
            )
        finally:
            mystr = orig

        # Global name rebound to a different object: archiving must fail too.
        try:
            mystr = None
            o = orig("hello")
            self.assertRaises(
                pickle.PicklingError, self.archiverClass.archivedDataWithRootObject_, o
            )
        finally:
            mystr = orig

        # copyreg extension codes: archives made while the code is registered
        # can only be unarchived while it is still registered.
        try:
            copyreg.add_extension(
                a_newstyle_class.__module__, a_newstyle_class.__name__, 42
            )
            self.assertIn(
                (a_newstyle_class.__module__, a_newstyle_class.__name__),
                copyreg._extension_registry,
            )

            o = a_newstyle_class
            buf = self.archiverClass.archivedDataWithRootObject_(o)
            self.assertIsInstance(buf, NSData)
            v = self.unarchiverClass.unarchiveObjectWithData_(buf)
            self.assertIs(v, o)

            # Unarchiving the same buffer a second time still works.
            self.assertIsInstance(buf, NSData)
            v = self.unarchiverClass.unarchiveObjectWithData_(buf)
            self.assertIs(v, o)

            copyreg.remove_extension(
                a_newstyle_class.__module__, a_newstyle_class.__name__, 42
            )
            self.assertRaises(
                ValueError, self.unarchiverClass.unarchiveObjectWithData_, buf
            )
        finally:
            mystr = orig
            try:
                copyreg.remove_extension(
                    a_newstyle_class.__module__, a_newstyle_class.__name__, 42
                )
            except ValueError:
                pass

        # A function without __module__ is still findable via sys.
        def f():
            pass

        del f.__module__

        try:
            sys.f = f
            v = self._roundtrip(f)
            self.assertIs(v, f)
        finally:
            del f

    @onlyPython2
    def test_invalid_initargs(self):
        v = myobject()
        buf = self.archiverClass.archivedDataWithRootObject_(v)
        self.assertIsInstance(buf, NSData)
        self.assertRaises(TypeError, self.unarchiverClass.unarchiveObjectWithData_, buf)

    def test_class_with_slots(self):
        # Test archiving a class with slots (plus a __dict__ attribute).
        o = newstyle_with_slots()
        o.a = 1
        o.b = 2
        o.c = 3

        v = self._roundtrip(o)
        self.assertIsInstance(v, newstyle_with_slots)
        self.assertEqual(v.a, 1)
        self.assertEqual(v.b, 2)
        self.assertEqual(v.c, 3)

    @onlyPython2
    def test_class_with_state(self):
        o = state_obj_1()
        v = self._roundtrip(o)
        self.assertIsInstance(v, state_obj_1)
        self.assertEqual(v.a, 1)
        self.assertEqual(v.b, 2)
        self.assertEqual(v.__dict__[42], 3)

    def test_class_with_setstate(self):
        o = newstyle_with_setstate()
        o.a = 1
        o.b = 2
        v = self._roundtrip(o)
        self.assertIsInstance(v, newstyle_with_setstate)
        self.assertEqual(v.state, {"a": 1, "b": 2})

    def test_reduce_as_global(self):
        # Test class where __reduce__ returns a string (the name of a global)
        v = self._roundtrip(reduce_global)
        self.assertIs(v, reduce_global)

    def test_reduce_invalid(self):
        class invalid_reduce(object):
            def __reduce__(self):
                # Not a tuple at all.
                return 42

        self.assertRaises(
            pickle.PicklingError,
            self.archiverClass.archivedDataWithRootObject_,
            invalid_reduce(),
        )

        class invalid_reduce(object):
            def __reduce__(self):
                # Too short.
                return (1,)

        self.assertRaises(
            pickle.PicklingError,
            self.archiverClass.archivedDataWithRootObject_,
            invalid_reduce(),
        )

        class invalid_reduce(object):
            def __reduce__(self):
                # Too long.
                return (1, 2, 3, 4, 5, 6)

        self.assertRaises(
            pickle.PicklingError,
            self.archiverClass.archivedDataWithRootObject_,
            invalid_reduce(),
        )

    def test_basic_objects(self):
        # Globals round-trip to the identical object.
        v = self._roundtrip(a_function)
        self.assertIs(v, a_function)

        v = self._roundtrip(a_classic_class)
        self.assertIs(v, a_classic_class)

        v = self._roundtrip(a_newstyle_class)
        self.assertIs(v, a_newstyle_class)

        o = a_classic_class()
        o.x = 42
        v = self._roundtrip(o)
        self.assertIsInstance(v, a_classic_class)
        self.assertEqual(v.x, 42)

        o = a_classic_class_with_state()
        v = self._roundtrip(o)
        self.assertIsInstance(v, a_classic_class_with_state)
        self.assertEqual(v.a, 1)

        for o in (
            None,
            [None],
            (None,),
            {
                None,
            },
        ):
            v = self._roundtrip(o)
            self.assertEqual(o, v)

        for o in (True, False, [True]):
            v = self._roundtrip(o)
            self.assertEqual(o, v)

        o = ("aap", 42)
        v = self._roundtrip(o)
        self.assertIsInstance(v, tuple)
        self.assertEqual(o, v)

        o = ["aap", 42]
        v = self._roundtrip(o)
        self.assertIsInstance(v, list)
        self.assertEqual(o, v)

        o = {"aap": "monkey", "noot": "nut"}
        v = self._roundtrip(o)
        self.assertIsInstance(v, dict)
        self.assertEqual(o, v)

        o = {1, 2, 3}
        v = self._roundtrip(o)
        self.assertIsInstance(v, set)
        self.assertEqual(o, v)

        o = "hello world"
        v = self._roundtrip(o)
        self.assertIsInstance(v, str)
        self.assertEqual(o, v)

        o = b"hello world"
        v = self._roundtrip(o)
        self.assertIsInstance(v, bytes)
        self.assertEqual(o, v)

        o = b"hello world".decode("ascii")
        v = self._roundtrip(o)
        self.assertIsInstance(v, type(o))
        self.assertEqual(o, v)

        o = mystr("hello world")
        v = self._roundtrip(o)
        self.assertIsInstance(v, mystr)
        self.assertEqual(o, v)

        o = myint(4)
        v = self._roundtrip(o)
        self.assertIsInstance(v, myint)
        self.assertEqual(o, v)

        o = 42.5
        v = self._roundtrip(o)
        self.assertIsInstance(v, float)
        self.assertEqual(o, v)

        if sys.version_info[0] == 2:
            v = self._roundtrip(unicode("hello"))
            self.assertIsInstance(v, unicode)

        v = self._roundtrip("hello")
        self.assertIsInstance(v, str)
        self.assertEqual(v, "hello")

        v = self._roundtrip(sys.maxsize * 4)
        self.assertIsInstance(v, long)
        self.assertEqual(v, sys.maxsize * 4)

        v = self._roundtrip(sys.maxsize**4)
        self.assertIsInstance(v, long)
        self.assertEqual(v, sys.maxsize**4)

    def testSimpleLists(self):
        v = self._roundtrip([])
        self.assertIsInstance(v, list)
        self.assertEqual(v, [])

        o = [unicode("hello"), 42]
        v = self._roundtrip(o)
        self.assertIsInstance(v, list)
        self.assertEqual(v, o)

    def testSimpleTuples(self):
        v = self._roundtrip(())
        self.assertIsInstance(v, tuple)
        self.assertEqual(v, ())

        o = (unicode("hello"), 42)
        v = self._roundtrip(o)
        self.assertIsInstance(v, tuple)
        self.assertEqual(v, o)

    def testSimpleDicts(self):
        v = self._roundtrip({})
        self.assertIsInstance(v, dict)
        self.assertEqual(v, {})

        o = {unicode("hello"): unicode("bar"), 42: 1.5}
        v = self._roundtrip(o)
        self.assertIsInstance(v, dict)
        self.assertEqual(v, o)

    def testNestedDicts(self):
        o = {unicode("hello"): {1: 2}, unicode("world"): unicode("foobar")}
        v = self._roundtrip(o)
        self.assertIsInstance(v, dict)
        self.assertEqual(v, o)

        # Self-referencing dict must come back self-referencing.
        o = {}
        o[unicode("self")] = o
        v = self._roundtrip(o)
        self.assertIsInstance(v, dict)
        self.assertIs(v[unicode("self")], v)

    def testNestedSequences(self):
        o = [1, 2, 3, (5, (unicode("a"), unicode("b")), 6), {1: 2}]
        o[-1] = o  # recursive reference

        v = self._roundtrip(o)
        self.assertIsInstance(v, list)
        self.assertIs(v[-1], v)
        self.assertEqual(v[:-1], o[:-1])

    def testNestedInstance(self):
        o = a_classic_class()
        o.value = o  # recursive reference

        v = self._roundtrip(o)
        self.assertIsInstance(v, a_classic_class)
        self.assertIs(v.value, v)

    def dont_testNestedInstanceWithReduce(self):
        # Test recursive instantation with a __reduce__ method
        #
        # This test is disabled because pickle doesn't support
        # this (and we don't either)
        o = a_reducing_class()
        o.value = o

        b = pickle.dumps(o)
        o2 = pickle.loads(b)

        v = self._roundtrip(o)
        self.assertIsInstance(v, a_reducing_class)
        self.assertIs(v.value, v)

    def test_reducing_object(self):
        o = a_reducing_class()
        o.value = 42
        v = self._roundtrip(o)
        self.assertIsInstance(v, a_reducing_class)
        # Assert on the round-tripped object (previously asserted o.value,
        # which was trivially true and tested nothing).
        self.assertEqual(v.value, 42)

    def testRecusiveNesting(self):
        lst = []
        d = {1: lst}
        i = a_classic_class()
        i.attr = d
        lst.append(i)

        v = self._roundtrip(lst)
        self.assertEqual(len(v), 1)
        self.assertEqual(dir(v[0]), dir(i))
        self.assertEqual(list(v[0].attr.keys()), [1])
        self.assertIs(v[0].attr[1], v)

        v = self._roundtrip(d)
        self.assertIs(v[1][0].attr, v)

    def testTupleOfObjects(self):
        o = a_classic_class()
        t = (o, o, o)

        v = self._roundtrip(t)
        self.assertIsInstance(v, tuple)
        self.assertEqual(len(v), 3)
        self.assertIsInstance(v[0], a_classic_class)
        # All three slots must reference the same unarchived object.
        self.assertIs(v[0], v[1])
        self.assertIs(v[0], v[2])
class TestArchiveSimple(TestKeyedArchiveSimple):
    """Re-run the simple round-trip tests with the non-keyed NSArchiver."""

    def setUp(self):
        self.archiverClass = NSArchiver
        self.unarchiverClass = NSUnarchiver
class TestKeyedArchivePlainPython(TestCase, test.pickletester.AbstractPickleTests):
    """Run the stdlib pickletester suite with NSKeyedArchiver as the pickler.

    dumps/loads are overridden to archive instead of pickle; tests that
    inspect pickle's byte stream are disabled below.
    """

    # Ensure that we don't run every test case three times
    def setUp(self):
        self._protocols = test.pickletester.protocols
        test.pickletester.protocols = (2,)

    def tearDown(self):
        test.pickletester.protocols = self._protocols

    def dumps(self, arg, proto=0, fast=0):
        # Ignore proto and fast
        return NSKeyedArchiver.archivedDataWithRootObject_(arg)

    def loads(self, buf):
        return NSKeyedUnarchiver.unarchiveObjectWithData_(buf)

    # Disable a number of methods, these test things we're not interested in.
    # (Most of these look at the generated byte-stream, as we're not writing data in pickle's
    # format such tests are irrelevant to archiving support)
    @onlyIf(0, "python unittest not relevant for archiving")
    def test_negative_put(self):
        pass

    @onlyIf(0, "python unittest not relevant for archiving")
    def test_int_pickling_efficiency(self):
        pass

    @onlyIf(0, "python unittest not relevant for archiving")
    def test_dynamic_class(self):
        pass

    @onlyIf(0, "python unittest not relevant for archiving")
    def test_ellipsis(self):
        pass

    @onlyIf(0, "python unittest not relevant for archiving")
    def test_notimplemented(self):
        pass

    @onlyIf(0, "python unittest not relevant for archiving")
    def test_load_classic_instance(self):
        pass

    @onlyIf(0, "python unittest not relevant for archiving")
    def test_insecure_strings(self):
        pass

    @onlyIf(0, "python unittest not relevant for archiving")
    def test_load_from_canned_string(self):
        pass

    @onlyIf(0, "python unittest not relevant for archiving")
    def test_maxint64(self):
        pass

    @onlyIf(0, "python unittest not relevant for archiving")
    def test_dict_chunking(self):
        pass

    @onlyIf(0, "python unittest not relevant for archiving")
    def test_float_format(self):
        pass

    @onlyIf(0, "python unittest not relevant for archiving")
    def test_garyp(self):
        pass

    @onlyIf(0, "python unittest not relevant for archiving")
    def test_list_chunking(self):
        pass

    @onlyIf(0, "python unittest not relevant for archiving")
    def test_singletons(self):
        pass

    @onlyIf(0, "python unittest not relevant for archiving")
    def test_simple_newobj(self):
        pass

    @onlyIf(0, "python unittest not relevant for archiving")
    def test_short_tuples(self):
        pass

    @onlyIf(0, "python unittest not relevant for archiving")
    def test_proto(self):
        pass

    @onlyIf(0, "python unittest not relevant for archiving")
    def test_long1(self):
        pass

    @onlyIf(0, "python unittest not relevant for archiving")
    def test_long4(self):
        pass

    @onlyIf(0, "python unittest not relevant for archiving")
    def test_get(self):
        pass

    @onlyIf(0, "python unittest not relevant for archiving")
    def test_load_from_data0(self):
        pass

    @onlyIf(0, "python unittest not relevant for archiving")
    def test_load_from_data1(self):
        pass

    @onlyIf(0, "python unittest not relevant for archiving")
    def test_load_from_data2(self):
        pass

    @onlyIf(0, "python unittest not relevant for archiving")
    def test_unpickle_from_2x(self):
        pass

    @onlyIf(0, "python unittest not relevant for archiving")
    def test_pickle_to_2x(self):
        pass

    @onlyIf(0, "python unittest not relevant for archiving")
    def test_bad_getattr(self):
        pass

    @onlyIf(0, "python unittest not relevant for archiving")
    def test_unicode(self):
        pass

    @onlyIf(0, "python unittest not relevant for archiving")
    def test_maxsize64(self):
        pass

    @onlyIf(0, "python unittest not relevant for archiving")
    def test_empty_bytestring(self):
        pass

    @onlyIf(0, "python unittest not relevant for archiving")
    def test_pop_empty_stack(self):
        pass

    def test_long(self):
        # The real test_long method takes way too much time, test a subset
        x = 12345678910111213141516178920 << (256 * 8)
        buf = self.dumps(x)
        v = self.loads(buf)
        self.assertEqual(v, x)

        x = -x
        buf = self.dumps(x)
        v = self.loads(buf)
        self.assertEqual(v, x)

        for val in (long(0), long(1), long(sys.maxsize), long(sys.maxsize * 128)):
            for x in val, -val:
                buf = self.dumps(x)
                v = self.loads(buf)
                self.assertEqual(v, x)

    # Overridden tests for extension codes, the test code checks
    # the actual byte stream.
    def produce_global_ext(self, extcode, opcode):
        e = test.pickletester.ExtensionSaver(extcode)
        try:
            copyreg.add_extension(__name__, "MyList", extcode)
            x = MyList([1, 2, 3])
            x.foo = 42
            x.bar = "hello"

            s1 = self.dumps(x, 1)
            y = self.loads(s1)
            self.assertEqual(list(x), list(y))
            self.assertEqual(x.__dict__, y.__dict__)
        finally:
            e.restore()

    #
    # The test_reduce* methods iterate over various protocol
    # versions. Override to only look at protocol version 2.
    #
    def test_reduce_overrides_default_reduce_ex(self):
        for proto in (2,):
            x = test.pickletester.REX_one()
            self.assertEqual(x._reduce_called, 0)
            s = self.dumps(x, proto)
            self.assertEqual(x._reduce_called, 1)
            y = self.loads(s)
            self.assertEqual(y._reduce_called, 0)

    def test_reduce_ex_called(self):
        for proto in (2,):
            x = test.pickletester.REX_two()
            self.assertEqual(x._proto, None)
            s = self.dumps(x, proto)
            self.assertEqual(x._proto, proto)
            y = self.loads(s)
            self.assertEqual(y._proto, None)

    def test_reduce_ex_overrides_reduce(self):
        for proto in (2,):
            x = test.pickletester.REX_three()
            self.assertEqual(x._proto, None)
            s = self.dumps(x, proto)
            self.assertEqual(x._proto, proto)
            y = self.loads(s)
            self.assertEqual(y._proto, None)

    def test_reduce_ex_calls_base(self):
        for proto in (2,):
            x = test.pickletester.REX_four()
            self.assertEqual(x._proto, None)
            s = self.dumps(x, proto)
            self.assertEqual(x._proto, proto)
            y = self.loads(s)
            self.assertEqual(y._proto, proto)

    def test_reduce_calls_base(self):
        for proto in (2,):
            x = test.pickletester.REX_five()
            self.assertEqual(x._reduce_called, 0)
            s = self.dumps(x, proto)
            self.assertEqual(x._reduce_called, 1)
            y = self.loads(s)
            self.assertEqual(y._reduce_called, 1)
class TestArchivePlainPython(TestKeyedArchivePlainPython):
    """Re-run the keyed-archiving cases against classic NSArchiver."""

    def setUp(self):
        # Classic archiving only matches pickle protocol 2 semantics;
        # restrict pickletester's protocol list for the duration.
        self._protocols = test.pickletester.protocols
        test.pickletester.protocols = (2,)

    def tearDown(self):
        test.pickletester.protocols = self._protocols

    def dumps(self, arg, proto=0, fast=0):
        """Archive *arg*; *proto* and *fast* are accepted but ignored."""
        return NSArchiver.archivedDataWithRootObject_(arg)

    def loads(self, buf):
        """Unarchive and return the root object stored in *buf*."""
        return NSUnarchiver.unarchiveObjectWithData_(buf)

    # Pickle-internal cases without an archiving counterpart:
    @onlyIf(0, "python unittest not relevant for archiving")
    def test_negative_put(self):
        pass

    @onlyIf(0, "python unittest not relevant for archiving")
    def test_int_pickling_efficiency(self):
        pass

    @onlyIf(0, "python unittest not relevant for archiving")
    def test_negative_32b_binunicode(self):
        pass

    @onlyIf(0, "python unittest not relevant for archiving")
    def test_negative_32b_binput(self):
        pass

    @onlyIf(0, "python unittest not relevant for archiving")
    def test_negative_32b_binbytes(self):
        pass
#
# Disable testing of plain Archiving for now, need full support
# for keyed-archiving first, then worry about adding "classic"
# archiving.
#
# class TestArchivePlainPython (TestKeyedArchivePlainPython):
# def dumps(self, arg, proto=0, fast=0):
# # Ignore proto and fast
# return NSArchiver.archivedDataWithRootObject_(arg)
#
# def loads(self, buf):
# return NSUnarchiver.unarchiveObjectWithData_(buf)
#
# Second set of tests: test if archiving a graph that
# contains both python and objective-C objects works correctly.
#
class TestKeyedArchiveMixedGraphs(TestCase):
    """Archive object graphs that mix plain Python and Objective-C objects."""

    def dumps(self, arg, proto=0, fast=0):
        """Keyed-archive *arg*; *proto* and *fast* are ignored."""
        return NSKeyedArchiver.archivedDataWithRootObject_(arg)

    def loads(self, buf):
        """Unarchive the root object from the NSData *buf*."""
        return NSKeyedUnarchiver.unarchiveObjectWithData_(buf)

    def test_list1(self):
        classic = a_classic_class()
        newstyle = a_newstyle_class()
        newstyle.lst = NSArray.arrayWithObject_(classic)
        graph = NSArray.arrayWithArray_([classic, newstyle, [classic, newstyle]])

        data = self.dumps(graph)
        self.assertIsInstance(data, NSData)

        restored = self.loads(data)
        self.assertIsInstance(restored, NSArray)
        self.assertEqual(len(restored), 3)
        first = restored[0]
        second = restored[1]
        third = restored[2]
        self.assertIsInstance(first, a_classic_class)
        self.assertIsInstance(second, a_newstyle_class)
        self.assertIsInstance(third, list)
        # Object identity must survive archiving: the inner list and the
        # NSArray attribute refer to the very same restored instances.
        self.assertIs(third[0], first)
        self.assertIs(third[1], second)
        self.assertIsInstance(second.lst, NSArray)
        self.assertIs(second.lst[0], first)
class TestArchiveMixedGraphs(TestKeyedArchiveMixedGraphs):
    """Run the mixed-graph tests again with classic (non-keyed) archiving."""

    def dumps(self, arg, proto=0, fast=0):
        # proto and fast exist only for pickletester signature compatibility.
        return NSArchiver.archivedDataWithRootObject_(arg)

    def loads(self, buf):
        return NSUnarchiver.unarchiveObjectWithData_(buf)
#
# And finally some tests to check if archiving of Python
# subclasses of NSObject works correctly.
#
class TestArchivePythonObjCSubclass(TestCase):
    # Placeholder: archiving of Python subclasses of NSObject is not yet
    # covered by concrete test cases (see the module comment above).
    pass


if __name__ == "__main__":
    main()
|
versions | 063_8b633852cb7a_org_changes | # encoding: utf-8
"""063 Org changes
Revision ID: 8b633852cb7a
Revises: 6deb2bbab394
Create Date: 2018-09-04 18:49:10.608831
"""
import sqlalchemy as sa
from alembic import op
from ckan.migration import skip_based_on_legacy_engine_version
# revision identifiers, used by Alembic.
revision = "8b633852cb7a"
down_revision = "6deb2bbab394"
branch_labels = None
depends_on = None
def upgrade():
    """Apply the organization changes: a user sysadmin flag, package
    ownership/privacy columns, and the group is_organization marker."""
    if skip_based_on_legacy_engine_version(op, __name__):
        return
    # (table, column) pairs, applied in the original migration order.
    additions = (
        ("user", sa.Column("sysadmin", sa.Boolean, server_default="FALSE")),
        ("package", sa.Column("owner_org", sa.UnicodeText)),
        ("package", sa.Column("private", sa.Boolean, server_default="FALSE")),
        ("package_revision", sa.Column("owner_org", sa.UnicodeText)),
        ("package_revision", sa.Column("private", sa.Boolean, server_default="FALSE")),
        ("group", sa.Column("is_organization", sa.Boolean, server_default="FALSE")),
        ("group_revision", sa.Column("is_organization", sa.Boolean, server_default="FALSE")),
    )
    for table, column in additions:
        op.add_column(table, column)
def downgrade():
    """Reverse upgrade(): drop every column this revision added."""
    removals = (
        ("user", "sysadmin"),
        ("package", "owner_org"),
        ("package", "private"),
        ("package_revision", "owner_org"),
        ("package_revision", "private"),
        ("group", "is_organization"),
        ("group_revision", "is_organization"),
    )
    for table, column in removals:
        op.drop_column(table, column)
|
general | metadata | from gaphor.core.modeling.properties import attribute
from gaphor.core.styling import (
FontWeight,
JustifyContent,
Style,
TextAlign,
VerticalAlign,
)
from gaphor.diagram.presentation import ElementPresentation
from gaphor.diagram.shapes import (
Box,
Orientation,
Text,
draw_border,
draw_left_separator,
draw_top_separator,
)
class MetadataItem(ElementPresentation):
    """Diagram element that renders document metadata (author, website,
    description, revision, license, creation/update dates).

    Only attributes that hold a non-empty value are rendered; the whole
    shape is rebuilt from scratch whenever any attribute changes.
    """

    # Persisted string attributes; each defaults to the empty string.
    createdBy: attribute[str] = attribute("createdBy", str, "")
    description: attribute[str] = attribute("description", str, "")
    website: attribute[str] = attribute("website", str, "")
    revision: attribute[str] = attribute("revision", str, "")
    license: attribute[str] = attribute("license", str, "")
    createdOn: attribute[str] = attribute("createdOn", str, "")
    updatedOn: attribute[str] = attribute("updatedOn", str, "")

    def __init__(self, diagram, id=None):
        super().__init__(diagram, id)
        # Rebuild the shape whenever any of the metadata fields changes.
        self.watch("createdBy", self.update_shapes).watch(
            "website", self.update_shapes
        ).watch("description", self.update_shapes).watch(
            "revision", self.update_shapes
        ).watch("license", self.update_shapes).watch(
            "createdOn", self.update_shapes
        ).watch("updatedOn", self.update_shapes)

    def update_shapes(self, event=None):
        """Recompose ``self.shape`` from the currently-set attributes.

        Layout is up to three stacked rows:
        1. "Created By" / "Website" side by side,
        2. "Description",
        3. "Revision" / "License" / "Created On" / "Updated On" side by side.
        The repeated ``*([...] if cond else [])`` splat pattern below
        optionally inserts each sub-box only when its attribute is set.
        """
        diagram = self.diagram
        group_style: Style = {"justify-content": JustifyContent.STRETCH}
        box_style: Style = {"padding": (4, 4, 4, 4)}
        text_style: Style = {
            "text-align": TextAlign.LEFT,
            "vertical-align": VerticalAlign.TOP,
        }
        heading_style: Style = {
            **text_style,
            "font-size": "small",
            "font-weight": FontWeight.BOLD,
        }
        self.shape = Box(
            # Row 1: Created By and Website, horizontally.
            *(
                [
                    Box(
                        *(
                            [
                                Box(
                                    Text(
                                        text=f'{diagram.gettext("Created By")}:',
                                        style=heading_style,
                                    ),
                                    Text(
                                        text=lambda: self.createdBy or "",
                                        style=text_style,
                                    ),
                                    style=box_style,
                                )
                            ]
                            if self.createdBy
                            else []
                        ),
                        *(
                            [
                                Box(
                                    Text(
                                        text=f'{diagram.gettext("Website")}:',
                                        style=heading_style,
                                    ),
                                    Text(
                                        text=lambda: self.website or "",
                                        style=text_style,
                                    ),
                                    style=box_style,
                                    draw=draw_left_separator,
                                )
                            ]
                            if self.website
                            else []
                        ),
                        orientation=Orientation.HORIZONTAL,
                        style=group_style,
                    )
                ]
                if self.createdBy or self.website
                else []
            ),
            # Row 2: Description, full width.
            *(
                [
                    Box(
                        Text(
                            text=f'{diagram.gettext("Description")}:',
                            style=heading_style,
                        ),
                        Text(
                            text=lambda: self.description or "",
                            style=text_style,
                        ),
                        style=box_style,
                        draw=draw_top_separator,
                    )
                ]
                if self.description
                else []
            ),
            # Row 3: Revision / License / Created On / Updated On.
            *(
                [
                    Box(
                        *(
                            [
                                Box(
                                    Text(
                                        text=f'{diagram.gettext("Revision")}:',
                                        style=heading_style,
                                    ),
                                    Text(
                                        text=lambda: self.revision or "",
                                        style=text_style,
                                    ),
                                    style=box_style,
                                )
                            ]
                            if self.revision
                            else []
                        ),
                        *(
                            [
                                Box(
                                    Text(
                                        text=f'{diagram.gettext("License")}:',
                                        style=heading_style,
                                    ),
                                    Text(
                                        text=lambda: self.license or "",
                                        style=text_style,
                                    ),
                                    style=box_style,
                                    draw=draw_left_separator,
                                )
                            ]
                            if self.license
                            else []
                        ),
                        *(
                            [
                                Box(
                                    Text(
                                        text=f'{diagram.gettext("Created On")}:',
                                        style=heading_style,
                                    ),
                                    Text(
                                        text=lambda: self.createdOn or "",
                                        style=text_style,
                                    ),
                                    style=box_style,
                                    draw=draw_left_separator,
                                )
                            ]
                            if self.createdOn
                            else []
                        ),
                        *(
                            [
                                Box(
                                    Text(
                                        text=f'{diagram.gettext("Updated On")}:',
                                        style=heading_style,
                                    ),
                                    Text(
                                        text=lambda: self.updatedOn or "",
                                        style=text_style,
                                    ),
                                    style=box_style,
                                    draw=draw_left_separator,
                                )
                            ]
                            if self.updatedOn
                            else []
                        ),
                        orientation=Orientation.HORIZONTAL,
                        style=group_style,
                        draw=draw_top_separator,
                    )
                ]
                if self.revision or self.license or self.createdOn or self.updatedOn
                else []
            ),
            draw=draw_border,
            style=group_style,
        )
|
frescobaldi-app | contextmenu | # This file is part of the Frescobaldi project, http://www.frescobaldi.org/
#
# Copyright (c) 2008 - 2014 by Wilbert Berendsen
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
# See http://www.gnu.org/licenses/ for more information.
"""
The contextmenu of the editor.
This module is imported when a contextmenu event occurs in the View (view.py).
"""
import app
import browseriface
import icons
import util
from PyQt5.QtCore import QTimer, QUrl
from PyQt5.QtWidgets import QAction
def contextmenu(view):
    """Build and return the context menu for the editor *view*.

    Starts from Qt's standard edit menu, prepends document-specific
    actions (open file at cursor, jump to definition, selection-only
    helpers), then appends any extension-provided "editor" submenu.
    """
    cursor = view.textCursor()
    menu = view.createStandardContextMenu()
    mainwindow = view.window()
    # create the actions in the actions list
    actions = []
    actions.extend(open_files(cursor, menu, mainwindow))
    actions.extend(jump_to_definition(cursor, menu, mainwindow))
    if cursor.hasSelection():
        # Selection-only actions: copy as colored HTML, copy to snippet,
        # cut-and-assign, move to include file.
        import panelmanager

        actions.append(mainwindow.actionCollection.edit_copy_colored_html)
        actions.append(
            panelmanager.manager(
                mainwindow
            ).snippettool.actionCollection.copy_to_snippet
        )
        import documentactions

        ac = documentactions.get(mainwindow).actionCollection
        actions.append(ac.edit_cut_assign)
        actions.append(ac.edit_move_to_include_file)
    # now add the actions to the standard menu
    if actions:
        first_action = menu.actions()[0] if menu.actions() else None
        if first_action:
            # Prepend our actions, separated from the standard ones.
            first_action = menu.insertSeparator(first_action)
            menu.insertActions(first_action, actions)
        else:
            menu.addActions(actions)
        menu.addSeparator()
    extensions = app.extensions().menu("editor")
    if not extensions.isEmpty():
        menu.addMenu(extensions)
    return menu
def open_files(cursor, menu, mainwindow):
    """Return a list of actions (maybe empty) for files at the cursor to open."""

    def action(filename):
        # Build one 'Open "<file>"' action; triggering it opens the file
        # and makes it the current document.
        url = QUrl.fromLocalFile(filename)
        a = QAction(menu)
        a.setText(_('Open "{url}"').format(url=util.homify(filename)))
        a.setIcon(icons.get("document-open"))

        @a.triggered.connect
        def open_doc():
            d = mainwindow.openUrl(url)
            if d:
                browseriface.get(mainwindow).setCurrentDocument(d)

        return a

    import open_file_at_cursor

    return list(map(action, open_file_at_cursor.filenames_at_cursor(cursor)))
def jump_to_definition(cursor, menu, mainwindow):
    """Return a list of context menu actions jumping to the definition.

    Returns an empty list when the cursor is not on a reference node.
    The action's label is filled in asynchronously (zero-timeout QTimer)
    because resolving the definition target may be expensive.
    """
    import definition

    node = definition.refnode(cursor)
    if not node:
        return []
    a = QAction(menu)

    def complete():
        target = definition.target(node)
        if target:
            if target.document is node.document:
                # Same document: show the (1-based) line number.
                a.setText(
                    _("&Jump to definition (line {num})").format(
                        num=node.document.index(
                            node.document.block(target.position)
                        )
                        + 1
                    )
                )
            else:
                # BUG FIX: the translatable string lacked the {filename}
                # placeholder, so .format(filename=...) silently dropped
                # the target file name from the menu label.
                a.setText(
                    _("&Jump to definition (in {filename})").format(
                        filename=util.homify(target.document.filename)
                    )
                )

            @a.triggered.connect
            def activate():
                definition.goto_target(mainwindow, target)

        else:
            a.setText(_("&Jump to definition (unknown)"))
            a.setEnabled(False)

    QTimer.singleShot(0, complete)
    return [a]
|
midifile | song | # Python midifile package -- parse, load and play MIDI files.
# Copyright (c) 2011 - 2014 by Wilbert Berendsen
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
# See http://www.gnu.org/licenses/ for more information.
"""
midifile.song -- structures MIDI file data as a song.
"""
import collections
from . import event, parser
def load(filename):
    """Convenience function to instantiate a Song from a filename.

    If the filename is a type 2 MIDI file, just returns the first track.
    """
    with open(filename, "rb") as midifile:
        fmt, div, tracks = parser.parse_midi_data(midifile.read())
    # Type 2 files are collections of independent songs; keep only the
    # first track so the result still behaves as a single song.
    if fmt == 2:
        tracks = tracks[:1]
    return Song(div, tracks)
def events_dict(tracks):
    """Return a dict mapping MIDI time-step -> {track number: events}.

    Every time step maps to a dictionary holding, per track, the list of
    events occurring at that time.
    """
    result = collections.defaultdict(dict)
    for track_number, track in enumerate(tracks):
        parsed = parser.parse_midi_events(track)
        for time, evs in parser.time_events_grouped(parsed):
            result[time][track_number] = evs
    return result
def events_dict_together(tracks):
    """Return a dict mapping MIDI time-step -> flat list of all events.

    Like events_dict(), but events from all tracks are merged into a
    single list per time step.
    """
    result = collections.defaultdict(list)
    for track in tracks:
        parsed = parser.parse_midi_events(track)
        for time, evs in parser.time_events_grouped(parsed):
            result[time].extend(evs)
    return result
def is_tempo(e):
    """Return True if *e* is a Set Tempo meta-event (meta type 0x51)."""
    if not isinstance(e, event.MetaEvent):
        return False
    return e.type == 0x51
def get_tempo(e):
    """Return the tempo (microseconds per quarter note) from a Set Tempo
    meta-event, decoding its three big-endian data bytes."""
    tempo = e.data[0]
    tempo = tempo * 256 + e.data[1]
    tempo = tempo * 256 + e.data[2]
    return tempo
def is_time_signature(e):
    """Return True if *e* is a Time Signature meta-event (meta type 0x58)."""
    if not isinstance(e, event.MetaEvent):
        return False
    return e.type == 0x58
def get_time_signature(e):
    """Return the Time Signature event's data bytes as a tuple.

    For a well-formed event that is (num, den, clocks, num_32s), where
    den is stored as a power-of-two exponent.
    """
    return tuple(e.data)
def smpte_division(div):
    """Return a ticks-per-quarter-note division, converting SMPTE form.

    When bit 15 is set the division encodes (negated) frames per second
    in the high byte and ticks per frame in the low byte; their product
    is an equivalent plain division. Otherwise *div* is returned as-is.
    """
    if not (div & 0x8000):
        return div
    frames_per_second = 256 - (div >> 8)
    ticks_per_frame = div & 0xFF
    return frames_per_second * ticks_per_frame
def events_iter(d):
    """Return a function that iterates over one value of dict *d*.

    Values of *d* are either per-track dicts or flat lists of events;
    returns iter_events_dict for dicts and the builtin iter otherwise.
    Returns None when *d* is empty.
    """
    for key in d:
        if isinstance(d[key], dict):
            return iter_events_dict
        return iter
    return None
def iter_events_dict(evs):
    """Yield all events of the per-track dict *evs* in track-number order."""
    for track in sorted(evs):
        for e in evs[track]:
            yield e
class TempoMap:
    """Converts midi time to real time in microseconds."""

    def __init__(self, d, division):
        """Initialize our tempo map based on events d and division.

        d maps MIDI times either to per-track dicts or to flat event
        lists (see events_dict / events_dict_together); division is the
        raw header division, converted here from SMPTE form if needed.
        """
        # are the events one list (single-track) or a dict (per-track)?
        self.division = smpte_division(division)
        # times: list of (midi_time, tempo) change points, ascending.
        self.times = times = []
        events = events_iter(d)
        if events:
            for midi_time, evs in sorted(d.items()):
                for e in events(evs):
                    if is_tempo(e):
                        # Only the first tempo event at each time counts.
                        times.append((midi_time, get_tempo(e)))
                        break
        if not times or times[0][0] != 0:
            # MIDI default: 500000 microseconds per quarter note (120 BPM).
            times.insert(0, (0, 500000))

    def real_time(self, midi_time):
        """Returns the real time in microseconds for the given MIDI time."""
        real_time = 0
        times = self.times
        # Accumulate whole tempo segments up to midi_time; the partial
        # final segment is added either at the break (midi_time falls
        # before the last change) or in the for-else (it falls after).
        for i in range(1, len(times)):
            if times[i][0] >= midi_time:
                real_time += (midi_time - times[i - 1][0]) * times[i - 1][1]
                break
            real_time += (times[i][0] - times[i - 1][0]) * times[i - 1][1]
        else:
            real_time += (midi_time - times[-1][0]) * times[-1][1]
        return real_time // self.division

    def msec(self, midi_time):
        """Returns the real time in milliseconds."""
        return self.real_time(midi_time) // 1000
def beats(d, division):
    """Yields tuples for every beat in the events dictionary d.

    Each tuple is:

        (midi_time, beat_num, beat_total, denominator)

    With this you can easily add measure numbers and find measure positions
    in the MIDI.
    """
    events = events_iter(d)
    if not events:
        return
    # Collect every time-signature change in MIDI-time order.
    time_sigs = []
    times = sorted(d)
    for midi_time in times:
        for e in events(d[midi_time]):
            if is_time_signature(e):
                time_sigs.append((midi_time, get_time_signature(e)))
    if not time_sigs or time_sigs[0][0] != 0:
        # default time signature at start
        # NOTE(review): the denominator field is a power-of-two exponent
        # elsewhere (see step below); this default uses 4 -- confirm the
        # intended default signature.
        time_sigs.insert(0, (0, (4, 4, 24, 8)))
    # now yield a tuple for every beat
    time = 0
    sigs_index = 0
    while time <= times[-1]:
        if sigs_index < len(time_sigs) and time >= time_sigs[sigs_index][0]:
            # new time signature; step is the MIDI length of one beat
            # (den is stored as an exponent: real denominator is 2**den).
            time, (num, den, clocks, n32s) = time_sigs[sigs_index]
            step = (4 * division) // (2**den)
            beat = 1
            sigs_index += 1
        yield time, beat, num, den
        time += step
        beat = beat % num + 1
class Song:
    """A loaded MIDI file.

    The following instance attributes are set on init:

    division: the division set in the MIDI header
    ntracks: the number of tracks
    events: a dict mapping MIDI times to a dict with per-track lists of events.
    tempo_map: TempoMap instance that computes real time from MIDI time.
    length: the length in milliseconds of the song (same as the time of the last
    event).
    beats: a list of tuples(msec, measnum, beat, num, den) for every beat
    music: a list of tuples(msec, d) where d is a dict mapping tracknr to events
    """

    def __init__(self, division, tracks):
        """Initialize the Song with the given division and track chunks."""
        self.division = division
        self.ntracks = len(tracks)
        self.events = events_dict(tracks)
        self.tempo_map = t = TempoMap(self.events, division)
        # Song length == real time of the last event.
        self.length = t.msec(max(self.events))
        # Convert beat positions to milliseconds and count measures
        # (a new measure starts whenever the beat counter resets to 1).
        self.beats = b = []
        measnum = 0
        for midi_time, beat, num, den in beats(self.events, division):
            if beat == 1:
                measnum += 1
            b.append((t.msec(midi_time), measnum, beat, num, den))
        # Playable event list, sorted by time, with times in milliseconds.
        self.music = [
            (t.msec(midi_time), evs) for midi_time, evs in sorted(self.events.items())
        ]

    def beat(self, time):
        """Returns (time, measnum, beat, num, den) for the beat at time."""
        if not self.beats:
            return (0, 0, 0, 4, 2)
        pos = 0
        if time:
            # bisect our way in the beats list: find the first beat whose
            # msec timestamp is >= time.
            end = len(self.beats)
            while pos < end:
                mid = (pos + end) // 2
                if time > self.beats[mid][0]:
                    pos = mid + 1
                else:
                    end = mid
        # Clamp so a time past the last beat returns the final beat.
        return self.beats[min(pos, len(self.beats) - 1)]
|
lector | models | # This file is a part of Lector, a Qt based ebook reader
# Copyright (C) 2017-2019 BasioMeusPuga
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import logging
import pathlib
from lector.resources import pie_chart
from PyQt5 import QtCore, QtWidgets
logger = logging.getLogger(__name__)
class BookmarkProxyModel(QtCore.QSortFilterProxyModel):
    """Proxy model for a reading tab's bookmark list.

    Edits made through the view are mirrored into the owning tab's
    metadata dict so bookmark descriptions persist.
    """

    def __init__(self, parent=None):
        super(BookmarkProxyModel, self).__init__(parent)
        self.parent = parent
        # The tab that owns the bookmarks (and their metadata dict).
        self.parentTab = self.parent.parent
        self.filter_text = None

    def setFilterParams(self, filter_text):
        self.filter_text = filter_text

    def setData(self, index, value, role):
        if role == QtCore.Qt.EditRole:
            source_index = self.mapToSource(index)
            # UserRole + 2 holds the bookmark's unique identifier.
            identifier = self.sourceModel().data(source_index, QtCore.Qt.UserRole + 2)
            self.sourceModel().setData(source_index, value, QtCore.Qt.DisplayRole)
            # Persist the new description in the tab metadata as well.
            self.parentTab.metadata["bookmarks"][identifier]["description"] = value
            return True
class ItemProxyModel(QtCore.QSortFilterProxyModel):
    """Filter proxy for the cover-grid (item) library view.

    Filtering logic is shared with TableProxyModel via
    ProxyModelsCommonFunctions.
    """

    def __init__(self, parent=None):
        super(ItemProxyModel, self).__init__(parent)
        self.filter_text = None
        self.active_library_filters = None
        self.sorting_box_position = None
        self.common_functions = ProxyModelsCommonFunctions(self)

    def setFilterParams(
        self, filter_text, active_library_filters, sorting_box_position
    ):
        # The helper also lower-cases the library filter names.
        self.common_functions.setFilterParams(
            filter_text, active_library_filters, sorting_box_position
        )

    def filterAcceptsRow(self, row, parent):
        output = self.common_functions.filterAcceptsRow(row, parent)
        return output
class TableProxyModel(QtCore.QSortFilterProxyModel):
    """Proxy model backing the library table view.

    Adds translated column headers, per-column sort roles, a pie-chart
    reading-progress decoration, and human-friendly "last read"
    timestamps on top of the source model's UserRole data.
    """

    def __init__(self, temp_dir, tableViewHeader, consider_read_at, parent=None):
        super(TableProxyModel, self).__init__(parent)
        self.tableViewHeader = tableViewHeader
        self.consider_read_at = consider_read_at
        self._translate = QtCore.QCoreApplication.translate
        title_string = self._translate("TableProxyModel", "Title")
        author_string = self._translate("TableProxyModel", "Author")
        year_string = self._translate("TableProxyModel", "Year")
        lastread_string = self._translate("TableProxyModel", "Last Read")
        tags_string = self._translate("TableProxyModel", "Tags")
        # Column 0 is the cover and has no header text.
        self.header_data = [
            None,
            title_string,
            author_string,
            year_string,
            lastread_string,
            "%",
            tags_string,
        ]
        self.temp_dir = temp_dir
        self.filter_text = None
        self.active_library_filters = None
        self.sorting_box_position = None
        # Maps a view column to the source-model role holding its data.
        self.role_dictionary = {
            1: QtCore.Qt.UserRole,       # Title
            2: QtCore.Qt.UserRole + 1,   # Author
            3: QtCore.Qt.UserRole + 2,   # Year
            4: QtCore.Qt.UserRole + 12,  # Last read
            5: QtCore.Qt.UserRole + 7,   # Position percentage
            6: QtCore.Qt.UserRole + 4,   # Tags
        }
        self.common_functions = ProxyModelsCommonFunctions(self)

    def columnCount(self, parent):
        return 7

    def headerData(self, column, orientation, role):
        if role == QtCore.Qt.DisplayRole:
            try:
                return self.header_data[column]
            except IndexError:
                # BUG FIX: the original message lacked the space before the
                # column number ("...column5"); also use lazy %-style
                # logging arguments instead of eager concatenation.
                logger.error(
                    "Table proxy model: Can't find header for column %s", column
                )
                # The column will be called IndexError. Not a typo.
                return "IndexError"

    def flags(self, index):
        # Tag editing will take place by way of a right click menu
        # These tags denote clickable and that's about it
        return QtCore.Qt.ItemIsEnabled | QtCore.Qt.ItemIsSelectable

    def data(self, index, role):
        source_index = self.mapToSource(index)
        item = self.sourceModel().item(source_index.row(), 0)

        if role == QtCore.Qt.TextAlignmentRole:
            # Center the Year and Last Read columns.
            if index.column() in (3, 4):
                return QtCore.Qt.AlignHCenter

        if role == QtCore.Qt.DecorationRole:
            if index.column() == 5:
                # Progress pie chart; a missing file gets the -1 marker.
                return_pixmap = None
                file_exists = item.data(QtCore.Qt.UserRole + 5)
                position_percent = item.data(QtCore.Qt.UserRole + 7)
                if not file_exists:
                    return pie_chart.pixmapper(
                        -1, None, -1, QtCore.Qt.SizeHintRole + 10
                    )
                if position_percent:
                    return_pixmap = pie_chart.pixmapper(
                        position_percent,
                        self.temp_dir,
                        self.consider_read_at,
                        QtCore.Qt.SizeHintRole + 10,
                    )
                return return_pixmap

        elif role == QtCore.Qt.DisplayRole or role == QtCore.Qt.EditRole:
            if index.column() in (0, 5):  # Cover and Status
                return QtCore.QVariant()
            if index.column() == 4:
                # Render the last-accessed timestamp as a relative age.
                last_accessed = item.data(self.role_dictionary[index.column()])
                if last_accessed:
                    right_now = QtCore.QDateTime().currentDateTime()
                    time_diff = last_accessed.msecsTo(right_now)
                    return self.time_convert(time_diff // 1000)
            return item.data(self.role_dictionary[index.column()])

        else:
            return QtCore.QVariant()

    def setFilterParams(
        self, filter_text, active_library_filters, sorting_box_position
    ):
        self.common_functions.setFilterParams(
            filter_text, active_library_filters, sorting_box_position
        )

    def filterAcceptsRow(self, row, parent):
        output = self.common_functions.filterAcceptsRow(row, parent)
        return output

    def sort_table_columns(self, column=None):
        # The *column* argument is ignored; the header's current sort
        # indicator is authoritative.
        column = self.tableViewHeader.sortIndicatorSection()
        sorting_order = self.tableViewHeader.sortIndicatorOrder()
        self.sort(0, sorting_order)
        if column != 0:
            self.setSortRole(self.role_dictionary[column])

    def time_convert(self, seconds):
        """Return *seconds* as a coarse human-readable age ("3d", "<1m")."""
        seconds = int(seconds)
        m, s = divmod(seconds, 60)
        h, m = divmod(m, 60)
        d, h = divmod(h, 24)
        if d > 0:
            return f"{d}d"
        if h > 0:
            return f"{h}h"
        if m > 0:
            return f"{m}m"
        else:
            return "<1m"
class ProxyModelsCommonFunctions:
    """Filtering logic shared by ItemProxyModel and TableProxyModel."""

    def __init__(self, parent_model):
        self.parent_model = parent_model

    def setFilterParams(
        self, filter_text, active_library_filters, sorting_box_position
    ):
        """Store the filter string, library filters (lower-cased), and
        the index of the sorting combo box on the parent model."""
        self.parent_model.filter_text = filter_text
        self.parent_model.active_library_filters = [
            i.lower() for i in active_library_filters
        ]
        self.parent_model.sorting_box_position = sorting_box_position

    def filterAcceptsRow(self, row, parent):
        """Return True if the book in *row* passes all active filters."""
        model = self.parent_model.sourceModel()
        this_index = model.index(row, 0)

        title = model.data(this_index, QtCore.Qt.UserRole)
        author = model.data(this_index, QtCore.Qt.UserRole + 1)
        tags = model.data(this_index, QtCore.Qt.UserRole + 4)
        progress = model.data(this_index, QtCore.Qt.UserRole + 7)
        directory_name = model.data(this_index, QtCore.Qt.UserRole + 10)
        directory_tags = model.data(this_index, QtCore.Qt.UserRole + 11)
        last_accessed = model.data(this_index, QtCore.Qt.UserRole + 12)
        file_path = model.data(this_index, QtCore.Qt.UserRole + 13)

        # Hide untouched files when sorting by last accessed
        if self.parent_model.sorting_box_position == 4 and not last_accessed:
            return False

        # Hide untouched files when sorting by progress
        if self.parent_model.sorting_box_position == 5 and not progress:
            return False

        if self.parent_model.active_library_filters:
            if directory_name not in self.parent_model.active_library_filters:
                return False
        else:
            # NOTE(review): with no library filters active every row is
            # rejected -- presumably intentional (unchecking all
            # libraries empties the view); confirm.
            return False

        if not self.parent_model.filter_text:
            return True
        else:
            # Case-insensitive substring match over all textual fields.
            valid_data = [
                i.lower()
                for i in (
                    title,
                    author,
                    tags,
                    directory_name,
                    directory_tags,
                    file_path,
                )
                if i is not None
            ]
            for i in valid_data:
                if self.parent_model.filter_text.lower() in i:
                    return True
        return False
class MostExcellentFileSystemModel(QtWidgets.QFileSystemModel):
    # Directories are tracked on the basis of their paths
    # Poll the tag_data dictionary to get User selection

    def __init__(self, tag_data, parent=None):
        """*tag_data* maps a directory path to a dict with the keys
        'check_state', 'name' and 'tags'."""
        super(MostExcellentFileSystemModel, self).__init__(parent)
        self.tag_data = tag_data
        # Editable columns and the tag_data field each one reads/writes.
        self.field_dict = {0: "check_state", 4: "name", 5: "tags"}

    def columnCount(self, parent):
        # The QFileSystemModel returns 4 columns by default
        # Columns 1, 2, 3 will be present but hidden
        return 6

    def headerData(self, col, orientation, role):
        # Columns not mentioned here will be hidden
        if orientation == QtCore.Qt.Horizontal and role == QtCore.Qt.DisplayRole:
            column_dict = {0: "Path", 4: "Name", 5: "Tags"}
            try:
                return column_dict[col]
            except KeyError:
                pass

    def data(self, index, role):
        # Name and Tags columns come from tag_data, not the filesystem.
        if index.column() in (4, 5) and (
            role == QtCore.Qt.DisplayRole or role == QtCore.Qt.EditRole
        ):
            read_field = self.field_dict[index.column()]
            try:
                return self.tag_data[self.filePath(index)][read_field]
            except KeyError:
                # Untracked directory: no name/tags to show.
                return QtCore.QVariant()

        if role == QtCore.Qt.CheckStateRole and index.column() == 0:
            return self.checkState(index)

        return QtWidgets.QFileSystemModel.data(self, index, role)

    def flags(self, index):
        if index.column() in (4, 5):
            return (
                QtCore.Qt.ItemIsEnabled
                | QtCore.Qt.ItemIsSelectable
                | QtCore.Qt.ItemIsEditable
            )
        else:
            # Column 0 gets a checkbox in addition to the default flags.
            return (
                QtWidgets.QFileSystemModel.flags(self, index)
                | QtCore.Qt.ItemIsUserCheckable
            )

    def checkState(self, index):
        """Return the check state for *index*, inheriting from the
        nearest tracked ancestor directory; Unchecked if none is."""
        while index.isValid():
            index_path = self.filePath(index)
            if index_path in self.tag_data:
                return self.tag_data[index_path]["check_state"]
            index = index.parent()
        return QtCore.Qt.Unchecked

    def setData(self, index, value, role):
        if (
            role == QtCore.Qt.EditRole or role == QtCore.Qt.CheckStateRole
        ) and index.isValid():
            write_field = self.field_dict[index.column()]
            self.layoutAboutToBeChanged.emit()
            this_path = self.filePath(index)
            if this_path not in self.tag_data:
                self.populate_dictionary(this_path)
            self.tag_data[this_path][write_field] = value
            # Prune entries that no longer carry any information.
            self.depopulate_dictionary()
            self.layoutChanged.emit()
            return True

    def populate_dictionary(self, path):
        """Create a fresh (checked, untagged) tag_data entry for *path*."""
        self.tag_data[path] = {}
        self.tag_data[path]["name"] = None
        self.tag_data[path]["tags"] = None
        self.tag_data[path]["check_state"] = QtCore.Qt.Checked

    def depopulate_dictionary(self):
        # This keeps the tag_data dictionary manageable as well as preventing
        # weird ass behaviour when something is deselected and its tags are cleared
        deletable = set()
        # Entries where every field is None/0 carry no information.
        for i in self.tag_data.items():
            all_data = [j[1] for j in i[1].items()]
            filtered_down = list(filter(lambda x: x is not None and x != 0, all_data))
            if not filtered_down:
                deletable.add(i[0])

        # Get untagged subdirectories too
        all_dirs = [i for i in self.tag_data]
        all_dirs.sort()

        def is_child(this_dir):
            # True if this_dir lies under another tracked, checked dir.
            this_path = pathlib.Path(this_dir)
            for i in all_dirs:
                if pathlib.Path(i) in this_path.parents:
                    # If a parent folder has tags, we only want the deletion
                    # to kick in in case the parent is also checked
                    if self.tag_data[i]["check_state"] == QtCore.Qt.Checked:
                        return True
            return False

        for i in all_dirs:
            if is_child(i):
                dir_tags = (self.tag_data[i]["name"], self.tag_data[i]["tags"])
                filtered_down = list(
                    filter(lambda x: x is not None and x != "", dir_tags)
                )
                if not filtered_down:
                    deletable.add(i)

        for i in deletable:
            del self.tag_data[i]
|
apps | uhd_display | #!/usr/bin/env python
#
# Copyright 2009,2011 Free Software Foundation, Inc.
#
# This file is part of GNU Radio
#
# SPDX-License-Identifier: GPL-3.0-or-later
#
#
import sys
from optparse import OptionParser
from gnuradio import blocks, eng_notation, fft, filter, gr, uhd
from gnuradio.eng_option import eng_option
try:
import sip
from gnuradio import qtgui
from PyQt5 import QtCore, QtGui
except ImportError:
print("Error: Program requires PyQt5 and gr-qtgui.")
sys.exit(1)
try:
from usrp_display_qtgui import Ui_MainWindow
except ImportError:
print("Error: could not find usrp_display_qtgui.py:")
print('\t"pyuic4 usrp_display_qtgui.ui -o usrp_display_qtgui.py"')
sys.exit(1)
# ////////////////////////////////////////////////////////////////////
# Define the QT Interface and Control Dialog
# ////////////////////////////////////////////////////////////////////
class main_window(QtGui.QMainWindow):
    """Main control window wrapping the generated Ui_MainWindow.

    NOTE(review): this class uses PyQt4-era APIs although the module
    imports PyQt5: QtCore.QString and the string-based
    self.connect(..., QtCore.SIGNAL(...)) form were removed in PyQt5,
    and QMainWindow lives in QtWidgets there.  As written these calls
    will fail under PyQt5 -- confirm which Qt binding is targeted.
    """

    def __init__(self, snk, fg, parent=None):
        QtGui.QWidget.__init__(self, parent)
        self.gui = Ui_MainWindow()
        self.gui.setupUi(self)

        # fg is the owning flow graph (my_top_block); all edits are
        # forwarded to it.
        self.fg = fg

        # Add the qtsnk widgets to the layout box
        self.gui.sinkLayout.addWidget(snk)

        self.gui.dcGainEdit.setText(QtCore.QString("%1").arg(0.001))

        # Connect up some signals
        self.connect(self.gui.pauseButton, QtCore.SIGNAL("clicked()"), self.pauseFg)
        self.connect(
            self.gui.frequencyEdit,
            QtCore.SIGNAL("editingFinished()"),
            self.frequencyEditText,
        )
        self.connect(
            self.gui.gainEdit, QtCore.SIGNAL("editingFinished()"), self.gainEditText
        )
        self.connect(
            self.gui.bandwidthEdit,
            QtCore.SIGNAL("editingFinished()"),
            self.bandwidthEditText,
        )
        self.connect(
            self.gui.amplifierEdit,
            QtCore.SIGNAL("editingFinished()"),
            self.amplifierEditText,
        )
        self.connect(
            self.gui.actionSaveData, QtCore.SIGNAL("activated()"), self.saveData
        )
        self.gui.actionSaveData.setShortcut(QtGui.QKeySequence.Save)
        self.connect(
            self.gui.dcGainEdit, QtCore.SIGNAL("editingFinished()"), self.dcGainEditText
        )
        self.connect(
            self.gui.dcCancelCheckBox,
            QtCore.SIGNAL("clicked(bool)"),
            self.dcCancelClicked,
        )

    def pauseFg(self):
        """Toggle the flow graph between running and stopped."""
        if self.gui.pauseButton.text() == "Pause":
            self.fg.stop()
            self.fg.wait()
            self.gui.pauseButton.setText("Unpause")
        else:
            self.fg.start()
            self.gui.pauseButton.setText("Pause")

    # Functions to set the values in the GUI
    def set_frequency(self, freq):
        self.freq = freq
        sfreq = eng_notation.num_to_str(self.freq)
        self.gui.frequencyEdit.setText(QtCore.QString("%1").arg(sfreq))

    def set_gain(self, gain):
        self.gain = gain
        self.gui.gainEdit.setText(QtCore.QString("%1").arg(self.gain))

    def set_bandwidth(self, bw):
        self.bw = bw
        sbw = eng_notation.num_to_str(self.bw)
        self.gui.bandwidthEdit.setText(QtCore.QString("%1").arg(sbw))

    def set_amplifier(self, amp):
        self.amp = amp
        self.gui.amplifierEdit.setText(QtCore.QString("%1").arg(self.amp))

    # Functions called when signals are triggered in the GUI
    def frequencyEditText(self):
        # Invalid input is silently ignored; the stale text remains.
        try:
            freq = eng_notation.str_to_num(self.gui.frequencyEdit.text().toAscii())
            self.fg.set_frequency(freq)
            self.freq = freq
        except RuntimeError:
            pass

    def gainEditText(self):
        try:
            gain = float(self.gui.gainEdit.text())
            self.fg.set_gain(gain)
            self.gain = gain
        except ValueError:
            pass

    def bandwidthEditText(self):
        try:
            bw = eng_notation.str_to_num(self.gui.bandwidthEdit.text().toAscii())
            self.fg.set_bandwidth(bw)
            self.bw = bw
        except ValueError:
            pass

    def amplifierEditText(self):
        try:
            amp = float(self.gui.amplifierEdit.text())
            self.fg.set_amplifier_gain(amp)
            self.amp = amp
        except ValueError:
            pass

    def saveData(self):
        fileName = QtGui.QFileDialog.getSaveFileName(self, "Save data to file", ".")
        if len(fileName):
            self.fg.save_to_file(str(fileName))

    def dcGainEditText(self):
        # NOTE(review): unlike the other edit handlers, a non-numeric
        # value here raises ValueError uncaught -- confirm intended.
        gain = float(self.gui.dcGainEdit.text())
        self.fg.set_dc_gain(gain)

    def dcCancelClicked(self, state):
        # Apply the current DC gain before toggling cancellation.
        self.dcGainEditText()
        self.fg.cancel_dc(state)
class my_top_block(gr.top_block):
    """UHD receive flowgraph with a Qt spectrum sink and optional DC removal.

    Signal path: USRP source -> software amplifier -> qtgui sink; a
    single-pole IIR DC blocker can be spliced in via cancel_dc().
    """

    def __init__(self, options):
        gr.top_block.__init__(self)
        self.options = options
        self.show_debug_info = True

        self.qapp = QtGui.QApplication(sys.argv)

        self.u = uhd.usrp_source(
            device_addr=options.address, stream_args=uhd.stream_args("fc32")
        )

        if options.antenna:
            self.u.set_antenna(options.antenna, 0)

        self.set_bandwidth(options.samp_rate)

        if options.gain is None:
            # if no gain was specified, use the mid-point in dB
            g = self.u.get_gain_range()
            options.gain = float(g.start() + g.stop()) / 2
        self.set_gain(options.gain)

        if options.freq is None:
            # if no freq was specified, use the mid-point
            r = self.u.get_freq_range()
            options.freq = float(r.start() + r.stop()) / 2
        self.set_frequency(options.freq)

        self._fftsize = options.fft_size

        self.snk = qtgui.sink_c(
            options.fft_size,
            fft.window.WIN_BLACKMAN_hARRIS,
            self._freq,
            self._bandwidth,
            "UHD Display",
            True,
            True,
            True,
            False,
        )

        # Set up internal amplifier
        self.amp = blocks.multiply_const_cc(0.0)
        self.set_amplifier_gain(100)

        # Create a single-pole IIR filter to remove DC
        # but don't connect it yet
        self.dc_gain = 0.001
        self.dc = filter.single_pole_iir_filter_cc(self.dc_gain)
        self.dc_sub = blocks.sub_cc()

        self.connect(self.u, self.amp, self.snk)

        if self.show_debug_info:
            print("Bandwidth: ", self.u.get_samp_rate())
            print("Center Freq: ", self.u.get_center_freq())
            print("Freq Range: ", self.u.get_freq_range())

        # Get the reference pointer to the SpectrumDisplayForm QWidget
        # and wrap it as a PyQt SIP object so it behaves as a QWidget.
        self.pysink = sip.wrapinstance(self.snk.qwidget(), QtGui.QWidget)

        self.main_win = main_window(self.pysink, self)

        self.main_win.set_frequency(self._freq)
        self.main_win.set_gain(self._gain)
        self.main_win.set_bandwidth(self._bandwidth)
        self.main_win.set_amplifier(self._amp_value)

        self.main_win.show()

    def save_to_file(self, name):
        """Attach a file sink that records the amplified samples to *name*."""
        self.lock()
        # Add file sink to save data
        self.file_sink = blocks.file_sink(gr.sizeof_gr_complex, name)
        self.connect(self.amp, self.file_sink)
        self.unlock()

    def set_gain(self, gain):
        """Set the RF gain on the USRP source."""
        self._gain = gain
        self.u.set_gain(self._gain)

    def set_frequency(self, freq):
        """Tune the USRP and update the sink's displayed frequency range.

        During __init__ this runs before self.snk exists, so the sink
        update raises AttributeError (not RuntimeError); catch both, or
        construction crashes.
        """
        self._freq = freq
        self.u.set_center_freq(freq)
        try:
            self.snk.set_frequency_range(self._freq, self._bandwidth)
        except (AttributeError, RuntimeError):
            pass

    def set_bandwidth(self, bw):
        """Set the sample rate and update the sink's displayed range.

        Called in __init__ before self.snk (and self._freq) exist, so
        AttributeError must be tolerated along with RuntimeError.
        """
        self._bandwidth = bw
        self.u.set_samp_rate(self._bandwidth)
        try:
            self.snk.set_frequency_range(self._freq, self._bandwidth)
        except (AttributeError, RuntimeError):
            pass

    def set_amplifier_gain(self, amp):
        """Set the software amplifier's multiplication constant."""
        self._amp_value = amp
        self.amp.set_k(self._amp_value)

    def set_dc_gain(self, gain):
        """Set the DC-blocker IIR filter tap."""
        self.dc.set_taps(gain)

    def cancel_dc(self, state):
        """Splice the DC-removal subgraph in (state=True) or out (False)."""
        self.lock()
        if state:
            self.disconnect(self.u, self.amp)
            self.connect(self.u, (self.dc_sub, 0))
            self.connect(self.u, self.dc, (self.dc_sub, 1))
            self.connect(self.dc_sub, self.amp)
        else:
            self.disconnect(self.dc_sub, self.amp)
            self.disconnect(self.dc, (self.dc_sub, 1))
            self.disconnect(self.u, self.dc)
            self.disconnect(self.u, (self.dc_sub, 0))
            self.connect(self.u, self.amp)
        self.unlock()
def main():
    """Parse command-line options and launch the UHD display flowgraph."""
    parser = OptionParser(option_class=eng_option)
    parser.add_option("-a", "--address", type="string",
                      default="addr=192.168.10.2",
                      help="Address of UHD device, [default=%default]")
    parser.add_option("-A", "--antenna", type="string", default=None,
                      help="select Rx Antenna where appropriate")
    parser.add_option("-s", "--samp-rate", type="eng_float", default=1e6,
                      help="set sample rate (bandwidth) [default=%default]")
    parser.add_option("-f", "--freq", type="eng_float", default=2412e6,
                      help="set frequency to FREQ", metavar="FREQ")
    parser.add_option("-g", "--gain", type="eng_float", default=None,
                      help="set gain in dB (default is midpoint)")
    parser.add_option("--fft-size", type="int", default=2048,
                      help="Set number of FFT bins [default=%default]")
    options, args = parser.parse_args()

    # This program takes no positional arguments.
    if args:
        parser.print_help()
        sys.exit(1)

    tb = my_top_block(options)
    tb.start()
    tb.snk.exec_()
if __name__ == "__main__":
    try:
        main()
    except KeyboardInterrupt:
        # Allow Ctrl-C to exit quietly without a traceback.
        pass
|
examples | pyqt_freq_c | #!/usr/bin/env python
#
# Copyright 2012,2015 Free Software Foundation, Inc.
#
# This file is part of GNU Radio
#
# SPDX-License-Identifier: GPL-3.0-or-later
#
#
import sys
from gnuradio import blocks, filter, gr
from gnuradio.fft import window
try:
import sip
from gnuradio import qtgui
from PyQt5 import Qt, QtWidgets
except ImportError:
sys.stderr.write("Error: Program requires PyQt5 and gr-qtgui.\n")
sys.exit(1)
try:
from gnuradio import analog
except ImportError:
sys.stderr.write("Error: Program requires gr-analog.\n")
sys.exit(1)
try:
from gnuradio import channels
except ImportError:
sys.stderr.write("Error: Program requires gr-channels.\n")
sys.exit(1)
class dialog_box(QtWidgets.QWidget):
    """Top-level window placing the spectrum display beside the control panel."""

    def __init__(self, display, control):
        QtWidgets.QWidget.__init__(self, None)
        self.setWindowTitle("PyQt Test GUI")

        layout = QtWidgets.QBoxLayout(QtWidgets.QBoxLayout.LeftToRight, self)
        # The display gets all the stretch; the control panel keeps its size.
        layout.addWidget(display, 1)
        layout.addWidget(control)
        self.boxlayout = layout

        self.resize(800, 500)
class control_box(QtWidgets.QWidget):
    """Control panel with frequency/amplitude entries for two signal sources.

    The four labelled line edits were previously built with four copies of
    identical code; they are now created by the _add_field helper.
    """

    def __init__(self, parent=None):
        QtWidgets.QWidget.__init__(self, parent)
        self.setWindowTitle("Control Panel")

        self.setToolTip("Control the signals")
        QtWidgets.QToolTip.setFont(Qt.QFont("OldEnglish", 10))

        self.layout = QtWidgets.QFormLayout(self)

        # Control the first signal
        self.freq1Edit = self._add_field("Signal 1 Frequency:", self.freq1EditText)
        self.amp1Edit = self._add_field("Signal 1 Amplitude:", self.amp1EditText)

        # Control the second signal
        self.freq2Edit = self._add_field("Signal 2 Frequency:", self.freq2EditText)
        self.amp2Edit = self._add_field("Signal 2 Amplitude:", self.amp2EditText)

        self.quit = QtWidgets.QPushButton("Close", self)
        self.quit.setMinimumWidth(100)
        self.layout.addWidget(self.quit)
        self.quit.clicked.connect(QtWidgets.qApp.quit)

    def _add_field(self, label, slot):
        """Create one labelled line edit wired to its editingFinished handler."""
        edit = QtWidgets.QLineEdit(self)
        edit.setMinimumWidth(100)
        self.layout.addRow(label, edit)
        edit.editingFinished.connect(slot)
        return edit

    def attach_signal1(self, signal):
        """Bind signal 1 and show its current frequency/amplitude."""
        self.signal1 = signal
        self.freq1Edit.setText(("{0}").format(self.signal1.frequency()))
        self.amp1Edit.setText(("{0}").format(self.signal1.amplitude()))

    def attach_signal2(self, signal):
        """Bind signal 2 and show its current frequency/amplitude."""
        self.signal2 = signal
        self.freq2Edit.setText(("{0}").format(self.signal2.frequency()))
        self.amp2Edit.setText(("{0}").format(self.signal2.amplitude()))

    def freq1EditText(self):
        """Apply the edited frequency to signal 1, ignoring bad input."""
        try:
            newfreq = float(self.freq1Edit.text())
            self.signal1.set_frequency(newfreq)
        except ValueError:
            print("Bad frequency value entered")

    def amp1EditText(self):
        """Apply the edited amplitude to signal 1, ignoring bad input."""
        try:
            newamp = float(self.amp1Edit.text())
            self.signal1.set_amplitude(newamp)
        except ValueError:
            print("Bad amplitude value entered")

    def freq2EditText(self):
        """Apply the edited frequency to signal 2, ignoring bad input."""
        try:
            newfreq = float(self.freq2Edit.text())
            self.signal2.set_frequency(newfreq)
        except ValueError:
            print("Bad frequency value entered")

    def amp2EditText(self):
        """Apply the edited amplitude to signal 2, ignoring bad input."""
        try:
            newamp = float(self.amp2Edit.text())
            self.signal2.set_amplitude(newamp)
        except ValueError:
            print("Bad amplitude value entered")
class my_top_block(gr.top_block):
    """Two-tone test flowgraph feeding a frequency sink through a noisy channel.

    Two complex sine sources are summed, passed through a channel model and
    a throttle, and displayed on a 3-input qtgui frequency sink alongside
    the individual tones.
    """

    def __init__(self):
        gr.top_block.__init__(self)

        Rs = 8000      # sample rate
        f1 = 100       # tone 1 frequency
        f2 = 200       # tone 2 frequency

        npts = 2048    # FFT size

        self.qapp = QtWidgets.QApplication(sys.argv)

        # Apply the dark theme shipped with GNU Radio. Use a context
        # manager so the stylesheet file is closed even on error (the
        # original left the handle open if read() raised).
        with open(gr.prefix() + "/share/gnuradio/themes/dark.qss") as ss:
            sstext = ss.read()
        self.qapp.setStyleSheet(sstext)

        src1 = analog.sig_source_c(Rs, analog.GR_SIN_WAVE, f1, 0.1, 0)
        src2 = analog.sig_source_c(Rs, analog.GR_SIN_WAVE, f2, 0.1, 0)
        src = blocks.add_cc()
        channel = channels.channel_model(0.01)
        thr = blocks.throttle(gr.sizeof_gr_complex, 100 * npts)
        self.snk1 = qtgui.freq_sink_c(
            npts, window.WIN_BLACKMAN_hARRIS, 0, Rs, "Complex Freq Example", 3, None
        )

        # Input 0: the summed/noisy signal; inputs 1 and 2: the clean tones.
        self.connect(src1, (src, 0))
        self.connect(src2, (src, 1))
        self.connect(src, channel, thr, (self.snk1, 0))
        self.connect(src1, (self.snk1, 1))
        self.connect(src2, (self.snk1, 2))

        self.ctrl_win = control_box()
        self.ctrl_win.attach_signal1(src1)
        self.ctrl_win.attach_signal2(src2)

        # Get the reference pointer to the SpectrumDisplayForm QWidget
        pyQt = self.snk1.qwidget()

        # Wrap the pointer as a PyQt SIP object
        # This can now be manipulated as a PyQt5.QtWidgets.QWidget
        pyWin = sip.wrapinstance(pyQt, QtWidgets.QWidget)

        # pyWin.show()
        self.main_box = dialog_box(pyWin, self.ctrl_win)
        self.main_box.show()
if __name__ == "__main__":
    tb = my_top_block()
    tb.start()
    # Block in the Qt event loop until the window is closed.
    tb.qapp.exec_()
    tb.stop()
|
Peer | PeerPortchecker | import logging
import re
import time
import urllib.parse
import urllib.request
from Debug import Debug
from util import UpnpPunch
class PeerPortchecker(object):
    """Checks whether a local port is reachable from the internet.

    Uses UPnP to open/close ports and a set of third-party web services
    (scraped over HTTP) to verify external reachability per IP family.
    """

    # Ordered lists of method names to try per address family; the first
    # checker that yields a usable result wins (see portCheck).
    checker_functions = {
        "ipv4": ["checkIpfingerprints", "checkCanyouseeme"],
        "ipv6": ["checkMyaddr", "checkIpv6scanner"],
    }

    def __init__(self, file_server):
        self.log = logging.getLogger("PeerPortchecker")
        self.upnp_port_opened = False
        self.file_server = file_server

    def requestUrl(self, url, post_data=None):
        """Fetch *url* (POST when post_data given) with browser-like headers.

        Returns the open urllib response object; caller reads/decodes it.
        """
        if type(post_data) is dict:
            post_data = urllib.parse.urlencode(post_data).encode("utf8")
        req = urllib.request.Request(url, post_data)
        # Some checker sites reject requests without Referer/User-Agent.
        req.add_header("Referer", url)
        req.add_header(
            "User-Agent",
            "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.11 (KHTML, like Gecko) Chrome/23.0.1271.64 Safari/537.11",
        )
        req.add_header(
            "Accept", "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8"
        )
        return urllib.request.urlopen(req, timeout=20.0)

    def portOpen(self, port):
        """Try to open *port* via UPnP; returns True on success."""
        self.log.info("Trying to open port using UpnpPunch...")

        try:
            UpnpPunch.ask_to_open_port(port, "ZeroNet", retries=3, protos=["TCP"])
            self.upnp_port_opened = True
        except Exception as err:
            self.log.warning("UpnpPunch run error: %s" % Debug.formatException(err))
            return False

        return True

    def portClose(self, port):
        """Close a previously UPnP-opened port."""
        return UpnpPunch.ask_to_close_port(port, protos=["TCP"])

    def portCheck(self, port, ip_type="ipv4"):
        """Check external reachability of *port* for the given IP family.

        Tries each checker in checker_functions[ip_type] until one returns
        a result; checker exceptions are logged and mapped to a closed
        result. Returns a dict {"ip": str|None, "opened": bool}.
        """
        checker_functions = self.checker_functions[ip_type]

        for func_name in checker_functions:
            func = getattr(self, func_name)
            s = time.time()
            try:
                res = func(port)
                if res:
                    self.log.info(
                        "Checked port %s (%s) using %s result: %s in %.3fs"
                        % (port, ip_type, func_name, res, time.time() - s)
                    )
                    time.sleep(0.1)
                    # A checker may report "open" even though no peer ever
                    # connected; trust actual incoming connections instead.
                    if res["opened"] and not self.file_server.had_external_incoming:
                        res["opened"] = False
                        self.log.warning(
                            "Port %s:%s looks opened, but no incoming connection"
                            % (res["ip"], port)
                        )
                    break
            except Exception as err:
                self.log.warning(
                    "%s check error: %s in %.3fs"
                    % (func_name, Debug.formatException(err), time.time() - s)
                )
                # Fall back to "closed" and let the next checker retry.
                res = {"ip": None, "opened": False}

        return res

    def checkCanyouseeme(self, port):
        """IPv4 check via canyouseeme.org (plain urlopen, no extra headers)."""
        data = (
            urllib.request.urlopen(
                "https://www.canyouseeme.org/",
                b"ip=1.1.1.1&port=%s" % str(port).encode("ascii"),
                timeout=20.0,
            )
            .read()
            .decode("utf8")
        )

        message = re.match(
            r'.*<p style="padding-left:15px">(.*?)</p>', data, re.DOTALL
        ).group(1)
        message = re.sub(
            r"<.*?>", "", message.replace("<br>", " ").replace("&nbsp;", " ")
        )  # Strip http tags

        match = re.match(r".*service on (.*?) on", message)
        if match:
            ip = match.group(1)
        else:
            raise Exception("Invalid response: %s" % message)

        if "Success" in message:
            return {"ip": ip, "opened": True}
        elif "Error" in message:
            return {"ip": ip, "opened": False}
        else:
            raise Exception("Invalid response: %s" % message)

    def checkIpfingerprints(self, port):
        """IPv4 check via ipfingerprints.com's port-scan form."""
        data = (
            self.requestUrl("https://www.ipfingerprints.com/portscan.php")
            .read()
            .decode("utf8")
        )
        # The form pre-fills our external IP in the remoteHost field.
        ip = re.match(r'.*name="remoteHost".*?value="(.*?)"', data, re.DOTALL).group(1)

        post_data = {
            "remoteHost": ip,
            "start_port": port,
            "end_port": port,
            "normalScan": "Yes",
            "scan_type": "connect2",
            "ping_type": "none",
        }
        message = (
            self.requestUrl(
                "https://www.ipfingerprints.com/scripts/getPortsInfo.php", post_data
            )
            .read()
            .decode("utf8")
        )

        if "open" in message:
            return {"ip": ip, "opened": True}
        elif "filtered" in message or "closed" in message:
            return {"ip": ip, "opened": False}
        else:
            raise Exception("Invalid response: %s" % message)

    def checkMyaddr(self, port):
        """IPv6 check via ipv6.my-addr.com's online port scanner."""
        url = "http://ipv6.my-addr.com/online-ipv6-port-scan.php"

        data = self.requestUrl(url).read().decode("utf8")

        ip = re.match(
            r".*Your IP address is:[ ]*([0-9\.:a-z]+)",
            data.replace("<strong>", ""),
            re.DOTALL,
        ).group(1)

        post_data = {"addr": ip, "ports_selected": "", "ports_list": port}
        data = self.requestUrl(url, post_data).read().decode("utf8")

        message = re.match(
            r".*<table class='table_font_16'>(.*?)</table>", data, re.DOTALL
        ).group(1)

        if "ok.png" in message:
            return {"ip": ip, "opened": True}
        elif "fail.png" in message:
            return {"ip": ip, "opened": False}
        else:
            raise Exception("Invalid response: %s" % message)

    def checkIpv6scanner(self, port):
        """IPv6 check via ipv6scanner.com's CGI scanner."""
        url = "http://www.ipv6scanner.com/cgi-bin/main.py"

        data = self.requestUrl(url).read().decode("utf8")

        ip = re.match(
            r".*Your IP address is[ ]*([0-9\.:a-z]+)",
            data.replace("<strong>", ""),
            re.DOTALL,
        ).group(1)

        post_data = {
            "host": ip,
            "scanType": "1",
            "port": port,
            "protocol": "tcp",
            "authorized": "yes",
        }
        data = self.requestUrl(url, post_data).read().decode("utf8")

        message = re.match(
            r".*<table id='scantable'>(.*?)</table>", data, re.DOTALL
        ).group(1)
        message_text = re.sub(
            "<.*?>", " ", message.replace("<br>", " ").replace("&nbsp;", " ").strip()
        )  # Strip http tags

        if "OPEN" in message_text:
            return {"ip": ip, "opened": True}
        elif "CLOSED" in message_text or "FILTERED" in message_text:
            return {"ip": ip, "opened": False}
        else:
            raise Exception("Invalid response: %s" % message_text)

    def checkPortchecker(self, port):  # Not working: Forbidden
        """IPv4 check via portchecker.co (kept for reference; site rejects us)."""
        data = self.requestUrl("https://portchecker.co").read().decode("utf8")
        csrf = re.match(r'.*name="_csrf" value="(.*?)"', data, re.DOTALL).group(1)

        data = (
            self.requestUrl("https://portchecker.co", {"port": port, "_csrf": csrf})
            .read()
            .decode("utf8")
        )
        message = re.match(
            r'.*<div id="results-wrapper">(.*?)</div>', data, re.DOTALL
        ).group(1)
        message = re.sub(
            r"<.*?>", "", message.replace("<br>", " ").replace("&nbsp;", " ").strip()
        )  # Strip http tags

        match = re.match(r".*targetIP.*?value=\"(.*?)\"", data, re.DOTALL)
        if match:
            ip = match.group(1)
        else:
            raise Exception("Invalid response: %s" % message)

        if "open" in message:
            return {"ip": ip, "opened": True}
        elif "closed" in message:
            return {"ip": ip, "opened": False}
        else:
            raise Exception("Invalid response: %s" % message)

    def checkSubnetonline(self, port):  # Not working: Invalid response
        """IPv6 check via subnetonline.com (kept for reference; broken)."""
        url = "https://www.subnetonline.com/pages/ipv6-network-tools/online-ipv6-port-scanner.php"

        data = self.requestUrl(url).read().decode("utf8")

        ip = re.match(
            r'.*Your IP is.*?name="host".*?value="(.*?)"', data, re.DOTALL
        ).group(1)
        token = re.match(r'.*name="token".*?value="(.*?)"', data, re.DOTALL).group(1)

        post_data = {
            "host": ip,
            "port": port,
            "allow": "on",
            "token": token,
            "submit": "Scanning..",
        }
        data = self.requestUrl(url, post_data).read().decode("utf8")

        print(post_data, data)

        message = re.match(
            r".*<div class='formfield'>(.*?)</div>", data, re.DOTALL
        ).group(1)
        message = re.sub(
            r"<.*?>", "", message.replace("<br>", " ").replace("&nbsp;", " ").strip()
        )  # Strip http tags

        if "online" in message:
            return {"ip": ip, "opened": True}
        elif "closed" in message:
            return {"ip": ip, "opened": False}
        else:
            raise Exception("Invalid response: %s" % message)
|
plugins | unique_prices | """This module adds validation that there is a single price defined per
date and base/quote currencies. If multiple conflicting price values are
declared, an error is generated. Note that multiple price entries with the
same number do not generate an error.
This is meant to be turned on if you want to use a very strict mode for
entering prices, and may not be realistic usage. For example, if you have
(1) a transaction with an implicitly generated price during the day (from
its cost) and (2) a separate explicit price directive that declares a
different price for the day's closing price, this would generate an error.
I'm not certain this will be useful in the long run, so placing it in a
plugin.
"""
__copyright__ = "Copyright (C) 2014, 2016-2017 Martin Blais"
__license__ = "GNU GPLv2"
import collections
from beancount.core import data
__plugins__ = ("validate_unique_prices",)
UniquePricesError = collections.namedtuple("UniquePricesError", "source message entry")
def validate_unique_prices(entries, unused_options_map):
    """Check that there is only a single price per day for a particular base/quote.

    Args:
      entries: A list of directives. We're interested only in the Price instances.
      unused_options_map: A parser options dict.
    Returns:
      The list of input entries, and a list of new UniquePricesError instances
      generated.
    """
    # (Dropped the unused 'new_entries' list and a duplicate 'errors = []'
    # initialization from the original.)
    errors = []

    # Group all Price directives by (date, base currency, quote currency).
    prices = collections.defaultdict(list)
    for entry in entries:
        if not isinstance(entry, data.Price):
            continue
        key = (entry.date, entry.currency, entry.amount.currency)
        prices[key].append(entry)

    for price_entries in prices.values():
        if len(price_entries) > 1:
            # Multiple entries with the same number are allowed; only
            # disagreeing numbers are an error.
            number_map = {
                price_entry.amount.number: price_entry for price_entry in price_entries
            }
            if len(number_map) > 1:
                # Note: This should be a list of entries for better error
                # reporting. (Later.)
                error_entry = next(iter(number_map.values()))
                errors.append(
                    UniquePricesError(
                        error_entry.meta, "Disagreeing price entries", price_entries
                    )
                )

    return entries, errors
|
parts | plgarea | # -*- coding: utf-8 -*-
#
# Copyright (C) 2013-2018 by Ihor E. Novikov
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
import wal
from sk1.parts.plgtabs import PlgnTabPanel
class PlgArea(wal.HPanel):
    """Side panel hosting plugin UIs with a tab strip for switching.

    Plugins are activated lazily on first show and their panels are packed
    into a shared container; only one plugin is visible at a time.
    """

    app = None
    active_plg = None
    plugins = []
    container = None
    tabs = None

    def __init__(self, app, parent):
        self.app = app
        # FIX: 'plugins' was only a class-level list, shared by every
        # PlgArea instance; shadow it with a per-instance list so plugin
        # state cannot leak between areas.
        self.plugins = []
        wal.HPanel.__init__(self, parent)
        self.pack(wal.SplitterSash(self, parent), fill=True)
        self.pack(wal.PLine(self), fill=True)
        self.container = wal.VPanel(self)
        self.pack(self.container, expand=True, fill=True)
        self.tabs = PlgnTabPanel(app, self)
        self.pack(self.tabs, fill=True)
        self.layout()

    def check_pid(self, pid):
        """Return the loaded plugin with id *pid*, or None if not loaded."""
        for item in self.plugins:
            if item.pid == pid:
                return item
        return None

    def load_plugin(self, pid):
        """Activate the app-registered plugin *pid* and track it as loaded."""
        item = self.app.plugins[pid]
        item.activate()
        self.plugins.append(item)
        return item

    def show_plugin(self, pid, *args):
        """Bring plugin *pid* to front, loading it on first use."""
        if not pid:
            return
        if self.active_plg and pid == self.active_plg.pid:
            # Already frontmost: just forward the show event.
            self.active_plg.show_signal(*args)
            return
        item = self.check_pid(pid)
        if self.active_plg:
            self.active_plg.hide()
        if not item:
            # First time: load the plugin and pack its panel into the container.
            self.container.hide(update=False)
            item = self.load_plugin(pid)
            self.container.pack(item.panel, expand=True, fill=True)
            self.tabs.add_new_tab(item)
            item.panel.layout()
            self.container.show()
        else:
            self.tabs.set_active(item)
        self.active_plg = item
        self.container.layout()
        self.active_plg.show(*args)
        self.active_plg.panel.refresh()
        self.app.mdiarea.show_plugin_area()

    def close_plugin(self, pid):
        """Close plugin *pid*; show the next plugin or hide the whole area."""
        item = self.check_pid(pid)
        if not item:
            return
        self.tabs.remove_tab(item)
        self.plugins.remove(item)
        self.container.remove(item.panel)
        item.hide()
        if self.active_plg == item:
            self.active_plg = None
            if self.plugins:
                self.show_plugin(self.plugins[0].pid)
            else:
                self.app.mdiarea.show_plugin_area(False)
|
qtui | phrasepage | # Copyright (C) 2011 Chris Dekter
# Copyright (C) 2018, 2019 Thomas Hess <thomas.hess@udo.edu>
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import subprocess
import autokey.model.phrase
from autokey.qtui import common as ui_common
from PyQt5.QtWidgets import QMessageBox
PROBLEM_MSG_PRIMARY = "Some problems were found"
PROBLEM_MSG_SECONDARY = "{}\n\nYour changes have not been saved."
# TODO: Once the port to Qt5 is done, set the editor placeholder text in the UI file to "Enter your phrase here."
# TODO: Pure Qt4 QTextEdit does not support placeholder texts, so this functionality is currently unavailable.
class PhrasePage(*ui_common.inherits_from_ui_file_with_name("phrasepage")):
    """Editor page for a single Phrase: text, send mode and trigger options.

    The base classes come from the generated "phrasepage" UI file, so
    widget attributes (phraseText, sendModeCombo, ...) are created by
    setupUi rather than declared here.
    """

    def __init__(self):
        super(PhrasePage, self).__init__()
        self.setupUi(self)
        # Guard flag: suppress dirty-marking while widgets are being populated.
        self.initialising = True
        self.current_phrase = None  # type: autokey.model.phrase.Phrase
        for val in sorted(autokey.model.phrase.SEND_MODES.keys()):
            self.sendModeCombo.addItem(val)
        self.initialising = False

    def load(self, phrase: autokey.model.phrase.Phrase):
        """Populate all widgets from *phrase* and make it the current item."""
        self.current_phrase = phrase
        self.phraseText.setPlainText(phrase.phrase)
        self.showInTrayCheckbox.setChecked(phrase.show_in_tray_menu)
        # Select the combo entry whose mapped value matches the phrase's mode.
        for k, v in autokey.model.phrase.SEND_MODES.items():
            if v == phrase.sendMode:
                self.sendModeCombo.setCurrentIndex(self.sendModeCombo.findText(k))
                break
        if self.is_new_item():
            self.urlLabel.setEnabled(False)
            self.urlLabel.setText("(Unsaved)")  # TODO: i18n
        else:
            ui_common.set_url_label(self.urlLabel, self.current_phrase.path)

        # TODO - re-enable me if restoring predictive functionality
        # self.predictCheckbox.setChecked(model.TriggerMode.PREDICTIVE in phrase.modes)

        self.promptCheckbox.setChecked(phrase.prompt)
        self.settingsWidget.load(phrase)

    def save(self):
        """Write widget state back into the phrase and persist it.

        Always returns False (no rename/path change to propagate).
        """
        self.settingsWidget.save()
        self.current_phrase.phrase = str(self.phraseText.toPlainText())
        self.current_phrase.show_in_tray_menu = self.showInTrayCheckbox.isChecked()

        self.current_phrase.sendMode = autokey.model.phrase.SEND_MODES[
            str(self.sendModeCombo.currentText())
        ]

        # TODO - re-enable me if restoring predictive functionality
        # if self.predictCheckbox.isChecked():
        #    self.currentPhrase.modes.append(model.TriggerMode.PREDICTIVE)

        self.current_phrase.prompt = self.promptCheckbox.isChecked()

        self.current_phrase.persist()
        ui_common.set_url_label(self.urlLabel, self.current_phrase.path)
        return False

    def get_current_item(self):
        """Returns the currently held item."""
        return self.current_phrase

    def set_item_title(self, title):
        # Title edits come from the tree view; path is rebuilt separately.
        self.current_phrase.description = title

    def rebuild_item_path(self):
        self.current_phrase.rebuild_path()

    def is_new_item(self):
        # A phrase gets a path only after its first persist().
        return self.current_phrase.path is None

    def reset(self):
        """Discard unsaved edits by reloading the current phrase."""
        self.load(self.current_phrase)

    def validate(self):
        """Check editable state; show a dialog and return False on problems."""
        errors = []

        # Check phrase content
        phrase = str(self.phraseText.toPlainText())
        if ui_common.EMPTY_FIELD_REGEX.match(phrase):
            errors.append("The phrase content can't be empty")  # TODO: i18n

        # Check settings
        errors += self.settingsWidget.validate()

        if errors:
            msg = PROBLEM_MSG_SECONDARY.format("\n".join([str(e) for e in errors]))
            QMessageBox.critical(self.window(), PROBLEM_MSG_PRIMARY, msg)

        return not bool(errors)

    def set_dirty(self):
        self.window().set_dirty()

    def undo(self):
        self.phraseText.undo()

    def redo(self):
        self.phraseText.redo()

    def insert_token(self, token):
        """Insert a macro token at the cursor position in the phrase editor."""
        self.phraseText.insertPlainText(token)

    # --- Signal handlers

    def on_phraseText_textChanged(self):
        self.set_dirty()

    def on_phraseText_undoAvailable(self, state):
        self.window().set_undo_available(state)

    def on_phraseText_redoAvailable(self, state):
        self.window().set_redo_available(state)

    def on_predictCheckbox_stateChanged(self, state):
        self.set_dirty()

    def on_promptCheckbox_stateChanged(self, state):
        self.set_dirty()

    def on_showInTrayCheckbox_stateChanged(self, state):
        self.set_dirty()

    def on_sendModeCombo_currentIndexChanged(self, index):
        # Initial population of the combo must not mark the page dirty.
        if not self.initialising:
            self.set_dirty()

    def on_urlLabel_leftClickedUrl(self, url=None):
        if url:
            # NOTE(review): hard-coded xdg-open path — assumes a standard
            # freedesktop environment; confirm before porting.
            subprocess.Popen(["/usr/bin/xdg-open", url])
|
Assembly | CommandCreateJoint | # SPDX-License-Identifier: LGPL-2.1-or-later
# /****************************************************************************
# *
# Copyright (c) 2023 Ondsel <development@ondsel.com> *
# *
# This file is part of FreeCAD. *
# *
# FreeCAD is free software: you can redistribute it and/or modify it *
# under the terms of the GNU Lesser General Public License as *
# published by the Free Software Foundation, either version 2.1 of the *
# License, or (at your option) any later version. *
# *
# FreeCAD is distributed in the hope that it will be useful, but *
# WITHOUT ANY WARRANTY; without even the implied warranty of *
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU *
# Lesser General Public License for more details. *
# *
# You should have received a copy of the GNU Lesser General Public *
# License along with FreeCAD. If not, see *
# <https://www.gnu.org/licenses/>. *
# *
# ***************************************************************************/
import os
import FreeCAD as App
from PySide.QtCore import QT_TRANSLATE_NOOP
if App.GuiUp:
import FreeCADGui as Gui
from PySide import QtCore, QtGui, QtWidgets
import Assembly_rc
import JointObject
import UtilsAssembly
# translate = App.Qt.translate
__title__ = "Assembly Commands to Create Joints"
__author__ = "Ondsel"
__url__ = "https://www.freecad.org"
class CommandCreateJointFixed:
    """Gui command that opens the task panel for creating a Fixed joint."""

    def __init__(self):
        pass

    def GetResources(self):
        # Resource dict consumed by FreeCAD's command framework.
        menu_text = QT_TRANSLATE_NOOP("Assembly_CreateJointFixed", "Create Fixed Joint")
        tool_tip = QT_TRANSLATE_NOOP(
            "Assembly_CreateJointFixed",
            "<p>Create a Fixed Joint: Permanently locks two parts together, preventing any movement or rotation.</p>",
        )
        return {
            "Pixmap": "Assembly_CreateJointFixed",
            "MenuText": menu_text,
            "Accel": "F",
            "ToolTip": tool_tip,
            "CmdType": "ForEdit",
        }

    def IsActive(self):
        # Enabled only while an assembly is being edited.
        return UtilsAssembly.activeAssembly() is not None

    def Activated(self):
        assembly = UtilsAssembly.activeAssembly()
        if not assembly:
            return
        view = Gui.activeDocument().activeView()
        self.panel = TaskAssemblyCreateJoint(assembly, view, 0)
        Gui.Control.showDialog(self.panel)
class CommandCreateJointRevolute:
    """Gui command that opens the task panel for creating a Revolute joint."""

    def __init__(self):
        pass

    def GetResources(self):
        # Resource dict consumed by FreeCAD's command framework.
        menu_text = QT_TRANSLATE_NOOP(
            "Assembly_CreateJointRevolute", "Create Revolute Joint"
        )
        tool_tip = QT_TRANSLATE_NOOP(
            "Assembly_CreateJointRevolute",
            "<p>Create a Revolute Joint: Allows rotation around a single axis between selected parts.</p>",
        )
        return {
            "Pixmap": "Assembly_CreateJointRevolute",
            "MenuText": menu_text,
            "Accel": "R",
            "ToolTip": tool_tip,
            "CmdType": "ForEdit",
        }

    def IsActive(self):
        # Enabled only while an assembly is being edited.
        return UtilsAssembly.activeAssembly() is not None

    def Activated(self):
        assembly = UtilsAssembly.activeAssembly()
        if not assembly:
            return
        view = Gui.activeDocument().activeView()
        self.panel = TaskAssemblyCreateJoint(assembly, view, 1)
        Gui.Control.showDialog(self.panel)
class CommandCreateJointCylindrical:
    """Gui command that opens the task panel for creating a Cylindrical joint."""

    def __init__(self):
        pass

    def GetResources(self):
        # Resource dict consumed by FreeCAD's command framework.
        menu_text = QT_TRANSLATE_NOOP(
            "Assembly_CreateJointCylindrical", "Create Cylindrical Joint"
        )
        tool_tip = QT_TRANSLATE_NOOP(
            "Assembly_CreateJointCylindrical",
            "<p>Create a Cylindrical Joint: Enables rotation along one axis while permitting movement along the same axis between assembled parts.</p>",
        )
        return {
            "Pixmap": "Assembly_CreateJointCylindrical",
            "MenuText": menu_text,
            "Accel": "C",
            "ToolTip": tool_tip,
            "CmdType": "ForEdit",
        }

    def IsActive(self):
        # Enabled only while an assembly is being edited.
        return UtilsAssembly.activeAssembly() is not None

    def Activated(self):
        assembly = UtilsAssembly.activeAssembly()
        if not assembly:
            return
        view = Gui.activeDocument().activeView()
        self.panel = TaskAssemblyCreateJoint(assembly, view, 2)
        Gui.Control.showDialog(self.panel)
class CommandCreateJointSlider:
    """Gui command that opens the task panel for creating a Slider joint."""

    def __init__(self):
        pass

    def GetResources(self):
        # Resource dict consumed by FreeCAD's command framework.
        menu_text = QT_TRANSLATE_NOOP(
            "Assembly_CreateJointSlider", "Create Slider Joint"
        )
        tool_tip = QT_TRANSLATE_NOOP(
            "Assembly_CreateJointSlider",
            "<p>Create a Slider Joint: Allows linear movement along a single axis but restricts rotation between selected parts.</p>",
        )
        return {
            "Pixmap": "Assembly_CreateJointSlider",
            "MenuText": menu_text,
            "Accel": "S",
            "ToolTip": tool_tip,
            "CmdType": "ForEdit",
        }

    def IsActive(self):
        # Enabled only while an assembly is being edited.
        return UtilsAssembly.activeAssembly() is not None

    def Activated(self):
        assembly = UtilsAssembly.activeAssembly()
        if not assembly:
            return
        view = Gui.activeDocument().activeView()
        self.panel = TaskAssemblyCreateJoint(assembly, view, 3)
        Gui.Control.showDialog(self.panel)
class CommandCreateJointBall:
    """Gui command that opens the task panel for creating a Ball joint."""

    def __init__(self):
        pass

    def GetResources(self):
        # Resource dict consumed by FreeCAD's command framework.
        menu_text = QT_TRANSLATE_NOOP("Assembly_CreateJointBall", "Create Ball Joint")
        tool_tip = QT_TRANSLATE_NOOP(
            "Assembly_CreateJointBall",
            "<p>Create a Ball Joint: Connects parts at a point, allowing unrestricted movement as long as the connection points remain in contact.</p>",
        )
        return {
            "Pixmap": "Assembly_CreateJointBall",
            "MenuText": menu_text,
            "Accel": "B",
            "ToolTip": tool_tip,
            "CmdType": "ForEdit",
        }

    def IsActive(self):
        # Enabled only while an assembly is being edited.
        return UtilsAssembly.activeAssembly() is not None

    def Activated(self):
        assembly = UtilsAssembly.activeAssembly()
        if not assembly:
            return
        view = Gui.activeDocument().activeView()
        self.panel = TaskAssemblyCreateJoint(assembly, view, 4)
        Gui.Control.showDialog(self.panel)
class CommandCreateJointPlanar:
    """Gui command that opens the task panel for creating a Planar joint."""

    def __init__(self):
        pass

    def GetResources(self):
        # Resource dict consumed by FreeCAD's command framework.
        menu_text = QT_TRANSLATE_NOOP(
            "Assembly_CreateJointPlanar", "Create Planar Joint"
        )
        tool_tip = QT_TRANSLATE_NOOP(
            "Assembly_CreateJointPlanar",
            "<p>Create a Planar Joint: Ensures two selected features are in the same plane, restricting movement to that plane.</p>",
        )
        return {
            "Pixmap": "Assembly_CreateJointPlanar",
            "MenuText": menu_text,
            "Accel": "P",
            "ToolTip": tool_tip,
            "CmdType": "ForEdit",
        }

    def IsActive(self):
        # Enabled only while an assembly is being edited.
        return UtilsAssembly.activeAssembly() is not None

    def Activated(self):
        assembly = UtilsAssembly.activeAssembly()
        if not assembly:
            return
        view = Gui.activeDocument().activeView()
        self.panel = TaskAssemblyCreateJoint(assembly, view, 5)
        Gui.Control.showDialog(self.panel)
class CommandCreateJointParallel:
    """Gui command that opens the task panel for creating a Parallel joint."""

    def __init__(self):
        pass

    def GetResources(self):
        # Resource dict consumed by FreeCAD's command framework.
        menu_text = QT_TRANSLATE_NOOP(
            "Assembly_CreateJointParallel", "Create Parallel Joint"
        )
        tool_tip = QT_TRANSLATE_NOOP(
            "Assembly_CreateJointParallel",
            "<p>Create a Parallel Joint: Aligns two features to be parallel, constraining relative movement to parallel translations.</p>",
        )
        return {
            "Pixmap": "Assembly_CreateJointParallel",
            "MenuText": menu_text,
            "Accel": "L",
            "ToolTip": tool_tip,
            "CmdType": "ForEdit",
        }

    def IsActive(self):
        # Enabled only while an assembly is being edited.
        return UtilsAssembly.activeAssembly() is not None

    def Activated(self):
        assembly = UtilsAssembly.activeAssembly()
        if not assembly:
            return
        view = Gui.activeDocument().activeView()
        self.panel = TaskAssemblyCreateJoint(assembly, view, 6)
        Gui.Control.showDialog(self.panel)
class CommandCreateJointTangent:
    """Gui command that opens the task panel for creating a Tangent joint."""

    def __init__(self):
        pass

    def GetResources(self):
        # Resource dict consumed by FreeCAD's command framework.
        menu_text = QT_TRANSLATE_NOOP(
            "Assembly_CreateJointTangent", "Create Tangent Joint"
        )
        tool_tip = QT_TRANSLATE_NOOP(
            "Assembly_CreateJointTangent",
            "<p>Create a Tangent Joint: Forces two features to be tangent, restricting movement to smooth transitions along their contact surface.</p>",
        )
        return {
            "Pixmap": "Assembly_CreateJointTangent",
            "MenuText": menu_text,
            "Accel": "T",
            "ToolTip": tool_tip,
            "CmdType": "ForEdit",
        }

    def IsActive(self):
        # Enabled only while an assembly is being edited.
        return UtilsAssembly.activeAssembly() is not None

    def Activated(self):
        assembly = UtilsAssembly.activeAssembly()
        if not assembly:
            return
        view = Gui.activeDocument().activeView()
        self.panel = TaskAssemblyCreateJoint(assembly, view, 7)
        Gui.Control.showDialog(self.panel)
class MakeJointSelGate:
    """Selection gate restricting picks to sub-elements of the edited assembly."""

    def __init__(self, taskbox, assembly):
        self.taskbox = taskbox
        self.assembly = assembly

    def allow(self, doc, obj, sub):
        """Return True if (obj, sub) may be added to the joint selection."""
        if not sub:
            return False

        objs_names, element_name = UtilsAssembly.getObjsNamesAndElement(obj.Name, sub)
        # Only elements (never whole objects) that belong to this assembly.
        if self.assembly.Name not in objs_names or element_name == "":
            return False

        # Deselecting something already selected is always permitted.
        if Gui.Selection.isSelected(obj, sub, Gui.Selection.ResolveMode.NoResolve):
            return True

        # Basic joints take at most two elements.
        if len(self.taskbox.current_selection) >= 2:
            return False

        # Both elements must come from different parts — a solid cannot be
        # joined to itself.
        full_obj_name = ".".join(objs_names)
        return all(
            sel["full_obj_name"] != full_obj_name
            for sel in self.taskbox.current_selection
        )
class TaskAssemblyCreateJoint(QtCore.QObject):
    """Task panel driving interactive creation of a joint between two elements.

    While open it installs a selection gate/observer and 3D-view callbacks,
    creates the Joint document object up-front inside an undo transaction,
    and updates it live as the user picks elements.
    """

    def __init__(self, assembly, view, jointTypeIndex):
        # jointTypeIndex: index into JointObject.JointTypes pre-selected
        # in the panel's combo box.
        super().__init__()
        self.assembly = assembly
        self.view = view
        self.doc = App.ActiveDocument
        self.form = Gui.PySideUic.loadUi(":/panels/TaskAssemblyCreateJoint.ui")
        self.form.jointType.addItems(JointObject.JointTypes)
        self.form.jointType.setCurrentIndex(jointTypeIndex)
        # Restrict selection to assembly elements while the panel is open.
        Gui.Selection.clearSelection()
        Gui.Selection.addSelectionGate(
            MakeJointSelGate(self, self.assembly), Gui.Selection.ResolveMode.NoResolve
        )
        Gui.Selection.addObserver(self, Gui.Selection.ResolveMode.NoResolve)
        Gui.Selection.setSelectionStyle(Gui.Selection.SelectionStyle.GreedySelection)
        self.current_selection = []
        self.preselection_dict = None
        # 3D-view callbacks: live JCS preview on mouse move, ESC/RETURN keys.
        self.callbackMove = self.view.addEventCallback(
            "SoLocation2Event", self.moveMouse
        )
        self.callbackKey = self.view.addEventCallback(
            "SoKeyboardEvent", self.KeyboardEvent
        )
        # Everything below happens inside a single undo transaction that is
        # committed in accept() or rolled back in reject().
        App.setActiveTransaction("Create joint")
        self.createJointObject()

    def accept(self):
        """Validate the dialog; requires exactly two selected elements."""
        if len(self.current_selection) != 2:
            App.Console.PrintWarning(
                "You need to select 2 elements from 2 separate parts."
            )
            return False
        self.deactivate()
        App.closeActiveTransaction()
        return True

    def reject(self):
        """Cancel: roll back the transaction, dropping the created joint."""
        self.deactivate()
        App.closeActiveTransaction(True)
        return True

    def deactivate(self):
        """Restore normal selection behavior and remove 3D-view callbacks."""
        Gui.Selection.removeSelectionGate()
        Gui.Selection.removeObserver(self)
        Gui.Selection.setSelectionStyle(Gui.Selection.SelectionStyle.NormalSelection)
        Gui.Selection.clearSelection()
        self.view.removeEventCallback("SoLocation2Event", self.callbackMove)
        self.view.removeEventCallback("SoKeyboardEvent", self.callbackKey)
        if Gui.Control.activeDialog():
            Gui.Control.closeDialog()

    def createJointObject(self):
        """Create the Joint feature inside the assembly's "Joints" group."""
        type_index = self.form.jointType.currentIndex()
        joint_group = self.assembly.getObject("Joints")
        if not joint_group:
            # First joint in this assembly: create the group holding them.
            joint_group = self.assembly.newObject("App::DocumentObjectGroup", "Joints")
        self.joint = joint_group.newObject("App::FeaturePython", "Joint")
        JointObject.Joint(self.joint, type_index)
        JointObject.ViewProviderJoint(self.joint.ViewObject, self.joint)

    def updateJoint(self):
        """Refresh the feature list widget and push the selection to the joint."""
        # First we build the listwidget
        self.form.featureList.clear()
        simplified_names = []
        for sel in self.current_selection:
            # TODO: ideally we probably want to hide the feature name in case of PartDesign bodies. ie body.face12 and not body.pad2.face12
            sname = sel["full_element_name"].split(self.assembly.Name + ".", 1)[-1]
            simplified_names.append(sname)
        self.form.featureList.addItems(simplified_names)
        # Then we pass the new list to the join object
        self.joint.Proxy.setJointConnectors(self.current_selection)

    def moveMouse(self, info):
        """Show/update the joint coordinate system preview under the cursor."""
        # Hide the preview once the joint is fully defined, or while hovering
        # the element that is already selected.
        # NOTE(review): the second branch reads preselection_dict without a
        # None check — appears to rely on a preselection always preceding a
        # move event; confirm.
        if len(self.current_selection) >= 2 or (
            len(self.current_selection) == 1
            and self.current_selection[0]["full_element_name"]
            == self.preselection_dict["full_element_name"]
        ):
            self.joint.ViewObject.Proxy.showPreviewJCS(False)
            return
        cursor_pos = self.view.getCursorPos()
        cursor_info = self.view.getObjectInfo(cursor_pos)
        # cursor_info example {'x': 41.515, 'y': 7.449, 'z': 16.861, 'ParentObject': <Part object>, 'SubName': 'Body002.Pad.Face5', 'Document': 'part3', 'Object': 'Pad', 'Component': 'Face5'}
        if (
            not cursor_info
            or not self.preselection_dict
            or cursor_info["SubName"] != self.preselection_dict["sub_name"]
        ):
            self.joint.ViewObject.Proxy.showPreviewJCS(False)
            return
        # newPos = self.view.getPoint(*info["Position"]) # This is not what we want, it's not pos on the object but on the focal plane
        newPos = App.Vector(cursor_info["x"], cursor_info["y"], cursor_info["z"])
        self.preselection_dict["mouse_pos"] = newPos
        self.preselection_dict["vertex_name"] = UtilsAssembly.findElementClosestVertex(
            self.preselection_dict
        )
        placement = self.joint.Proxy.findPlacement(
            self.preselection_dict["object"],
            self.preselection_dict["element_name"],
            self.preselection_dict["vertex_name"],
        )
        self.joint.ViewObject.Proxy.showPreviewJCS(True, placement)
        self.previewJCSVisible = True

    # 3D view keyboard handler
    def KeyboardEvent(self, info):
        # ESC cancels, RETURN validates — mirrors the dialog buttons.
        if info["State"] == "UP" and info["Key"] == "ESCAPE":
            self.reject()
        if info["State"] == "UP" and info["Key"] == "RETURN":
            self.accept()

    # selectionObserver stuff
    def addSelection(self, doc_name, obj_name, sub_name, mousePos):
        """Record a newly selected element and update the joint preview."""
        full_obj_name = UtilsAssembly.getFullObjName(obj_name, sub_name)
        full_element_name = UtilsAssembly.getFullElementName(obj_name, sub_name)
        selected_object = UtilsAssembly.getObject(full_element_name)
        element_name = UtilsAssembly.getElementName(full_element_name)
        selection_dict = {
            "object": selected_object,
            "element_name": element_name,
            "full_element_name": full_element_name,
            "full_obj_name": full_obj_name,
            "mouse_pos": App.Vector(mousePos[0], mousePos[1], mousePos[2]),
        }
        selection_dict["vertex_name"] = UtilsAssembly.findElementClosestVertex(
            selection_dict
        )
        self.current_selection.append(selection_dict)
        self.updateJoint()

    def removeSelection(self, doc_name, obj_name, sub_name, mousePos=None):
        """Drop a deselected element from the tracked selection."""
        full_element_name = UtilsAssembly.getFullElementName(obj_name, sub_name)
        # Find and remove the corresponding dictionary from the combined list
        selection_dict_to_remove = None
        for selection_dict in self.current_selection:
            if selection_dict["full_element_name"] == full_element_name:
                selection_dict_to_remove = selection_dict
                break
        if selection_dict_to_remove is not None:
            self.current_selection.remove(selection_dict_to_remove)
        self.updateJoint()

    def setPreselection(self, doc_name, obj_name, sub_name):
        """Track the element currently under the cursor (for the JCS preview)."""
        if not sub_name:
            self.preselection_dict = None
            return
        full_obj_name = UtilsAssembly.getFullObjName(obj_name, sub_name)
        full_element_name = UtilsAssembly.getFullElementName(obj_name, sub_name)
        selected_object = UtilsAssembly.getObject(full_element_name)
        element_name = UtilsAssembly.getElementName(full_element_name)
        self.preselection_dict = {
            "object": selected_object,
            "sub_name": sub_name,
            "element_name": element_name,
            "full_element_name": full_element_name,
            "full_obj_name": full_obj_name,
        }

    def clearSelection(self, doc_name):
        """Reset the tracked selection when the global selection is cleared."""
        self.current_selection.clear()
        self.updateJoint()
# Register all joint-creation commands with the FreeCAD GUI (only when a GUI
# is actually running, e.g. not in console mode).
if App.GuiUp:
    Gui.addCommand("Assembly_CreateJointFixed", CommandCreateJointFixed())
    Gui.addCommand("Assembly_CreateJointRevolute", CommandCreateJointRevolute())
    Gui.addCommand("Assembly_CreateJointCylindrical", CommandCreateJointCylindrical())
    Gui.addCommand("Assembly_CreateJointSlider", CommandCreateJointSlider())
    Gui.addCommand("Assembly_CreateJointBall", CommandCreateJointBall())
    Gui.addCommand("Assembly_CreateJointPlanar", CommandCreateJointPlanar())
    Gui.addCommand("Assembly_CreateJointParallel", CommandCreateJointParallel())
    Gui.addCommand("Assembly_CreateJointTangent", CommandCreateJointTangent())
|
phone-notifications | simple_phone_provider | import logging
from random import randint
from django.core.cache import cache
from .exceptions import FailedToSendSMS, FailedToStartVerification
from .phone_provider import PhoneProvider, ProviderFlags
logger = logging.getLogger(__name__)
class SimplePhoneProvider(PhoneProvider):
    """
    SimplePhoneProvider is an example of phone provider which supports only SMS messages.
    It is not intended for real-life usage and needed only as example of PhoneProviders suitable to use ONLY in OSS.
    """

    def send_notification_sms(self, number, message):
        # Notification messages are delivered as plain SMS by this provider.
        self.send_sms(number, message)

    def send_sms(self, number, text):
        try:
            self._write_to_stdout(number, text)
        except Exception as e:
            # Translate provider-level failures into exceptions from core OnCall code.
            logger.error(f"SimplePhoneProvider.send_sms: failed {e}")
            raise FailedToSendSMS

    def send_verification_sms(self, number):
        # Generate a fresh 6-digit code and keep it for ten minutes.
        code = str(randint(100000, 999999))
        cache.set(self._cache_key(number), code, timeout=10 * 60)
        try:
            self._write_to_stdout(number, f"Your verification code is {code}")
        except Exception as e:
            # Translate provider-level failures into exceptions from core OnCall code.
            logger.error(f"SimplePhoneProvider.send_verification_sms: failed {e}")
            raise FailedToStartVerification

    def finish_verification(self, number, code):
        # Return the number only when the submitted code matches the stored one.
        stored_code = cache.get(self._cache_key(number))
        return number if stored_code is not None and stored_code == code else None

    def _cache_key(self, number):
        return f"simple_provider_{number}"

    def _write_to_stdout(self, number, text):
        # print is just example of sending sms.
        # In real-life provider it will be some external api call.
        print(f'send message "{text}" to {number}')

    @property
    def flags(self) -> ProviderFlags:
        return ProviderFlags(
            configured=True,
            test_sms=True,
            test_call=False,
            verification_call=False,
            verification_sms=True,
        )
|
graphs | diaperchange_lifetimes | # -*- coding: utf-8 -*-
import plotly.graph_objs as go
import plotly.offline as plotly
from django.utils.translation import gettext as _
from reports import utils
def diaperchange_lifetimes(changes):
    """
    Create a graph showing how long diapers last (time between changes).
    :param changes: a QuerySet of Diaper Change instances.
    :returns: a tuple of the graph's html and javascript.
    """
    changes = changes.order_by("time")
    durations = []
    last_change = changes.first()
    for change in changes[1:]:
        duration = change.time - last_change.time
        # Use total_seconds() rather than .seconds: .seconds is only the
        # seconds component of the timedelta (0-86399) and ignores whole
        # days, which both misreported any gap longer than 24 hours and
        # silently dropped gaps of exactly N days.
        if duration.total_seconds() > 0:
            durations.append(duration)
        last_change = change
    trace = go.Box(
        # Durations in hours, rounded for readable hover labels.
        y=[round(d.total_seconds() / 3600, 2) for d in durations],
        name=_("Changes"),
        jitter=0.3,
        pointpos=-1.8,
        boxpoints="all",
    )
    layout_args = utils.default_graph_layout_options()
    layout_args["height"] = 800
    layout_args["title"] = _("<b>Diaper Lifetimes</b>")
    layout_args["yaxis"]["title"] = _("Time between changes (hours)")
    layout_args["yaxis"]["zeroline"] = False
    layout_args["yaxis"]["dtick"] = 1
    fig = go.Figure({"data": [trace], "layout": go.Layout(**layout_args)})
    output = plotly.plot(fig, output_type="div", include_plotlyjs=False)
    return utils.split_graph_output(output)
|
reports | utils | # -*- coding: utf-8 -*-
def default_graph_layout_options():
    """
    Default layout options for all graphs.
    :returns: a dict of default options.
    """

    def axis_style():
        # A fresh dict per axis: callers mutate these (e.g. set a title),
        # so the x and y axes must not share one object.
        return {
            "titlefont": {"color": "rgba(255, 255, 255, 0.5)"},
            "gridcolor": "rgba(0, 0, 0, 0.25)",
            "zerolinecolor": "rgba(0, 0, 0, 0.5)",
        }

    # Bootstrap 4 font family.
    font_family = (
        '-apple-system, BlinkMacSystemFont, "Segoe UI", '
        'Roboto, "Helvetica Neue", Arial, sans-serif, '
        '"Apple Color Emoji", "Segoe UI Emoji", '
        '"Segoe UI Symbol"'
    )
    return {
        "paper_bgcolor": "rgb(52, 58, 64)",
        "plot_bgcolor": "rgb(52, 58, 64)",
        "font": {
            "color": "rgba(255, 255, 255, 1)",
            "family": font_family,
            "size": 14,
        },
        "margin": {"b": 80, "t": 80},
        "xaxis": axis_style(),
        "yaxis": axis_style(),
    }
def rangeselector_date():
    """
    Graph date range selectors settings for 1w, 2w, 1m, 3m, and all.
    :returns: a dict of settings for the selectors.
    """

    def backward(count, label, step):
        # One "look back N steps" selector button.
        return {"count": count, "label": label, "step": step, "stepmode": "backward"}

    return {
        "bgcolor": "rgb(35, 149, 86)",
        "activecolor": "rgb(25, 108, 62)",
        "buttons": [
            backward(7, "1w", "day"),
            backward(14, "2w", "day"),
            backward(1, "1m", "month"),
            backward(3, "3m", "month"),
            {"step": "all"},
        ],
    }
def rangeselector_time():
    """
    Graph time range selectors settings for 12h, 24h, 48h, 3d, 7d and all.

    (The docstring previously omitted the 7d button that the settings
    actually provide.)
    :returns: a dict of settings for the selectors.
    """
    return {
        "bgcolor": "rgb(35, 149, 86)",
        "activecolor": "rgb(25, 108, 62)",
        "buttons": [
            {"count": 12, "label": "12h", "step": "hour", "stepmode": "backward"},
            {"count": 24, "label": "24h", "step": "hour", "stepmode": "backward"},
            {"count": 48, "label": "48h", "step": "hour", "stepmode": "backward"},
            {"count": 3, "label": "3d", "step": "day", "stepmode": "backward"},
            {"count": 7, "label": "7d", "step": "day", "stepmode": "backward"},
            {"step": "all"},
        ],
    }
def split_graph_output(output):
    """
    Split the output of a Plotly graph into html and javascript.
    :param output: a string of html and javascript comprising the graph.
    :returns: a tuple of the graph's html and javascript.
    """
    # Partition on the *first* "<script" only. The previous
    # output.split("<script") raised a ValueError on unpacking whenever the
    # markup contained more than one "<script" occurrence; partition is also
    # safe (returns empty js) if no script tag is present at all.
    html, sep, js = output.partition("<script")
    return html, sep + js
|
orm-bindings | channel_node | import random
from datetime import datetime
from ipv8.keyvault.crypto import default_eccrypto
from pony import orm
from pony.orm.core import DEFAULT, db_session
from tribler.core.components.metadata_store.db.orm_bindings.discrete_clock import clock
from tribler.core.components.metadata_store.db.serialization import (
CHANNEL_NODE,
DELETED,
ChannelNodePayload,
DeletedMetadataPayload,
)
from tribler.core.exceptions import (
InvalidChannelNodeException,
InvalidSignatureException,
)
from tribler.core.utilities.path_util import Path
from tribler.core.utilities.unicode import hexlify
# Metadata, torrents and channel statuses
NEW = 0  # The entry is newly created and is not published yet. It will be committed at the next commit.
TODELETE = 1  # The entry is marked to be removed at the next commit.
COMMITTED = 2  # The entry is committed and seeded.
UPDATED = 6  # One of the entry's properties was updated. It will be committed at the next commit.
LEGACY_ENTRY = 1000  # The entry was converted from the old Tribler DB. It has no signature and should not be shared.

# Statuses marking an entry as having uncommitted local changes.
DIRTY_STATUSES = (NEW, TODELETE, UPDATED)

PUBLIC_KEY_LEN = 64  # NOTE(review): presumably the serialized public-key length in bytes — confirm

# Bit flags; names suggest they advertise extra channel content
# (description / thumbnail) in reserved_flags — TODO confirm against users.
CHANNEL_DESCRIPTION_FLAG = 1
CHANNEL_THUMBNAIL_FLAG = 2
def generate_dict_from_pony_args(cls, skip_list=None, **kwargs):
    """
    Note: this is a way to manually define Pony entity default attributes in case we
    have to generate the signature before creating an object
    """
    skipped = frozenset(skip_list or ())
    # For every entity attribute not in the skip list, validate either the
    # caller-supplied value or Pony's DEFAULT placeholder.
    return {
        attr.name: attr.validate(kwargs.get(attr.name, DEFAULT), entity=cls)
        for attr in cls._attrs_  # pylint: disable=W0212
        if attr.name not in skipped
    }
def define_binding(db, logger=None, key=None):  # pylint: disable=R0915
    """Define and return the ChannelNode ORM binding on the given Pony db.

    :param db: the Pony database object the entity is attached to.
    :param logger: logger stored on the class as _logger.
    :param key: this peer's signing key, stored on the class as _my_key.
    """

    class ChannelNode(db.Entity):
        """
        This is the base class of our ORM bindings. It implements methods for signing and serialization of ORM objects.
        All other GigaChannel-related ORM classes are derived from it. It is not intended for direct use.
        Instead, other classes should derive from it.
        """

        _discriminator_ = CHANNEL_NODE

        rowid = orm.PrimaryKey(int, size=64, auto=True)

        # Serializable
        metadata_type = orm.Discriminator(int, size=16)
        reserved_flags = orm.Optional(int, size=16, default=0)
        origin_id = orm.Optional(int, size=64, default=0, index=True)
        public_key = orm.Required(bytes)
        id_ = orm.Required(int, size=64)
        orm.composite_key(public_key, id_)
        orm.composite_index(public_key, origin_id)
        timestamp = orm.Required(int, size=64, default=0)
        # Signature is nullable. This means that "None" entries are stored in DB as NULLs instead of empty strings.
        # NULLs are not checked for uniqueness and not indexed.
        # This is necessary to store unsigned signatures without violating the uniqueness constraints.
        signature = orm.Optional(bytes, unique=True, nullable=True, default=None)

        # Local
        added_on = orm.Optional(datetime, default=datetime.utcnow)
        status = orm.Optional(int, default=COMMITTED)

        # Special class-level properties
        _payload_class = ChannelNodePayload
        _my_key = key
        _logger = logger

        # This attribute holds the names of the class attributes that are used by the serializer for the
        # corresponding payload type. We only initialize it once on class creation as an optimization.
        payload_arguments = _payload_class.__init__.__code__.co_varnames[
            : _payload_class.__init__.__code__.co_argcount
        ][1:]

        # A non - personal attribute of an entry is an attribute that would have the same value regardless of where,
        # when and who created the entry.
        # In other words, it does not depend on the Tribler instance that created it.
        # ACHTUNG! On object creation, Pony does not check if discriminator is wrong for the created ORM type!
        nonpersonal_attributes = ("metadata_type",)

        def __init__(self, *args, **kwargs):
            """
            Initialize a metadata object.
            All this dance is required to ensure that the signature is there and it is correct.
            """
            skip_key_check = False

            # Process special keyworded arguments
            # "sign_with" argument given, sign with it
            private_key_override = None
            if "sign_with" in kwargs:
                kwargs["public_key"] = kwargs["sign_with"].pub().key_to_bin()[10:]
                private_key_override = kwargs.pop("sign_with")

            # Free-for-all entries require special treatment
            if "public_key" in kwargs and kwargs["public_key"] == b"":
                # We have to give the entry an unique sig to honor the DB constraints. We use the entry's id_
                # as the sig to keep it unique and short. The uniqueness is guaranteed by DB as it already
                # imposes uniqueness constraints on the id_+public_key combination.
                if "id_" in kwargs:
                    kwargs["signature"] = None
                    skip_key_check = True
                else:
                    # Trying to create an FFA entry without specifying the id_ should be considered an error,
                    # because assigning id_ automatically by clock breaks anonymity.
                    # FFA entries should be "timeless" and anonymous.
                    raise InvalidChannelNodeException(
                        "Attempted to create %s free-for-all (unsigned) object without specifying id_ : "
                        % str(self.__class__.__name__)
                    )

            # For putting legacy/test stuff in
            skip_key_check = kwargs.pop("skip_key_check", skip_key_check)

            # Fill in defaults that must be known *before* signing.
            if "timestamp" not in kwargs:
                kwargs["timestamp"] = clock.tick()

            if "id_" not in kwargs:
                kwargs["id_"] = int(random.getrandbits(63))

            if not private_key_override and not skip_key_check:
                # No key/signature given, sign with our own key.
                if ("signature" not in kwargs) and (
                    ("public_key" not in kwargs)
                    or (kwargs["public_key"] == self._my_key.pub().key_to_bin()[10:])
                ):
                    private_key_override = self._my_key

                # Key/signature given, check them for correctness
                elif ("public_key" in kwargs) and ("signature" in kwargs):
                    try:
                        # Constructing the payload validates the signature.
                        self._payload_class(**kwargs)
                    except InvalidSignatureException as e:
                        raise InvalidSignatureException(
                            f"Attempted to create {str(self.__class__.__name__)} object with invalid signature/PK: "
                            + (
                                hexlify(kwargs["signature"])
                                if "signature" in kwargs
                                else "empty signature "
                            )
                            + " / "
                            + (
                                hexlify(kwargs["public_key"])
                                if "public_key" in kwargs
                                else " empty PK"
                            )
                        ) from e

            if private_key_override:
                # Get default values for Pony class attributes. We have to do it manually because we need
                # to know the payload signature *before* creating the object.
                kwargs = generate_dict_from_pony_args(
                    self.__class__, skip_list=["signature", "public_key"], **kwargs
                )
                payload = self._payload_class(
                    **dict(
                        kwargs,
                        public_key=private_key_override.pub().key_to_bin()[10:],
                        key=private_key_override,
                        metadata_type=self.metadata_type,
                    )
                )
                kwargs["public_key"] = payload.public_key
                kwargs["signature"] = payload.signature

            super().__init__(*args, **kwargs)

        def _serialized(self, key=None):
            """
            Serializes the object and returns the result with added signature (tuple output)
            :param key: private key to sign object with
            :return: (serialized_data, signature) tuple
            """
            return self._payload_class(  # pylint: disable=W0212
                key=key, unsigned=(self.signature is None), **self.to_dict()
            )._serialized()  # pylint: disable=W0212

        def serialized(self, key=None):
            """
            Serializes the object and returns the result with added signature (blob output)
            :param key: private key to sign object with
            :return: serialized_data+signature binary string
            """
            return b"".join(self._serialized(key))

        def _serialized_delete(self):
            """
            Create a special command to delete this metadata and encode it for transfer (tuple output).
            :return: (serialized_data, signature) tuple
            """
            my_dict = ChannelNode.to_dict(self)
            my_dict.update(
                {"metadata_type": DELETED, "delete_signature": self.signature}
            )
            return DeletedMetadataPayload(key=self._my_key, **my_dict)._serialized()  # pylint: disable=W0212

        def serialized_delete(self):
            """
            Create a special command to delete this metadata and encode it for transfer (blob output).
            :return: serialized_data+signature binary string
            """
            return b"".join(self._serialized_delete())

        def serialized_health(self) -> bytes:
            # Base nodes carry no health information; subclasses may override.
            return b";"

        def to_file(self, filename, key=None):
            """Write the serialized (signed) form of this node to a file."""
            with open(Path.fix_win_long_file(filename), "wb") as output_file:
                output_file.write(self.serialized(key))

        def to_delete_file(self, filename):
            """Write the serialized delete command for this node to a file."""
            with open(Path.fix_win_long_file(filename), "wb") as output_file:
                output_file.write(self.serialized_delete())

        def sign(self, key=None):
            """Re-sign this node, defaulting to this peer's own key."""
            if not key:
                key = self._my_key
            self.public_key = key.pub().key_to_bin()[10:]
            _, self.signature = self._serialized(key)

        def has_valid_signature(self):
            """Return True when both the public key and the signature check out."""
            crypto = default_eccrypto
            signature_correct = False
            key_correct = crypto.is_valid_public_bin(
                b"LibNaCLPK:" + bytes(self.public_key)
            )

            if key_correct:
                try:
                    # Payload construction validates the signature.
                    self._payload_class(**self.to_dict())
                except InvalidSignatureException:
                    signature_correct = False
                else:
                    signature_correct = True

            return key_correct and signature_correct

        @classmethod
        def from_payload(cls, payload):
            # Alternate constructor: build a node from a network payload.
            return cls(**payload.to_dict())

        @classmethod
        def from_dict(cls, dct):
            # Alternate constructor: build a node from a plain dict.
            return cls(**dct)

        @property
        @db_session
        def is_personal(self):
            # True when the entry was signed with this peer's own key.
            return self._my_key.pub().key_to_bin()[10:] == self.public_key

        @db_session
        def soft_delete(self):
            if self.status == NEW:
                # Uncommited metadata. Delete immediately
                self.delete()
            else:
                self.status = TODELETE

        def update_properties(self, update_dict):
            """Apply attribute updates, re-signing when a signed attribute changed."""
            signed_attribute_changed = False
            for k, value in update_dict.items():
                if getattr(self, k) != value:
                    setattr(self, k, value)
                    signed_attribute_changed = signed_attribute_changed or (
                        k in self.payload_arguments
                    )

            if signed_attribute_changed:
                if self.status != NEW:
                    self.status = UPDATED
                # ACHTUNG! When using the key argument, the thing will still use _local_ timestamp counter!
                self.timestamp = clock.tick()
                self.sign()

            return self

        def get_parent_nodes(self):
            """Return the chain of parent collections from root to this node."""
            full_path = {self: True}
            node = self
            while node:
                node = db.CollectionNode.get(
                    public_key=self.public_key, id_=node.origin_id
                )
                if node is None:
                    break
                if node in full_path:
                    # Found id loop, but we return it nonetheless to keep the logic from breaking.
                    break
                full_path[node] = True
                if node.origin_id == 0:
                    break
            return tuple(reversed(list(full_path)))

        def make_copy(self, tgt_parent_id, attributes_override=None):
            """Create a NEW-status copy of this node under another parent."""
            dst_dict = attributes_override or {}
            for k in self.nonpersonal_attributes:
                dst_dict[k] = getattr(self, k)
            dst_dict.update({"origin_id": tgt_parent_id, "status": NEW})
            return self.__class__(**dst_dict)

        def get_type(self) -> int:
            return self._discriminator_

        def to_simple_dict(self):
            """
            Return a basic dictionary with information about the node
            """
            simple_dict = {
                "type": self.get_type(),
                "id": self.id_,
                "origin_id": self.origin_id,
                "public_key": hexlify(self.public_key),
                "status": self.status,
            }
            return simple_dict

    return ChannelNode
|
PyObjCTest | test_ikslideshow | from PyObjCTools.TestSupport import *
from Quartz import *
# Python 2/3 compatibility: Python 3 has no ``unicode`` builtin, so alias it
# to ``str`` when it is missing.
try:
    unicode
except NameError:
    unicode = str
class TestIKSlideShowHelper(NSObject):
    """Stub conforming to the IKSlideshowDataSource protocol.

    The method bodies are irrelevant; only the selectors and their metadata
    (argument/result types) are inspected by the tests below.
    """

    def slideshowItemAtIndex_(self, idx):
        return None

    def nameOfSlideshowItemAtIndex_(self, idx):
        return None

    def canExportSlideshowItemAtIndex_toApplication_(self, idx, app):
        return True

    def slideshowDidChangeCurrentIndex_(self, idx):
        pass
class TestIKSlideshow(TestCase):
    """Bindings tests for the ImageKit IKSlideshow API."""

    # Disabled: the "no_" prefix keeps the test runner from collecting it.
    @min_os_level("10.5")
    def no_testProtocols(self):
        self.assertIsInstance(
            objc.protocolNamed("IKSlideshowDataSource"), objc.formal_protocol
        )

    @min_os_level("10.5")
    def testProtocolMethods(self):
        # Index arguments must be bridged as NSUInteger, not plain objects.
        self.assertArgHasType(
            TestIKSlideShowHelper.slideshowItemAtIndex_, 0, objc._C_NSUInteger
        )
        self.assertArgHasType(
            TestIKSlideShowHelper.nameOfSlideshowItemAtIndex_, 0, objc._C_NSUInteger
        )
        self.assertArgHasType(
            TestIKSlideShowHelper.canExportSlideshowItemAtIndex_toApplication_,
            0,
            objc._C_NSUInteger,
        )
        self.assertResultIsBOOL(
            TestIKSlideShowHelper.canExportSlideshowItemAtIndex_toApplication_
        )
        self.assertArgHasType(
            TestIKSlideShowHelper.slideshowDidChangeCurrentIndex_, 0, objc._C_NSUInteger
        )

    @min_os_level("10.5")
    def testMethods(self):
        self.assertResultIsBOOL(IKSlideshow.canExportToApplication_)

    @min_os_level("10.5")
    def testConstants(self):
        # All framework constants should be bridged as NSString.
        self.assertIsInstance(IKSlideshowModeImages, unicode)
        self.assertIsInstance(IKSlideshowModePDF, unicode)
        self.assertIsInstance(IKSlideshowModeOther, unicode)
        self.assertIsInstance(IKSlideshowWrapAround, unicode)
        self.assertIsInstance(IKSlideshowStartPaused, unicode)
        self.assertIsInstance(IKSlideshowStartIndex, unicode)
        self.assertIsInstance(IKSlideshowPDFDisplayBox, unicode)
        self.assertIsInstance(IKSlideshowPDFDisplayMode, unicode)
        self.assertIsInstance(IKSlideshowPDFDisplaysAsBook, unicode)
        self.assertIsInstance(IK_iPhotoBundleIdentifier, unicode)

    @min_os_level("10.6")
    def testConstants10_6(self):
        self.assertIsInstance(IKSlideshowScreen, unicode)
        self.assertIsInstance(IKSlideshowAudioFile, unicode)
        self.assertIsInstance(IKSlideshowPDFDisplayBox, unicode)
        self.assertIsInstance(IKSlideshowPDFDisplayMode, unicode)
        self.assertIsInstance(IKSlideshowPDFDisplaysAsBook, unicode)
        self.assertIsInstance(IK_ApertureBundleIdentifier, unicode)
        self.assertIsInstance(IK_MailBundleIdentifier, unicode)
if __name__ == "__main__":
    # main() presumably comes from the PyObjCTools.TestSupport star import.
    main()
|
extractor | cbssports | from __future__ import unicode_literals
import re
from ..utils import int_or_none, try_get
# from .cbs import CBSBaseIE
from .common import InfoExtractor
# class CBSSportsEmbedIE(CBSBaseIE):
class CBSSportsEmbedIE(InfoExtractor):
    """Extractor for the embedded CBS Sports / 247Sports player.

    The embed URL carries either a UUID (``ids%3D``) or a numeric producer
    content id (``pcid%3D``); either one is enough to query the video API.
    """

    IE_NAME = "cbssports:embed"
    _VALID_URL = r"""(?ix)https?://(?:(?:www\.)?cbs|embed\.247)sports\.com/player/embed.+?
        (?:
            ids%3D(?P<id>[\da-f]{8}-(?:[\da-f]{4}-){3}[\da-f]{12})|
            pcid%3D(?P<pcid>\d+)
        )"""

    _TESTS = [
        {
            "url": "https://www.cbssports.com/player/embed/?args=player_id%3Db56c03a6-231a-4bbe-9c55-af3c8a8e9636%26ids%3Db56c03a6-231a-4bbe-9c55-af3c8a8e9636%26resizable%3D1%26autoplay%3Dtrue%26domain%3Dcbssports.com%26comp_ads_enabled%3Dfalse%26watchAndRead%3D0%26startTime%3D0%26env%3Dprod",
            "only_matching": True,
        },
        {
            "url": "https://embed.247sports.com/player/embed/?args=%3fplayer_id%3d1827823171591%26channel%3dcollege-football-recruiting%26pcid%3d1827823171591%26width%3d640%26height%3d360%26autoplay%3dTrue%26comp_ads_enabled%3dFalse%26uvpc%3dhttps%253a%252f%252fwww.cbssports.com%252fapi%252fcontent%252fvideo%252fconfig%252f%253fcfg%253duvp_247sports_v4%2526partner%253d247%26uvpc_m%3dhttps%253a%252f%252fwww.cbssports.com%252fapi%252fcontent%252fvideo%252fconfig%252f%253fcfg%253duvp_247sports_m_v4%2526partner_m%253d247_mobile%26utag%3d247sportssite%26resizable%3dTrue",
            "only_matching": True,
        },
    ]

    # def _extract_video_info(self, filter_query, video_id):
    #     return self._extract_feed_info('dJ5BDC', 'VxxJg8Ymh8sE', filter_query, video_id)

    def _real_extract(self, url):
        uuid, pcid = re.match(self._VALID_URL, url).groups()
        query = {"id": uuid} if uuid else {"pcid": pcid}
        # The API returns a list; the first entry describes the video.
        video = self._download_json(
            "https://www.cbssports.com/api/content/video/", uuid or pcid, query=query
        )[0]
        video_id = video["id"]
        title = video["title"]
        metadata = video.get("metaData") or {}
        # return self._extract_video_info('byId=%d' % metadata['mpxOutletId'], video_id)
        # return self._extract_video_info('byGuid=' + metadata['mpxRefId'], video_id)
        formats = self._extract_m3u8_formats(
            metadata["files"][0]["url"],
            video_id,
            "mp4",
            "m3u8_native",
            m3u8_id="hls",
            fatal=False,
        )
        self._sort_formats(formats)

        image = video.get("image")
        thumbnails = None
        if image:
            image_path = image.get("path")
            if image_path:
                thumbnails = [
                    {
                        "url": image_path,
                        "width": int_or_none(image.get("width")),
                        "height": int_or_none(image.get("height")),
                        "filesize": int_or_none(image.get("size")),
                    }
                ]

        return {
            "id": video_id,
            "title": title,
            "formats": formats,
            "thumbnails": thumbnails,
            "description": video.get("description"),
            "timestamp": int_or_none(
                try_get(video, lambda x: x["dateCreated"]["epoch"])
            ),
            "duration": int_or_none(metadata.get("duration")),
        }
class CBSSportsBaseIE(InfoExtractor):
    """Shared logic: find the embedded player iframe on an article page and
    hand the embed URL over to CBSSportsEmbedIE."""

    def _real_extract(self, url):
        page_id = self._match_id(url)
        webpage = self._download_webpage(url, page_id)
        # The player lives in an iframe whose src (or data-src) points at
        # the /player/embed endpoint.
        embed_url = self._search_regex(
            r'<iframe[^>]+(?:data-)?src="(https?://[^/]+/player/embed[^"]+)"',
            webpage,
            "embed url",
        )
        return self.url_result(embed_url, CBSSportsEmbedIE.ie_key())
class CBSSportsIE(CBSSportsBaseIE):
    """Extractor for cbssports.com video article pages (extraction is
    inherited from CBSSportsBaseIE)."""

    IE_NAME = "cbssports"
    _VALID_URL = r"https?://(?:www\.)?cbssports\.com/[^/]+/video/(?P<id>[^/?#&]+)"
    _TESTS = [
        {
            "url": "https://www.cbssports.com/college-football/video/cover-3-stanford-spring-gleaning/",
            "info_dict": {
                "id": "b56c03a6-231a-4bbe-9c55-af3c8a8e9636",
                "ext": "mp4",
                "title": "Cover 3: Stanford Spring Gleaning",
                "description": "The Cover 3 crew break down everything you need to know about the Stanford Cardinal this spring.",
                "timestamp": 1617218398,
                "upload_date": "20210331",
                "duration": 502,
            },
        }
    ]
class TwentyFourSevenSportsIE(CBSSportsBaseIE):
    """Extractor for 247sports.com video pages (extraction is inherited
    from CBSSportsBaseIE)."""

    IE_NAME = "247sports"
    _VALID_URL = r"https?://(?:www\.)?247sports\.com/Video/(?:[^/?#&]+-)?(?P<id>\d+)"
    _TESTS = [
        {
            "url": "https://247sports.com/Video/2021-QB-Jake-Garcia-senior-highlights-through-five-games-10084854/",
            "info_dict": {
                "id": "4f1265cb-c3b5-44a8-bb1d-1914119a0ccc",
                "ext": "mp4",
                "title": "2021 QB Jake Garcia senior highlights through five games",
                "description": "md5:8cb67ebed48e2e6adac1701e0ff6e45b",
                "timestamp": 1607114223,
                "upload_date": "20201204",
                "duration": 208,
            },
        }
    ]
|
albums | main | # Copyright 2004-2007 Joe Wreschnig, Michael Urman, Iñigo Serna
# 2009-2010 Steven Robertson
# 2012-2022 Nick Boultbee
# 2009-2014 Christoph Reiter
# 2018-2020 Uriel Zajaczkovski
# 2019 Ruud van Asseldonk
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
from __future__ import absolute_import
import os
from typing import Optional
import cairo
import quodlibet
from gi.repository import Gdk, Gio, GLib, Gtk, Pango
from quodlibet import _, app, config, ngettext, qltk, util
from quodlibet.browsers import Browser
from quodlibet.browsers._base import DisplayPatternMixin
from quodlibet.qltk import Icons
from quodlibet.qltk.completion import EntryWordCompletion
from quodlibet.qltk.cover import get_no_cover_pixbuf
from quodlibet.qltk.image import add_border_widget, get_surface_for_pixbuf
from quodlibet.qltk.information import Information
from quodlibet.qltk.menubutton import MenuButton
from quodlibet.qltk.properties import SongProperties
from quodlibet.qltk.searchbar import SearchBarBox
from quodlibet.qltk.songsmenu import SongsMenu
from quodlibet.qltk.views import AllTreeView
from quodlibet.qltk.x import MenuItem, RadioMenuItem, ScrolledWindow, SymbolicIconImage
from quodlibet.query import Query
from quodlibet.util import DeferredSignal, cmp, connect_destroy, connect_obj, copool
from quodlibet.util.i18n import numeric_phrase
from quodlibet.util.library import background_filter
from .models import AlbumFilterModel, AlbumItem, AlbumModel, AlbumSortModel
from .prefs import DEFAULT_PATTERN_TEXT, Preferences
def get_cover_size():
    """Return the cover size defined by AlbumItem (read off a throwaway item)."""
    placeholder = AlbumItem(None)
    return placeholder.COVER_SIZE
class AlbumTagCompletion(EntryWordCompletion):
    """Entry completion offering album-related tag names for search entries."""

    def __init__(self):
        super().__init__()
        # The tag model is shared by all instances: it is created and filled
        # once, then cached as a (name-mangled) class attribute; subsequent
        # instances reuse it without rebuilding.
        try:
            model = self.__model
        except AttributeError:
            model = type(self).__model = Gtk.ListStore(str)
            self.__refreshmodel()
        self.set_model(model)
        self.set_text_column(0)

    def __refreshmodel(self, *args):
        # Plain text tags.
        for tag in ["title", "album", "date", "people", "artist", "genre"]:
            self.__model.append(row=[tag])
        # Numeric tags, completed with the "#(" query prefix.
        for tag in ["tracks", "discs", "length", "date"]:
            self.__model.append(row=["#(" + tag])
        # Aggregates over per-song numeric tags, e.g. "#(rating:avg".
        for tag in ["rating", "playcount", "skipcount"]:
            for suffix in ["avg", "max", "min", "sum"]:
                self.__model.append(row=["#(%s:%s" % (tag, suffix)])
def cmpa(a, b):
    """Like cmp but treats values that evaluate to false as inf."""
    # If exactly one side is falsy, it loses; otherwise fall back to cmp.
    if bool(a) != bool(b):
        return -1 if a else 1
    return cmp(a, b)
def compare_title(a1, a2):
    """Comparator: sort title, tie-broken by album key.

    The "All Albums" placeholder (album is None) always sorts first;
    albums without a title always sort last.
    """
    x, y = a1.album, a2.album
    if x is None or y is None:
        return -1 if x is None else 1
    if not x.title or not y.title:
        return 1 if not x.title else -1
    return cmpa(x.sort, y.sort) or cmp(x.key, y.key)
def compare_people(a1, a2):
    """Comparator: people sort name, then date, sort title and key.

    The "All Albums" placeholder sorts first; untitled albums sort last.
    """
    x, y = a1.album, a2.album
    if x is None or y is None:
        return -1 if x is None else 1
    if not x.title or not y.title:
        return 1 if not x.title else -1
    return (
        cmpa(x.peoplesort, y.peoplesort)
        or cmpa(x.date, y.date)
        or cmpa(x.sort, y.sort)
        or cmp(x.key, y.key)
    )
def compare_date(a1, a2):
    """Comparator: date, then sort title, then album key.

    The "All Albums" placeholder sorts first; untitled albums sort last.
    """
    x, y = a1.album, a2.album
    if x is None or y is None:
        return -1 if x is None else 1
    if not x.title or not y.title:
        return 1 if not x.title else -1
    return cmpa(x.date, y.date) or cmpa(x.sort, y.sort) or cmp(x.key, y.key)
def compare_date_added(a1, a2):
    """Comparator: most recently added first, then date, sort title, key.

    The "All Albums" placeholder sorts first; untitled albums sort last.
    """
    x, y = a1.album, a2.album
    if x is None or y is None:
        return -1 if x is None else 1
    if not x.title or not y.title:
        return 1 if not x.title else -1
    # Negated so larger (newer) ~#added values come first.
    return (
        -cmp(x("~#added"), y("~#added"))
        or cmpa(x.date, y.date)
        or cmpa(x.sort, y.sort)
        or cmp(x.key, y.key)
    )
def compare_original_date(a1, a2):
    """Comparator: original release date, then sort title, then key.

    Uses "originaldate" when set and falls back to the regular date
    otherwise.  The "All Albums" placeholder sorts first; untitled
    albums sort last.
    """
    x, y = a1.album, a2.album
    if x is None or y is None:
        return -1 if x is None else 1
    if not x.title or not y.title:
        return 1 if not x.title else -1
    x_date = x.get("originaldate", x.date)
    y_date = y.get("originaldate", y.date)
    return cmpa(x_date, y_date) or cmpa(x.sort, y.sort) or cmp(x.key, y.key)
def compare_genre(a1, a2):
    """Comparator: genre, then people, date, sort title and key.

    The "All Albums" placeholder sorts first; untitled albums sort last.
    """
    x, y = a1.album, a2.album
    if x is None or y is None:
        return -1 if x is None else 1
    if not x.title or not y.title:
        return 1 if not x.title else -1
    return (
        cmpa(x.genre, y.genre)
        or cmpa(x.peoplesort, y.peoplesort)
        or cmpa(x.date, y.date)
        or cmpa(x.sort, y.sort)
        or cmp(x.key, y.key)
    )
def compare_rating(a1, a2):
    """Comparator: highest average rating first, then date, sort title, key.

    The "All Albums" placeholder sorts first; untitled albums sort last.
    """
    x, y = a1.album, a2.album
    if x is None or y is None:
        return -1 if x is None else 1
    if not x.title or not y.title:
        return 1 if not x.title else -1
    # Negated so higher ratings come first.
    return (
        -cmp(x("~#rating"), y("~#rating"))
        or cmpa(x.date, y.date)
        or cmpa(x.sort, y.sort)
        or cmp(x.key, y.key)
    )
def compare_avgplaycount(a1, a2):
    """Comparator: highest average play count first, then date, sort, key.

    The "All Albums" placeholder sorts first; untitled albums sort last.
    """
    x, y = a1.album, a2.album
    if x is None or y is None:
        return -1 if x is None else 1
    if not x.title or not y.title:
        return 1 if not x.title else -1
    # Negated so more-played albums come first.
    return (
        -cmp(x("~#playcount:avg"), y("~#playcount:avg"))
        or cmpa(x.date, y.date)
        or cmpa(x.sort, y.sort)
        or cmp(x.key, y.key)
    )
class PreferencesButton(Gtk.HBox):
    """Gear-menu button for the album browser: sort order and preferences."""

    def __init__(self, browser, model):
        super().__init__()
        # (label, comparator) pairs; the index into this list is what is
        # persisted in the "browsers.album_sort" config entry, so order
        # matters.
        sort_orders = [
            (_("_Title"), self.__compare_title),
            (_("_People"), self.__compare_people),
            (_("_Date"), self.__compare_date),
            (_("_Date Added"), self.__compare_date_added),
            (_("_Original Date"), self.__compare_original_date),
            (_("_Genre"), self.__compare_genre),
            (_("_Rating"), self.__compare_rating),
            (_("Play_count"), self.__compare_avgplaycount),
        ]
        menu = Gtk.Menu()

        sort_item = Gtk.MenuItem(label=_("Sort _by…"), use_underline=True)
        sort_menu = Gtk.Menu()

        active = config.getint("browsers", "album_sort", 1)

        item = None
        for i, (label, func) in enumerate(sort_orders):
            # Each radio item shares the group of the previous one.
            item = RadioMenuItem(group=item, label=label, use_underline=True)
            # Sort column ids 100+ are reserved for these custom orders.
            model.set_sort_func(100 + i, func)
            if i == active:
                model.set_sort_column_id(100 + i, Gtk.SortType.ASCENDING)
                item.set_active(True)
            item.connect(
                "toggled", util.DeferredSignal(self.__sort_toggled_cb), model, i
            )
            sort_menu.append(item)

        sort_item.set_submenu(sort_menu)
        menu.append(sort_item)

        pref_item = MenuItem(_("_Preferences"), Icons.PREFERENCES_SYSTEM)
        menu.append(pref_item)
        connect_obj(pref_item, "activate", Preferences, browser)

        menu.show_all()

        button = MenuButton(
            SymbolicIconImage(Icons.EMBLEM_SYSTEM, Gtk.IconSize.MENU), arrow=True
        )
        button.set_menu(menu)
        self.pack_start(button, False, False, 0)

    def __sort_toggled_cb(self, item, model, num):
        # Radio toggles fire for both the deactivated and the activated
        # item; only persist and apply the newly active one.
        if item.get_active():
            config.set("browsers", "album_sort", str(num))
            model.set_sort_column_id(100 + num, Gtk.SortType.ASCENDING)

    # The methods below adapt the module-level comparators to the
    # GtkTreeSortable sort-func signature (model, iter, iter, data).

    def __compare_title(self, model, i1, i2, data):
        a1, a2 = model.get_value(i1), model.get_value(i2)
        return compare_title(a1, a2)

    def __compare_people(self, model, i1, i2, data):
        a1, a2 = model.get_value(i1), model.get_value(i2)
        return compare_people(a1, a2)

    def __compare_date(self, model, i1, i2, data):
        a1, a2 = model.get_value(i1), model.get_value(i2)
        return compare_date(a1, a2)

    def __compare_date_added(self, model, i1, i2, data):
        a1, a2 = model.get_value(i1), model.get_value(i2)
        return compare_date_added(a1, a2)

    def __compare_original_date(self, model, i1, i2, data):
        a1, a2 = model.get_value(i1), model.get_value(i2)
        return compare_original_date(a1, a2)

    def __compare_genre(self, model, i1, i2, data):
        a1, a2 = model.get_value(i1), model.get_value(i2)
        return compare_genre(a1, a2)

    def __compare_rating(self, model, i1, i2, data):
        a1, a2 = model.get_value(i1), model.get_value(i2)
        return compare_rating(a1, a2)

    def __compare_avgplaycount(self, model, i1, i2, data):
        a1, a2 = model.get_value(i1), model.get_value(i2)
        return compare_avgplaycount(a1, a2)
class VisibleUpdate:
    """Mixin that lazily refreshes TreeView rows as they become visible.

    Subclasses implement _row_needs_update() and _update_row(); rows in
    (and a little beyond) the visible area are then refreshed
    incrementally through copool while the widget is idle.
    """

    # how many rows should be updated
    # beyond the visible area in both directions
    PRELOAD_COUNT = 35

    def enable_row_update(self, view, sw, column):
        """Start lazy updates of *column* for *view* inside the scrolled
        window *sw*."""
        connect_obj(view, "draw", self.__update_visibility, view)

        connect_destroy(sw.get_vadjustment(), "value-changed", self.__stop_update, view)

        self.__pending_paths = []
        self.__update_deferred = DeferredSignal(
            self.__update_visible_rows, timeout=50, priority=GLib.PRIORITY_LOW
        )
        self.__column = column
        self.__first_expose = True

    def disable_row_update(self):
        """Cancel all pending and deferred updates and drop references."""
        if self.__update_deferred:
            # Bug fix: "abort" was previously referenced without being
            # called, so a pending deferred emit was never cancelled.
            self.__update_deferred.abort()
            self.__update_deferred = None

        if self.__pending_paths:
            copool.remove(self.__scan_paths)

        self.__column = None
        self.__pending_paths = []

    def _row_needs_update(self, model, iter_):
        """Should return True if the rows should be updated"""
        raise NotImplementedError

    def _update_row(self, model, iter_):
        """Do whatever is needed to update the row."""
        raise NotImplementedError

    def __stop_update(self, adj, view):
        # The view scrolled: throw away the stale queue and recompute
        # which rows are visible now.
        if self.__pending_paths:
            copool.remove(self.__scan_paths)
            self.__pending_paths = []
            self.__update_visibility(view)

    def __update_visibility(self, view, *args):
        if not self.__column.get_visible():
            return

        # update all visible rows on first expose event
        if self.__first_expose:
            self.__first_expose = False
            self.__update_visible_rows(view, 0)
            # Drain the queue synchronously so the first paint is complete.
            for i in self.__scan_paths():
                pass

        self.__update_deferred(view, self.PRELOAD_COUNT)

    def __scan_paths(self):
        # copool task: update one queued row per iteration.
        while self.__pending_paths:
            model, path = self.__pending_paths.pop()
            try:
                iter_ = model.get_iter(path)
            except ValueError:
                # The row vanished while it was queued; skip it.
                continue
            self._update_row(model, iter_)
            yield True

    def __update_visible_rows(self, view, preload):
        vrange = view.get_visible_range()
        if vrange is None:
            return

        model = view.get_model()

        # Generate a path list so that cover scanning starts in the middle
        # of the visible area and alternately moves up and down.
        start, end = vrange

        # pygtk2.12 sometimes returns empty tuples
        if not start or not end:
            return

        start = start.get_indices()[0] - preload - 1
        end = end.get_indices()[0] + preload

        vlist = list(range(end, start, -1))

        # Split in half and interleave so updates radiate outward from
        # the middle of the visible area.
        top = vlist[: len(vlist) // 2]
        bottom = vlist[len(vlist) // 2 :]
        top.reverse()

        vlist_new = []
        for i in vlist:
            if top:
                vlist_new.append(top.pop())
            if bottom:
                vlist_new.append(bottom.pop())

        vlist_new = filter(lambda s: s >= 0, vlist_new)
        vlist_new = map(Gtk.TreePath, vlist_new)

        visible_paths = []
        for path in vlist_new:
            try:
                iter_ = model.get_iter(path)
            except ValueError:
                continue
            if self._row_needs_update(model, iter_):
                visible_paths.append((model, path))

        # Only (re)start the copool task if it is not already running.
        if not self.__pending_paths and visible_paths:
            copool.add(self.__scan_paths)
        self.__pending_paths = visible_paths
class AlbumList(Browser, util.InstanceTracker, VisibleUpdate, DisplayPatternMixin):
__model = None
__last_render = None
__last_render_surface = None
_PATTERN_FN = os.path.join(quodlibet.get_user_dir(), "album_pattern")
_DEFAULT_PATTERN_TEXT = DEFAULT_PATTERN_TEXT
name = _("Album List")
accelerated_name = _("_Album List")
keys = ["AlbumList"]
priority = 4
def pack(self, songpane):
    """Place the browser and the song pane in a persistent paned widget."""
    paned = qltk.ConfigRHPaned("browsers", "albumlist_pos", 0.4)
    paned.pack1(self, True, False)
    paned.pack2(songpane, True, False)
    return paned
def unpack(self, container, songpane):
    """Detach the song pane and then this browser from *container*."""
    for child in (songpane, self):
        container.remove(child)
@classmethod
def init(klass, library):
    """One-time class setup: load the saved album display pattern."""
    # Modernized from super(AlbumList, klass): the zero-argument form is
    # equivalent inside a classmethod and matches the rest of the file.
    super().load_pattern()
def finalize(self, restored):
    """Post-setup hook: select the top row unless a previous selection
    was restored from the config file."""
    if not restored:
        self.view.set_cursor((0,))
@classmethod
def _destroy_model(klass):
    # Drop the class-shared model; called once the last instance is gone.
    klass.__model.destroy()
    klass.__model = None
@classmethod
def toggle_covers(klass):
    """Show or hide the cover column on every live album browser,
    following the "browsers.album_covers" config setting."""
    on = config.getboolean("browsers", "album_covers")
    for albumlist in klass.instances():
        albumlist.__cover_column.set_visible(on)
        # Queue a resize so the other columns reclaim/yield the space.
        for column in albumlist.view.get_columns():
            column.queue_resize()
def refresh_all(self):
    """Ask the shared model to refresh every row."""
    self.__model.refresh_all()
@classmethod
def _init_model(klass, library):
    # The model is shared by all instances and created lazily on first use.
    klass.__model = AlbumModel(library)
    klass.__library = library
@util.cached_property
def _no_cover(self) -> Optional[cairo.Surface]:
    """Returns a cairo surface representing a missing cover"""
    # Computed once per instance (cached_property); scaled for HiDPI.
    cover_size = get_cover_size()
    scale_factor = self.get_scale_factor()
    pb = get_no_cover_pixbuf(cover_size, cover_size, scale_factor)
    if not pb:
        raise IOError("Can't find / scale missing art image")
    return get_surface_for_pixbuf(self, pb)
def __init__(self, library):
    """Build the album browser UI and wire it to *library*."""
    super().__init__(spacing=6)
    self.set_orientation(Gtk.Orientation.VERTICAL)

    self._register_instance()
    if self.__model is None:
        self._init_model(library)

    # Cancels in-flight cover scans when the browser is destroyed.
    self._cover_cancel = Gio.Cancellable()

    sw = ScrolledWindow()
    sw.set_shadow_type(Gtk.ShadowType.IN)
    self.view = view = AllTreeView()
    view.set_headers_visible(False)
    # Stacked models: base album model -> sortable -> filterable.
    model_sort = AlbumSortModel(model=self.__model)
    model_filter = AlbumFilterModel(child_model=model_sort)

    self.__bg_filter = background_filter()
    self.__filter = None
    model_filter.set_visible_func(self.__parse_query)

    # Cover column.
    render = Gtk.CellRendererPixbuf()
    self.__cover_column = column = Gtk.TreeViewColumn("covers", render)
    column.set_visible(config.getboolean("browsers", "album_covers"))
    column.set_sizing(Gtk.TreeViewColumnSizing.FIXED)
    column.set_fixed_width(get_cover_size() + 12)
    render.set_property("height", get_cover_size() + 8)
    render.set_property("width", get_cover_size() + 8)

    def cell_data_pb(column, cell, model, iter_, no_cover):
        item = model.get_value(iter_)

        if item.album is None:
            # The "All Albums" row shows no cover.
            surface = None
        elif item.cover:
            pixbuf = item.cover
            pixbuf = add_border_widget(pixbuf, self.view)
            surface = get_surface_for_pixbuf(self, pixbuf) or no_cover
            # don't cache, too much state has an effect on the result
            self.__last_render_surface = None
        else:
            surface = no_cover

        # Skip redundant set_property calls for consecutive identical rows.
        if self.__last_render_surface == surface:
            return
        self.__last_render_surface = surface
        cell.set_property("surface", surface)

    column.set_cell_data_func(render, cell_data_pb, self._no_cover)
    view.append_column(column)

    # Album text column.
    render = Gtk.CellRendererText()
    column = Gtk.TreeViewColumn("albums", render)
    column.set_sizing(Gtk.TreeViewColumnSizing.FIXED)
    if view.supports_hints():
        render.set_property("ellipsize", Pango.EllipsizeMode.END)

    def cell_data(column, cell, model, iter_, data):
        album = model.get_album(iter_)

        if album is None:
            # Header row: bold "All Albums" plus the album count.
            text = util.bold(_("All Albums")) + "\n"
            text += numeric_phrase("%d album", "%d albums", len(model) - 1)
            markup = text
        else:
            markup = self.display_pattern % album

        if self.__last_render == markup:
            return
        self.__last_render = markup
        cell.markup = markup
        cell.set_property("markup", markup)

    column.set_cell_data_func(render, cell_data)
    view.append_column(column)

    view.get_selection().set_mode(Gtk.SelectionMode.MULTIPLE)
    view.set_rules_hint(True)
    view.set_search_equal_func(self.__search_func, None)
    view.set_search_column(0)
    view.set_model(model_filter)
    sw.set_policy(Gtk.PolicyType.AUTOMATIC, Gtk.PolicyType.AUTOMATIC)
    sw.add(view)

    view.connect("row-activated", self.__play_selection)
    # Kept so __inhibit/__uninhibit can block programmatic selections.
    self.__sig = view.connect(
        "selection-changed", util.DeferredSignal(self.__update_songs, owner=view)
    )

    # Drag-and-drop source setup: in-app song lists and URI lists.
    targets = [
        ("text/x-quodlibet-songs", Gtk.TargetFlags.SAME_APP, 1),
        ("text/uri-list", 0, 2),
    ]
    targets = [Gtk.TargetEntry.new(*t) for t in targets]

    view.drag_source_set(
        Gdk.ModifierType.BUTTON1_MASK, targets, Gdk.DragAction.COPY
    )
    view.connect("drag-data-get", self.__drag_data_get)
    connect_obj(view, "popup-menu", self.__popup, view, library)

    self.accelerators = Gtk.AccelGroup()
    search = SearchBarBox(
        completion=AlbumTagCompletion(), accel_group=self.accelerators
    )
    search.connect("query-changed", self.__update_filter)
    connect_obj(search, "focus-out", lambda w: w.grab_focus(), view)
    self.__search = search

    prefs = PreferencesButton(self, model_sort)
    search.pack_start(prefs, False, True, 0)

    hb = Gtk.Box(spacing=3)
    hb.pack_start(search, True, True, 6)
    self.pack_start(hb, False, True, 0)
    self.pack_start(sw, True, True, 0)

    self.connect("destroy", self.__destroy)

    # Lazy cover scanning for visible rows (VisibleUpdate mixin).
    self.enable_row_update(view, sw, self.__cover_column)

    self.connect("key-press-event", self.__key_pressed, library.librarian)

    if app.cover_manager:
        connect_destroy(app.cover_manager, "cover-changed", self._cover_changed)

    self.show_all()
def _cover_changed(self, manager, songs):
    """Invalidate cached covers for albums containing any of *songs*."""
    model = self.__model
    songs = set(songs)
    for iter_, item in model.iterrows():
        album = item.album
        if album is not None and songs & album.songs:
            # Mark for rescan and trigger a redraw of the row.
            item.scanned = False
            model.row_changed(model.get_path(iter_), iter_)
def __key_pressed(self, widget, event, librarian):
    """Handle browser accelerators; return True when the key is consumed."""
    if qltk.is_accel(event, "<Primary>I"):
        # Ctrl+I: open the information window for the selection.
        songs = self.__get_selected_songs()
        if songs:
            window = Information(librarian, songs, self)
            window.show()
        return True
    elif qltk.is_accel(event, "<Primary>Return", "<Primary>KP_Enter"):
        # Ctrl+Enter: enqueue the selected songs.
        qltk.enqueue(self.__get_selected_songs(sort=True))
        return True
    elif qltk.is_accel(event, "<alt>Return"):
        # Alt+Enter: open the song properties editor.
        songs = self.__get_selected_songs()
        if songs:
            window = SongProperties(librarian, songs, self)
            window.show()
        return True
    return False
def _row_needs_update(self, model, iter_):
    """A row needs an update when it holds a real album (not the
    "All Albums" placeholder) whose cover has not been scanned yet."""
    item = model.get_value(iter_)
    if item.album is None:
        return False
    return not item.scanned
def _update_row(self, filter_model, iter_):
    """Kick off an asynchronous cover scan for the row behind *iter_*.

    The iter comes from the filter model, so it is translated down
    through the sort model to the underlying album model first.
    """
    sort_model = filter_model.get_model()
    model = sort_model.get_model()
    iter_ = filter_model.convert_iter_to_child_iter(iter_)
    iter_ = sort_model.convert_iter_to_child_iter(iter_)
    # Row reference survives model changes while the scan is in flight.
    tref = Gtk.TreeRowReference.new(model, model.get_path(iter_))

    def callback():
        # Redraw the row once the cover arrives, if it still exists.
        path = tref.get_path()
        if path is not None:
            model.row_changed(path, model.get_iter(path))

    item = model.get_value(iter_)
    scale_factor = self.get_scale_factor()
    item.scan_cover(
        scale_factor=scale_factor, callback=callback, cancel=self._cover_cancel
    )
def __destroy(self, browser):
    """Tear down: cancel cover loads, stop row updates, and destroy the
    shared model if this was the last instance."""
    self._cover_cancel.cancel()
    self.disable_row_update()

    self.view.set_model(None)

    klass = type(browser)
    if not klass.instances():
        klass._destroy_model()
def __update_filter(self, entry, text, scroll_up=True, restore=False):
    """Recompute the row filter from the search entry and refilter."""
    model = self.view.get_model()

    self.__filter = None
    query = self.__search.get_query(star=["~people", "album"])
    if not query.matches_all:
        self.__filter = query.search
    self.__bg_filter = background_filter()

    # Block selection-changed while the model is being refiltered.
    self.__inhibit()

    # We could be smart and try to scroll to a selected album
    # but that introduces lots of wild scrolling. Feel free to change it.
    # Without scrolling the TV tries to stay at the same position
    # (40% down) which makes no sense, so always go to the top.
    if scroll_up:
        self.view.scroll_to_point(0, 0)

    # Don't filter on restore if there is nothing to filter
    if not restore or self.__filter or self.__bg_filter:
        model.refilter()

    self.__uninhibit()
def __parse_query(self, model, iter_, data):
    """Visibility callback for the filter model: True when the row's
    album passes both the text query and the background filter.

    The "All Albums" row (album is None) is always visible.
    """
    query, bg = self.__filter, self.__bg_filter
    if query is None and bg is None:
        return True
    album = model.get_album(iter_)
    if album is None:
        return True
    if bg is None:
        return query(album)
    if query is None:
        return bg(album)
    return bg(album) and query(album)
def __search_func(self, model, column, key, iter_, data):
    """Interactive-search matcher; Gtk expects False for a match."""
    album = model.get_album(iter_)
    if album is None:
        # The "All Albums" row never matches.
        return True
    needle = key.lower()
    if needle in album.title.lower():
        return False
    if config.getboolean("browsers", "album_substrings"):
        # Optionally match against the people tags too.
        for person in album.list("~people"):
            if needle in person.lower():
                return False
    return True
def __popup(self, view, library):
    """Build and show the context menu for the selected albums."""
    albums = self.__get_selected_albums()
    songs = self.__get_songs_from_albums(albums)

    items = []
    if self.__cover_column.get_visible():
        num = len(albums)
        button = MenuItem(
            ngettext("Reload album _cover", "Reload album _covers", num),
            Icons.VIEW_REFRESH,
        )
        button.connect("activate", self.__refresh_album, view)
        items.append(button)

    menu = SongsMenu(library, songs, items=[items])
    menu.show_all()
    return view.popup_menu(menu, 0, Gtk.get_current_event_time())
def __refresh_album(self, menuitem, view):
    """Force a cover rescan for the currently selected albums."""
    items = self.__get_selected_items()
    for item in items:
        item.scanned = False
    model = self.view.get_model()
    for iter_, item in model.iterrows():
        if item in items:
            # row-changed makes the lazy updater rescan this row.
            model.row_changed(model.get_path(iter_), iter_)
def __get_selected_items(self):
    """Return the model items behind the selected rows."""
    selection = self.view.get_selection()
    model, paths = selection.get_selected_rows()
    return model.get_items(paths)
def __get_selected_albums(self):
    """Return the albums behind the selected rows."""
    selection = self.view.get_selection()
    model, paths = selection.get_selected_rows()
    return model.get_albums(paths)
def __get_songs_from_albums(self, albums, sort=True):
    """Flatten *albums* into one song list.

    Album order is kept as given (the model's order); within each album
    songs are sorted by their default sort key when *sort* is true.
    """
    songs = []
    for album in albums:
        tracks = album.songs
        if sort:
            tracks = sorted(tracks, key=lambda song: song.sort_key)
        songs.extend(tracks)
    return songs
def __get_selected_songs(self, sort=True):
    """Return all songs of the selected albums, optionally sorted."""
    albums = self.__get_selected_albums()
    return self.__get_songs_from_albums(albums, sort)
def __drag_data_get(self, view, ctx, sel, tid, etime):
    """DnD source handler: provide songs (target 1) or URIs (target 2)."""
    songs = self.__get_selected_songs()
    if tid == 1:
        qltk.selection_set_songs(sel, songs)
    else:
        sel.set_uris([song("~uri") for song in songs])
def __play_selection(self, view, indices, col):
    # Row activation (double-click / Enter) starts playing the selection.
    self.songs_activated()
def active_filter(self, song):
    """True when *song* belongs to any currently selected album."""
    return any(
        song in album.songs for album in self.__get_selected_albums()
    )
def can_filter_text(self):
    """This browser supports free-text query filtering."""
    return True
def filter_text(self, text):
    """Put *text* into the search entry and apply it if it parses."""
    self.__search.set_text(text)
    if Query(text).is_parsable:
        self.__update_filter(self.__search, text)
        # Move the cursor without emitting selection updates, then
        # emit exactly one via activate().
        self.__inhibit()
        self.view.set_cursor((0,))
        self.__uninhibit()
        self.activate()
def get_filter_text(self):
    """Return the current text of the search entry."""
    return self.__search.get_text()
def can_filter(self, key):
    """Reject numeric (~#) keys and "title" filtering.

    Numerics are different for collections, and although title works,
    it's not of much use here.
    """
    if key is None:
        return super().can_filter(key)
    if key.startswith("~#") or key == "title":
        return False
    return super().can_filter(key)
def can_filter_albums(self):
    """This browser can select albums directly."""
    return True
def list_albums(self):
    """Return the keys of all real albums in the (filtered) model."""
    keys = []
    for row in self.view.get_model():
        item = row[0]
        if item.album:
            keys.append(item.album.key)
    return keys
def filter_albums(self, values):
    """Select every album whose key is in *values*; emit one update."""
    view = self.view
    self.__inhibit()
    changed = view.select_by_func(lambda r: r[0].album and r[0].album.key in values)
    self.view.grab_focus()
    self.__uninhibit()
    # Only emit a selection update if the selection actually changed.
    if changed:
        self.activate()
def unfilter(self):
    """Clear the query and select the top ("All Albums") row."""
    self.filter_text("")
    self.view.set_cursor((0,))
def activate(self):
    # Re-emit the selection so the song list refreshes.
    self.view.get_selection().emit("changed")
def __inhibit(self):
    # Block the selection-changed handler while the selection is
    # changed programmatically.
    self.view.handler_block(self.__sig)
def __uninhibit(self):
    # Re-enable the selection-changed handler blocked by __inhibit().
    self.view.handler_unblock(self.__sig)
def restore(self):
    """Restore the saved query text and album selection from config."""
    text = config.gettext("browsers", "query_text")
    entry = self.__search
    entry.set_text(text)

    # update_filter expects a parsable query
    if Query(text).is_parsable:
        self.__update_filter(entry, text, scroll_up=False, restore=True)

    keys = config.gettext("browsers", "albums").split("\n")

    # FIXME: If albums is "" then it could be either all albums or
    # no albums. If it's "" and some other stuff, assume no albums,
    # otherwise all albums.
    self.__inhibit()
    if keys == [""]:
        self.view.set_cursor((0,))
    else:

        def select_fun(row):
            album = row[0].album
            if not album:  # all
                return False
            return album.str_key in keys

        self.view.select_by_func(select_fun)
    self.__uninhibit()
def scroll(self, song):
    """Select (and scroll to) the album containing *song*."""
    album_key = song.album_key

    # PEP 8 (E731): use a def instead of assigning a lambda to a name.
    def select(row):
        album = row[0].album
        return bool(album and album.key == album_key)

    self.view.select_by_func(select, one=True)
def __get_config_string(self):
    """Serialize the current album selection for the config file.

    Returns "" when the "All Albums" row is selected; otherwise a
    newline-joined list of album string keys.
    """
    selection = self.view.get_selection()
    model, paths = selection.get_selected_rows()

    # All is selected
    if model.contains_all(paths):
        return ""

    # All selected albums
    keys = [album.str_key for album in model.get_albums(paths)]
    confval = "\n".join(keys)
    # ConfigParser strips a trailing \n - so we move it to the front
    if confval.endswith("\n"):
        confval = "\n" + confval[:-1]
    return confval
def save(self):
    """Persist the album selection and query text to the config."""
    conf = self.__get_config_string()
    config.settext("browsers", "albums", conf)
    text = self.__search.get_text()
    config.settext("browsers", "query_text", text)
def __update_songs(self, view, selection):
    # Deferred selection-changed handler: push the selected songs
    # to the song list (unsorted; the song pane handles ordering).
    songs = self.__get_selected_songs(sort=False)
    self.songs_selected(songs)
|
draftutils | todo | # ***************************************************************************
# * (c) 2009, Yorik van Havre <yorik@uncreated.net> *
# * (c) 2019 Eliud Cabrera Castillo <e.cabrera-castillo@tum.de> *
# * *
# * This file is part of the FreeCAD CAx development system. *
# * *
# * This program is free software; you can redistribute it and/or modify *
# * it under the terms of the GNU Lesser General Public License (LGPL) *
# * as published by the Free Software Foundation; either version 2 of *
# * the License, or (at your option) any later version. *
# * for detail see the LICENCE text file. *
# * *
# * FreeCAD is distributed in the hope that it will be useful, *
# * but WITHOUT ANY WARRANTY; without even the implied warranty of *
# * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *
# * GNU Library General Public License for more details. *
# * *
# * You should have received a copy of the GNU Library General Public *
# * License along with FreeCAD; if not, write to the Free Software *
# * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 *
# * USA *
# * *
# ***************************************************************************
"""Provides the ToDo static class to run commands with a time delay.
The `ToDo` class is used to delay the commit of commands for later execution.
This is necessary when a GUI command needs to manipulate the 3D view
in such a way that a callback would crash `Coin`.
The `ToDo` class essentially calls `QtCore.QTimer.singleShot`
to execute the instructions stored in internal lists.
"""
## @package todo
# \ingroup draftutils
# \brief Provides the ToDo static class to run commands with a time delay.
import sys
import traceback
import FreeCAD as App
import FreeCADGui as Gui
import PySide.QtCore as QtCore
from draftutils.messages import _err, _log, _msg, _wrn
__title__ = "FreeCAD Draft Workbench, Todo class"
__author__ = "Yorik van Havre <yorik@uncreated.net>"
__url__ = ["http://www.freecad.org"]
_DEBUG = 0
_DEBUG_inner = 0
## \addtogroup draftutils
# @{
class ToDo:
    """A static class that delays execution of functions.

    GUI commands that manipulate the 3D view cannot always do so safely
    from an event callback without crashing Coin, so the work is queued
    here; `QtCore.QTimer.singleShot(0, ToDo.doTasks)` then runs it once
    control returns to the Qt event loop.

    Class attributes
    ----------------
    itinerary: list of (function, argument) tuples
        Run first.  Each function is called with its argument when one
        is supplied, and without arguments otherwise.
    commitlist: list of (name, command_list) tuples
        Run second, each inside a document transaction called `name`.
        `command_list` is either a list of Python instruction strings
        executed through `Gui.doCommand`, or a single callable that is
        executed directly.
    afteritinerary: list of (function, argument) tuples
        Run last, in the same way as `itinerary`.
    """

    itinerary = []
    commitlist = []
    afteritinerary = []

    @staticmethod
    def doTasks():
        """Execute and clear itinerary, commitlist and afteritinerary."""
        if _DEBUG:
            # Consistency fix: reference the class directly instead of the
            # module-level alias `todo`, which is only defined after this
            # class body ends.
            _msg(
                "Debug: doing delayed tasks.\n"
                "itinerary: {0}\n"
                "commitlist: {1}\n"
                "afteritinerary: {2}\n".format(
                    ToDo.itinerary, ToDo.commitlist, ToDo.afteritinerary
                )
            )
        try:
            for f, arg in ToDo.itinerary:
                try:
                    if _DEBUG_inner:
                        _msg("Debug: executing.\n" "function: {}\n".format(f))
                    # False is a meaningful argument here; only a falsy
                    # non-False argument means "call without arguments".
                    if arg or (arg is False):
                        f(arg)
                    else:
                        f()
                except Exception:
                    _log(traceback.format_exc())
                    _err(traceback.format_exc())
                    wrn = (
                        "ToDo.doTasks, Unexpected error:\n"
                        "{0}\n"
                        "in {1}({2})".format(sys.exc_info()[0], f, arg)
                    )
                    _wrn(wrn)
        except ReferenceError:
            # A queued object was deleted before it could run.
            _wrn("Debug: ToDo.doTasks: " "queue contains a deleted object, skipping")
        ToDo.itinerary = []

        if ToDo.commitlist:
            for name, func in ToDo.commitlist:
                if _DEBUG_inner:
                    _msg("Debug: committing.\n" "name: {}\n".format(name))
                try:
                    name = str(name)
                    App.activeDocument().openTransaction(name)
                    if isinstance(func, list):
                        for string in func:
                            Gui.doCommand(string)
                    else:
                        func()
                    App.activeDocument().commitTransaction()
                except Exception:
                    _log(traceback.format_exc())
                    _err(traceback.format_exc())
                    wrn = "ToDo.doTasks, Unexpected error:\n" "{0}\n" "in {1}".format(
                        sys.exc_info()[0], func
                    )
                    _wrn(wrn)
            # Restack Draft screen widgets after creation
            if hasattr(Gui, "Snapper"):
                Gui.Snapper.restack()
        ToDo.commitlist = []

        for f, arg in ToDo.afteritinerary:
            try:
                if _DEBUG_inner:
                    _msg("Debug: executing after.\n" "function: {}\n".format(f))
                # NOTE(review): unlike the itinerary loop above, a False
                # argument is not passed through here -- confirm whether
                # that asymmetry is intentional before changing it.
                if arg:
                    f(arg)
                else:
                    f()
            except Exception:
                _log(traceback.format_exc())
                _err(traceback.format_exc())
                wrn = "ToDo.doTasks, Unexpected error:\n" "{0}\n" "in {1}({2})".format(
                    sys.exc_info()[0], f, arg
                )
                _wrn(wrn)
        ToDo.afteritinerary = []

    @staticmethod
    def delay(f, arg):
        """Queue `f(arg)` on the itinerary list for delayed execution.

        Schedules geometry manipulation that would crash Coin if done
        in the event callback.  When the itinerary is empty, a
        single-shot timer is armed so `doTasks` runs once control
        returns to the Qt event loop.

        Parameters
        ----------
        f: callable
            Executed as `f(arg)`, or `f()` when `arg` is falsy
            (except False, which is passed through).
        arg: object
            Argument handed to `f`.
        """
        if _DEBUG:
            _msg("Debug: delaying.\n" "function: {}\n".format(f))
        if ToDo.itinerary == []:
            QtCore.QTimer.singleShot(0, ToDo.doTasks)
        ToDo.itinerary.append((f, arg))

    @staticmethod
    def delayCommit(cl):
        """Schedule `doTasks` and replace the commit list with `cl`.

        Parameters
        ----------
        cl: list of (name, command_list) tuples
            See the class docstring for the accepted forms.
        """
        if _DEBUG:
            _msg("Debug: delaying commit.\n" "commitlist: {}\n".format(cl))
        QtCore.QTimer.singleShot(0, ToDo.doTasks)
        ToDo.commitlist = cl

    @staticmethod
    def delayAfter(f, arg):
        """Queue `f(arg)` on the afteritinerary list; works like `delay`."""
        if _DEBUG:
            _msg("Debug: delaying after.\n" "function: {}\n".format(f))
        if ToDo.afteritinerary == []:
            QtCore.QTimer.singleShot(0, ToDo.doTasks)
        ToDo.afteritinerary.append((f, arg))
# Alias for compatibility with v0.18 and earlier
todo = ToDo
## @}
|
sslcrypto | _ecc | import hashlib
import hmac
import struct
import base58
# Probe once at import time: hashlib raises ValueError when the
# "ripemd160" algorithm is not provided by the underlying OpenSSL build.
try:
    hashlib.new("ripemd160")
except ValueError:
    # No native implementation
    from . import _ripemd

    def ripemd160(*args):
        # Fall back to the bundled pure-Python RIPEMD-160.
        return _ripemd.new(*args)

else:
    # Use OpenSSL
    def ripemd160(*args):
        return hashlib.new("ripemd160", *args)
class ECC:
# pylint: disable=line-too-long
# name: (nid, p, n, a, b, (Gx, Gy)),
CURVES = {
"secp112r1": (
704,
0xDB7C2ABF62E35E668076BEAD208B,
0xDB7C2ABF62E35E7628DFAC6561C5,
0xDB7C2ABF62E35E668076BEAD2088,
0x659EF8BA043916EEDE8911702B22,
(0x09487239995A5EE76B55F9C2F098, 0xA89CE5AF8724C0A23E0E0FF77500),
),
"secp112r2": (
705,
0xDB7C2ABF62E35E668076BEAD208B,
0x36DF0AAFD8B8D7597CA10520D04B,
0x6127C24C05F38A0AAAF65C0EF02C,
0x51DEF1815DB5ED74FCC34C85D709,
(0x4BA30AB5E892B4E1649DD0928643, 0xADCD46F5882E3747DEF36E956E97),
),
"secp128r1": (
706,
0xFFFFFFFDFFFFFFFFFFFFFFFFFFFFFFFF,
0xFFFFFFFE0000000075A30D1B9038A115,
0xFFFFFFFDFFFFFFFFFFFFFFFFFFFFFFFC,
0xE87579C11079F43DD824993C2CEE5ED3,
(0x161FF7528B899B2D0C28607CA52C5B86, 0xCF5AC8395BAFEB13C02DA292DDED7A83),
),
"secp128r2": (
707,
0xFFFFFFFDFFFFFFFFFFFFFFFFFFFFFFFF,
0x3FFFFFFF7FFFFFFFBE0024720613B5A3,
0xD6031998D1B3BBFEBF59CC9BBFF9AEE1,
0x5EEEFCA380D02919DC2C6558BB6D8A5D,
(0x7B6AA5D85E572983E6FB32A7CDEBC140, 0x27B6916A894D3AEE7106FE805FC34B44),
),
"secp160k1": (
708,
0x00FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEFFFFAC73,
0x0100000000000000000001B8FA16DFAB9ACA16B6B3,
0,
7,
(
0x3B4C382CE37AA192A4019E763036F4F5DD4D7EBB,
0x938CF935318FDCED6BC28286531733C3F03C4FEE,
),
),
"secp160r1": (
709,
0x00FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF7FFFFFFF,
0x0100000000000000000001F4C8F927AED3CA752257,
0x00FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF7FFFFFFC,
0x001C97BEFC54BD7A8B65ACF89F81D4D4ADC565FA45,
(
0x4A96B5688EF573284664698968C38BB913CBFC82,
0x23A628553168947D59DCC912042351377AC5FB32,
),
),
"secp160r2": (
710,
0x00FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEFFFFAC73,
0x0100000000000000000000351EE786A818F3A1A16B,
0x00FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEFFFFAC70,
0x00B4E134D3FB59EB8BAB57274904664D5AF50388BA,
(
0x52DCB034293A117E1F4FF11B30F7199D3144CE6D,
0xFEAFFEF2E331F296E071FA0DF9982CFEA7D43F2E,
),
),
"secp192k1": (
711,
0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEFFFFEE37,
0xFFFFFFFFFFFFFFFFFFFFFFFE26F2FC170F69466A74DEFD8D,
0,
3,
(
0xDB4FF10EC057E9AE26B07D0280B7F4341DA5D1B1EAE06C7D,
0x9B2F2F6D9C5628A7844163D015BE86344082AA88D95E2F9D,
),
),
"prime192v1": (
409,
0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEFFFFFFFFFFFFFFFF,
0xFFFFFFFFFFFFFFFFFFFFFFFF99DEF836146BC9B1B4D22831,
0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEFFFFFFFFFFFFFFFC,
0x64210519E59C80E70FA7E9AB72243049FEB8DEECC146B9B1,
(
0x188DA80EB03090F67CBF20EB43A18800F4FF0AFD82FF1012,
0x07192B95FFC8DA78631011ED6B24CDD573F977A11E794811,
),
),
"secp224k1": (
712,
0x00FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEFFFFE56D,
0x010000000000000000000000000001DCE8D2EC6184CAF0A971769FB1F7,
0,
5,
(
0xA1455B334DF099DF30FC28A169A467E9E47075A90F7E650EB6B7A45C,
0x7E089FED7FBA344282CAFBD6F7E319F7C0B0BD59E2CA4BDB556D61A5,
),
),
"secp224r1": (
713,
0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF000000000000000000000001,
0xFFFFFFFFFFFFFFFFFFFFFFFFFFFF16A2E0B8F03E13DD29455C5C2A3D,
0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEFFFFFFFFFFFFFFFFFFFFFFFE,
0xB4050A850C04B3ABF54132565044B0B7D7BFD8BA270B39432355FFB4,
(
0xB70E0CBD6BB4BF7F321390B94A03C1D356C21122343280D6115C1D21,
0xBD376388B5F723FB4C22DFE6CD4375A05A07476444D5819985007E34,
),
),
"secp256k1": (
714,
0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEFFFFFC2F,
0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEBAAEDCE6AF48A03BBFD25E8CD0364141,
0,
7,
(
0x79BE667EF9DCBBAC55A06295CE870B07029BFCDB2DCE28D959F2815B16F81798,
0x483ADA7726A3C4655DA4FBFC0E1108A8FD17B448A68554199C47D08FFB10D4B8,
),
),
"prime256v1": (
715,
0xFFFFFFFF00000001000000000000000000000000FFFFFFFFFFFFFFFFFFFFFFFF,
0xFFFFFFFF00000000FFFFFFFFFFFFFFFFBCE6FAADA7179E84F3B9CAC2FC632551,
0xFFFFFFFF00000001000000000000000000000000FFFFFFFFFFFFFFFFFFFFFFFC,
0x5AC635D8AA3A93E7B3EBBD55769886BC651D06B0CC53B0F63BCE3C3E27D2604B,
(
0x6B17D1F2E12C4247F8BCE6E563A440F277037D812DEB33A0F4A13945D898C296,
0x4FE342E2FE1A7F9B8EE7EB4A7C0F9E162BCE33576B315ECECBB6406837BF51F5,
),
),
"secp384r1": (
716,
0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEFFFFFFFF0000000000000000FFFFFFFF,
0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFC7634D81F4372DDF581A0DB248B0A77AECEC196ACCC52973,
0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEFFFFFFFF0000000000000000FFFFFFFC,
0xB3312FA7E23EE7E4988E056BE3F82D19181D9C6EFE8141120314088F5013875AC656398D8A2ED19D2A85C8EDD3EC2AEF,
(
0xAA87CA22BE8B05378EB1C71EF320AD746E1D3B628BA79B9859F741E082542A385502F25DBF55296C3A545E3872760AB7,
0x3617DE4A96262C6F5D9E98BF9292DC29F8F41DBD289A147CE9DA3113B5F0B8C00A60B1CE1D7E819D7A431D7C90EA0E5F,
),
),
"secp521r1": (
717,
0x01FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF,
0x01FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFA51868783BF2F966B7FCC0148F709A5D03BB5C9B8899C47AEBB6FB71E91386409,
0x01FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFC,
0x0051953EB9618E1C9A1F929A21A0B68540EEA2DA725B99B315F3B8B489918EF109E156193951EC7E937B1652C0BD3BB1BF073573DF883D2C34F1EF451FD46B503F00,
(
0x00C6858E06B70404E9CD9E3ECB662395B4429C648139053FB521F828AF606B4D3DBAA14B5E77EFE75928FE1DC127A2FFA8DE3348B3C1856A429BF97E7E31C2E5BD66,
0x011839296A789A3BC0045C8A5FB42C7D1BD998F54449579B446817AFBD17273E662C97EE72995EF42640C550B9013FAD0761353C7086A272C24088BE94769FD16650,
),
),
}
# pylint: enable=line-too-long
def __init__(self, backend, aes):
self._backend = backend
self._aes = aes
def get_curve(self, name):
    """Look up a named curve and wrap it in an ``EllipticCurve`` facade.

    ``name`` must be a key of ``CURVES``; otherwise ValueError is raised.
    """
    if name not in self.CURVES:
        # f-string instead of the dated str.format idiom
        raise ValueError(f"Unknown curve {name}")
    # CURVES maps name -> (numeric id, prime p, order n, a, b, generator point g)
    nid, p, n, a, b, g = self.CURVES[name]
    return EllipticCurve(self._backend(p, n, a, b, g), self._aes, nid)
def get_backend(self):
    """Return the backend identifier reported by the wrapped backend module."""
    return self._backend.get_backend()
class EllipticCurve:
    """High-level elliptic-curve operations on top of a point-arithmetic backend.

    Provides key encoding/decoding, ECDH derivation, ECIES-style encrypt/
    decrypt, and (recoverable) ECDSA sign/recover/verify.  ``backend`` does
    the raw curve math, ``aes`` supplies the symmetric cipher, and ``nid`` is
    the curve's numeric identifier embedded in the OpenSSL-style public key
    serialization.
    """

    def __init__(self, backend, aes, nid):
        self._backend = backend  # curve arithmetic backend
        self._aes = aes  # symmetric cipher used by encrypt()/decrypt()
        self.nid = nid  # numeric curve id (see the CURVES table)

    def _encode_public_key(self, x, y, is_compressed=True, raw=True):
        """Serialize an (x, y) point.

        raw=True produces the SEC1 form: ``0x02|0x03`` prefix + x when
        compressed (prefix parity follows the last byte of y), or
        ``0x04`` + x + y when uncompressed.  raw=False produces an
        OpenSSL-style record: big-endian 16-bit nid, then length-prefixed
        x and y.
        """
        if raw:
            if is_compressed:
                return bytes([0x02 + (y[-1] % 2)]) + x
            else:
                return bytes([0x04]) + x + y
        else:
            return (
                struct.pack("!HH", self.nid, len(x)) + x + struct.pack("!H", len(y)) + y
            )

    def _decode_public_key(self, public_key, partial=False):
        """Parse a SEC1-encoded public key into (x, y) byte strings.

        With partial=True trailing bytes are tolerated and the number of
        bytes consumed is returned alongside the point.  Raises ValueError
        on empty input, bad prefix, or bad length.
        """
        if not public_key:
            raise ValueError("No public key")
        if public_key[0] == 0x04:
            # Uncompressed
            expected_length = 1 + 2 * self._backend.public_key_length
            if partial:
                if len(public_key) < expected_length:
                    raise ValueError("Invalid uncompressed public key length")
            else:
                if len(public_key) != expected_length:
                    raise ValueError("Invalid uncompressed public key length")
            x = public_key[1 : 1 + self._backend.public_key_length]
            y = public_key[1 + self._backend.public_key_length : expected_length]
            if partial:
                return (x, y), expected_length
            else:
                return x, y
        elif public_key[0] in (0x02, 0x03):
            # Compressed
            expected_length = 1 + self._backend.public_key_length
            if partial:
                if len(public_key) < expected_length:
                    raise ValueError("Invalid compressed public key length")
            else:
                if len(public_key) != expected_length:
                    raise ValueError("Invalid compressed public key length")
            x, y = self._backend.decompress_point(public_key[:expected_length])
            # Sanity check: decompression must echo the x bytes we fed in.
            if x != public_key[1:expected_length]:
                raise ValueError("Incorrect compressed public key")
            if partial:
                return (x, y), expected_length
            else:
                return x, y
        else:
            raise ValueError("Invalid public key prefix")

    def _decode_public_key_openssl(self, public_key, partial=False):
        """Parse the OpenSSL-style record produced by _encode_public_key(raw=False).

        Layout: 16-bit nid, 16-bit len(x), x, 16-bit len(y), y (all
        big-endian).  With partial=True, returns ((x, y), bytes_consumed)
        and tolerates trailing data; otherwise trailing data is an error.
        """
        if not public_key:
            raise ValueError("No public key")
        i = 0
        (nid,) = struct.unpack("!H", public_key[i : i + 2])
        i += 2
        if nid != self.nid:
            raise ValueError("Wrong curve")
        (xlen,) = struct.unpack("!H", public_key[i : i + 2])
        i += 2
        if len(public_key) - i < xlen:
            raise ValueError("Too short public key")
        x = public_key[i : i + xlen]
        i += xlen
        (ylen,) = struct.unpack("!H", public_key[i : i + 2])
        i += 2
        if len(public_key) - i < ylen:
            raise ValueError("Too short public key")
        y = public_key[i : i + ylen]
        i += ylen
        if partial:
            return (x, y), i
        else:
            if i < len(public_key):
                raise ValueError("Too long public key")
            return x, y

    def new_private_key(self, is_compressed=False):
        """Generate a private key; a trailing 0x01 byte marks 'compressed'."""
        return self._backend.new_private_key() + (b"\x01" if is_compressed else b"")

    def private_to_public(self, private_key):
        """Derive the public key, honoring the optional 0x01 compression suffix."""
        if len(private_key) == self._backend.public_key_length:
            is_compressed = False
        elif (
            len(private_key) == self._backend.public_key_length + 1
            and private_key[-1] == 1
        ):
            is_compressed = True
            private_key = private_key[:-1]
        else:
            raise ValueError("Private key has invalid length")
        x, y = self._backend.private_to_public(private_key)
        return self._encode_public_key(x, y, is_compressed=is_compressed)

    def private_to_wif(self, private_key):
        """Encode a private key as WIF (0x80 prefix — presumably Bitcoin mainnet)."""
        return base58.b58encode_check(b"\x80" + private_key)

    def wif_to_private(self, wif):
        """Decode a WIF string back to the raw private key; rejects non-0x80 prefixes."""
        dec = base58.b58decode_check(wif)
        if dec[0] != 0x80:
            raise ValueError("Invalid network (expected mainnet)")
        return dec[1:]

    def public_to_address(self, public_key):
        """Hash a public key to a base58check address (SHA-256 then RIPEMD-160, 0x00 version byte)."""
        h = hashlib.sha256(public_key).digest()
        hash160 = ripemd160(h).digest()
        return base58.b58encode_check(b"\x00" + hash160)

    def private_to_address(self, private_key):
        # Kinda useless but left for quick migration from pybitcointools
        return self.public_to_address(self.private_to_public(private_key))

    def derive(self, private_key, public_key):
        """ECDH: derive the shared secret between a private and a public key.

        ``public_key`` may be an (x, y) tuple or a SEC1-encoded byte string.
        The private key's optional compression suffix is stripped first.
        """
        if (
            len(private_key) == self._backend.public_key_length + 1
            and private_key[-1] == 1
        ):
            private_key = private_key[:-1]
        if len(private_key) != self._backend.public_key_length:
            raise ValueError("Private key has invalid length")
        if not isinstance(public_key, tuple):
            public_key = self._decode_public_key(public_key)
        return self._backend.ecdh(private_key, public_key)

    def _digest(self, data, hash):
        """Apply the named/callable hash to data; None passes data through unchanged."""
        if hash is None:
            return data
        elif callable(hash):
            return hash(data)
        elif hash == "sha1":
            return hashlib.sha1(data).digest()
        elif hash == "sha256":
            return hashlib.sha256(data).digest()
        elif hash == "sha512":
            return hashlib.sha512(data).digest()
        else:
            raise ValueError("Unknown hash/derivation method")

    # High-level functions
    def encrypt(
        self,
        data,
        public_key,
        algo="aes-256-cbc",
        derivation="sha256",
        mac="hmac-sha256",
        return_aes_key=False,
    ):
        """ECIES-style encryption to ``public_key``.

        Generates an ephemeral key pair, derives a shared key via ECDH,
        splits the derived digest into cipher key + MAC key, and returns
        ``iv + ephemeral_public_key(openssl form) + ciphertext + mac_tag``.
        With return_aes_key=True the symmetric key is returned as well.
        """
        # Generate ephemeral private key
        private_key = self.new_private_key()
        # Derive key
        ecdh = self.derive(private_key, public_key)
        key = self._digest(ecdh, derivation)
        k_enc_len = self._aes.get_algo_key_length(algo)
        if len(key) < k_enc_len:
            raise ValueError("Too short digest")
        # First k_enc_len bytes encrypt; the remainder keys the MAC.
        k_enc, k_mac = key[:k_enc_len], key[k_enc_len:]
        # Encrypt
        ciphertext, iv = self._aes.encrypt(data, k_enc, algo=algo)
        ephem_public_key = self.private_to_public(private_key)
        # Re-encode the ephemeral key in the length-prefixed OpenSSL form so
        # decrypt() can consume it with _decode_public_key_openssl().
        ephem_public_key = self._decode_public_key(ephem_public_key)
        ephem_public_key = self._encode_public_key(*ephem_public_key, raw=False)
        ciphertext = iv + ephem_public_key + ciphertext
        # Add MAC tag (computed over iv + ephemeral key + ciphertext)
        if callable(mac):
            tag = mac(k_mac, ciphertext)
        elif mac == "hmac-sha256":
            h = hmac.new(k_mac, digestmod="sha256")
            h.update(ciphertext)
            tag = h.digest()
        elif mac == "hmac-sha512":
            h = hmac.new(k_mac, digestmod="sha512")
            h.update(ciphertext)
            tag = h.digest()
        elif mac is None:
            tag = b""
        else:
            raise ValueError("Unsupported MAC")
        if return_aes_key:
            return ciphertext + tag, k_enc
        else:
            return ciphertext + tag

    def decrypt(
        self,
        ciphertext,
        private_key,
        algo="aes-256-cbc",
        derivation="sha256",
        mac="hmac-sha256",
    ):
        """Reverse of encrypt(): verify the MAC tag, then decrypt.

        Raises ValueError on a truncated message, an unsupported MAC, or a
        MAC mismatch.
        """
        # Get MAC tag
        if callable(mac):
            tag_length = mac.digest_size
        elif mac == "hmac-sha256":
            tag_length = hmac.new(b"", digestmod="sha256").digest_size
        elif mac == "hmac-sha512":
            tag_length = hmac.new(b"", digestmod="sha512").digest_size
        elif mac is None:
            tag_length = 0
        else:
            raise ValueError("Unsupported MAC")
        if len(ciphertext) < tag_length:
            raise ValueError("Ciphertext is too small to contain MAC tag")
        if tag_length == 0:
            tag = b""
        else:
            ciphertext, tag = ciphertext[:-tag_length], ciphertext[-tag_length:]
        # The MAC covers everything before the tag: iv + ephemeral key + data.
        orig_ciphertext = ciphertext
        if len(ciphertext) < 16:
            raise ValueError("Ciphertext is too small to contain IV")
        iv, ciphertext = ciphertext[:16], ciphertext[16:]
        public_key, pos = self._decode_public_key_openssl(ciphertext, partial=True)
        ciphertext = ciphertext[pos:]
        # Derive key
        ecdh = self.derive(private_key, public_key)
        key = self._digest(ecdh, derivation)
        k_enc_len = self._aes.get_algo_key_length(algo)
        if len(key) < k_enc_len:
            raise ValueError("Too short digest")
        k_enc, k_mac = key[:k_enc_len], key[k_enc_len:]
        # Verify MAC tag (unsupported mac values were already rejected above)
        if callable(mac):
            expected_tag = mac(k_mac, orig_ciphertext)
        elif mac == "hmac-sha256":
            h = hmac.new(k_mac, digestmod="sha256")
            h.update(orig_ciphertext)
            expected_tag = h.digest()
        elif mac == "hmac-sha512":
            h = hmac.new(k_mac, digestmod="sha512")
            h.update(orig_ciphertext)
            expected_tag = h.digest()
        elif mac is None:
            expected_tag = b""
        # Constant-time comparison to avoid a MAC timing oracle
        if not hmac.compare_digest(tag, expected_tag):
            raise ValueError("Invalid MAC tag")
        return self._aes.decrypt(ciphertext, iv, k_enc, algo=algo)

    def sign(self, data, private_key, hash="sha256", recoverable=False, entropy=None):
        """ECDSA-sign ``data`` (digested with ``hash``) using ``private_key``.

        When no entropy is supplied, it is derived deterministically from the
        key and message via chained HMAC-SHA256 (similar in spirit to
        RFC 6979 — not a verified full implementation).
        """
        if len(private_key) == self._backend.public_key_length:
            is_compressed = False
        elif (
            len(private_key) == self._backend.public_key_length + 1
            and private_key[-1] == 1
        ):
            is_compressed = True
            private_key = private_key[:-1]
        else:
            raise ValueError("Private key has invalid length")
        data = self._digest(data, hash)
        if not entropy:
            v = b"\x01" * len(data)
            k = b"\x00" * len(data)
            k = hmac.new(k, v + b"\x00" + private_key + data, "sha256").digest()
            v = hmac.new(k, v, "sha256").digest()
            k = hmac.new(k, v + b"\x01" + private_key + data, "sha256").digest()
            v = hmac.new(k, v, "sha256").digest()
            entropy = hmac.new(k, v, "sha256").digest()
        return self._backend.sign(
            data, private_key, recoverable, is_compressed, entropy=entropy
        )

    def recover(self, signature, data, hash="sha256"):
        """Recover the signer's public key from a recoverable signature."""
        # Sanity check: is this signature recoverable?
        if len(signature) != 1 + 2 * self._backend.public_key_length:
            raise ValueError("Cannot recover an unrecoverable signature")
        x, y = self._backend.recover(signature, self._digest(data, hash))
        # Recovery prefix bytes >= 31 indicate the public key was compressed.
        is_compressed = signature[0] >= 31
        return self._encode_public_key(x, y, is_compressed=is_compressed)

    def verify(self, signature, data, public_key, hash="sha256"):
        """Verify an ECDSA signature (recoverable or plain) over ``data``."""
        if len(signature) == 1 + 2 * self._backend.public_key_length:
            # Recoverable signature: drop the leading recovery-id byte.
            signature = signature[1:]
        if len(signature) != 2 * self._backend.public_key_length:
            raise ValueError("Invalid signature format")
        if not isinstance(public_key, tuple):
            public_key = self._decode_public_key(public_key)
        return self._backend.verify(signature, self._digest(data, hash), public_key)

    def derive_child(self, seed, child):
        """Derive a non-hardened child key per BIP32 (child index < 2**31)."""
        # Based on BIP32
        if not 0 <= child < 2**31:
            raise ValueError("Invalid child index")
        return self._backend.derive_child(seed, child)
|
queries | session_recording_properties | from datetime import timedelta
from typing import TYPE_CHECKING, Any, Dict, List, NamedTuple, Tuple
from posthog.client import sync_execute
from posthog.models.event.util import parse_properties
from posthog.models.filters.session_recordings_filter import SessionRecordingsFilter
from posthog.queries.event_query import EventQuery
if TYPE_CHECKING:
from posthog.models import Team
class EventFiltersSQL(NamedTuple):
    """SQL fragments plus bound parameters produced for event-based filters."""

    aggregate_select_clause: str
    aggregate_having_clause: str
    where_conditions: str
    params: Dict[str, Any]
class SessionRecordingQueryResult(NamedTuple):
    """A page of recording rows plus a flag for whether more pages exist."""

    results: List
    has_more_recording: bool
class SessionRecordingProperties(EventQuery):
    """Fetch, per session recording, the properties of one matched
    $pageview/$autocapture event — enough to show recording metadata
    without returning every event in the session."""

    _filter: SessionRecordingsFilter
    _session_ids: List[str]

    # Only these event properties are surfaced to callers; everything else in
    # the properties blob is dropped by parse_properties() below.
    SESSION_RECORDING_PROPERTIES_ALLOWLIST = {
        "$os",
        "$browser",
        "$device_type",
        "$current_url",
        "$host",
        "$pathname",
        "$geoip_country_code",
        "$geoip_country_name",
    }

    # First $pageview event in a recording is used to extract metadata (browser, location, etc.) without
    # having to return all events.
    _core_single_pageview_event_query = """
        SELECT
            "$session_id" AS session_id,
            any(properties) AS properties
        FROM events
        PREWHERE
            team_id = %(team_id)s
            AND event IN ['$pageview', '$autocapture']
            {session_ids_clause}
            {events_timestamp_clause}
        GROUP BY session_id
    """

    def __init__(
        self, team: "Team", session_ids: List[str], filter: SessionRecordingsFilter
    ):
        super().__init__(team=team, filter=filter)
        self._session_ids = sorted(session_ids)  # Sort for stable queries

    def _determine_should_join_distinct_ids(self) -> None:
        # This query never filters on persons, so no distinct_ids join is made.
        self._should_join_distinct_ids = False

    # We want to select events beyond the range of the recording to handle the case where
    # a recording spans the time boundaries
    def _get_events_timestamp_clause(self) -> Tuple[str, Dict[str, Any]]:
        """Build the timestamp WHERE fragment, padding the filter's date
        range by 12 hours on each side."""
        timestamp_clause = ""
        timestamp_params = {}
        if self._filter.date_from:
            timestamp_clause += "\nAND timestamp >= %(event_start_time)s"
            timestamp_params["event_start_time"] = self._filter.date_from - timedelta(
                hours=12
            )
        if self._filter.date_to:
            timestamp_clause += "\nAND timestamp <= %(event_end_time)s"
            timestamp_params["event_end_time"] = self._filter.date_to + timedelta(
                hours=12
            )
        return timestamp_clause, timestamp_params

    def format_session_recording_id_filters(self) -> Tuple[str, Dict]:
        """Restrict the query to the requested session ids."""
        where_conditions = "AND session_id IN %(session_ids)s"
        return where_conditions, {"session_ids": self._session_ids}

    def get_query(self) -> Tuple[str, Dict[str, Any]]:
        """Assemble the final SQL string and its bound parameters."""
        base_params = {"team_id": self._team_id}
        (
            events_timestamp_clause,
            events_timestamp_params,
        ) = self._get_events_timestamp_clause()
        (
            session_ids_clause,
            session_ids_params,
        ) = self.format_session_recording_id_filters()
        return (
            self._core_single_pageview_event_query.format(
                events_timestamp_clause=events_timestamp_clause,
                session_ids_clause=session_ids_clause,
            ),
            {**base_params, **events_timestamp_params, **session_ids_params},
        )

    def _data_to_return(self, results: List[Any]) -> List[Dict[str, Any]]:
        """Map raw rows to dicts, keeping only allowlisted properties."""
        return [
            {
                "session_id": row[0],
                "properties": parse_properties(
                    row[1], self.SESSION_RECORDING_PROPERTIES_ALLOWLIST
                ),
            }
            for row in results
        ]

    def run(self) -> List:
        """Execute the query and return the cleaned per-session property dicts."""
        query, query_params = self.get_query()
        query_results = sync_execute(query, query_params)
        session_recording_properties = self._data_to_return(query_results)
        return session_recording_properties
|
PyObjCTest | test_structs | """
XXX: Add tests that check that the type actually works as expected:
* Use struct value as method argument
* Return struct value from a method
Add tests for nested structs as well (that is assert that NSRect.location is
an NSPoint, but using our own types)
"""
import objc
from PyObjCTest.fnd import NSObject
from PyObjCTest.structs import *
from PyObjCTools.TestSupport import *
class TestStructs(TestCase):
    """Tests for objc.createStructType and Objective-C struct bridging."""

    def testCreateExplicit(self):
        """Struct types created with an explicit field list expose those fields."""
        tp = objc.createStructType(
            "FooStruct", b"{_FooStruct=ffff}", ["a", "b", "c", "d"]
        )
        self.assertIsInstance(tp, type)
        self.assertEqual(tp.__typestr__, b"{_FooStruct=ffff}")
        self.assertEqual(tp._fields, ("a", "b", "c", "d"))
        o = tp()
        self.assertHasAttr(o, "a")
        self.assertHasAttr(o, "b")
        self.assertHasAttr(o, "c")
        self.assertHasAttr(o, "d")
        # Registering a struct type also exposes a matching objc.ivar factory.
        self.assertHasAttr(objc.ivar, "FooStruct")
        v = objc.ivar.FooStruct()
        self.assertIsInstance(v, objc.ivar)
        self.assertEqual(v.__typestr__, tp.__typestr__)

    def testNamedTupleAPI(self):
        """Struct types support the namedtuple-style _fields/_asdict/_replace API."""
        Point = objc.createStructType("OCPoint", b"{_OCPoint=dd}", ["x", "y"])
        Line = objc.createStructType(
            "OCLine",
            b"{_OCLine={_OCPoint=dd}{_OCPoint=dd}}d",
            ["start", "stop", "width"],
        )
        self.assertEqual(Point._fields, ("x", "y"))
        self.assertEqual(Line._fields, ("start", "stop", "width"))
        p = Point(3, 4)
        self.assertEqual(p.x, 3.0)
        self.assertEqual(p.y, 4.0)
        self.assertEqual(p._asdict(), {"x": 3.0, "y": 4.0})
        p2 = p._replace(y=5)
        # _replace must return a new value and leave the original unchanged.
        self.assertEqual(p.x, 3.0)
        self.assertEqual(p.y, 4.0)
        self.assertEqual(p2.x, 3.0)
        self.assertEqual(p2.y, 5)
        line = Line(Point(1, 2), Point(8, 9), 7)
        self.assertEqual(line.start.x, 1.0)
        self.assertEqual(line.start.y, 2.0)
        self.assertEqual(line.stop.x, 8.0)
        self.assertEqual(line.stop.y, 9.0)
        self.assertEqual(line.width, 7.0)
        self.assertEqual(
            line._asdict(), {"start": Point(1, 2), "stop": Point(8, 9), "width": 7.0}
        )
        line2 = line._replace(stop=Point(3, 4), width=0.5)
        # Again: the original is untouched, only the copy carries the changes.
        self.assertEqual(line.start.x, 1.0)
        self.assertEqual(line.start.y, 2.0)
        self.assertEqual(line.stop.x, 8.0)
        self.assertEqual(line.stop.y, 9.0)
        self.assertEqual(line.width, 7.0)
        self.assertEqual(line2.start.x, 1.0)
        self.assertEqual(line2.start.y, 2.0)
        self.assertEqual(line2.stop.x, 3.0)
        self.assertEqual(line2.stop.y, 4.0)
        self.assertEqual(line2.width, 0.5)

    def testCreateImplicit(self):
        """Field names can be taken from the embedded type-string annotations."""
        tp = objc.createStructType("BarStruct", b'{_BarStruct="e"f"f"f"g"f"h"f}', None)
        self.assertIsInstance(tp, type)
        self.assertEqual(tp.__typestr__, b"{_BarStruct=ffff}")
        o = tp()
        self.assertHasAttr(o, "e")
        self.assertHasAttr(o, "f")
        self.assertHasAttr(o, "g")
        self.assertHasAttr(o, "h")
        self.assertEqual(tp._fields, ("e", "f", "g", "h"))
        # Malformed type strings must be rejected:
        self.assertRaises(
            ValueError, objc.createStructType, "Foo2", b'{_Foo=f"a"}', None
        )
        self.assertRaises(
            ValueError, objc.createStructType, "Foo3", b'{_Foo="a"f', None
        )
        self.assertRaises(
            ValueError, objc.createStructType, "Foo4", b'^{_Foo="a"f}', None
        )

    def testPointerFields(self):
        # Note: the created type won't be all that useful unless the pointer
        # happens to be something that PyObjC knows how to deal with, this is
        # more a check to see if createStructType knows how to cope with
        # non-trivial types.
        tp = objc.createStructType(
            "XBarStruct", b'{_XBarStruct="e"^f"f"^f"g"^@"h"f}', None
        )
        self.assertIsInstance(tp, type)
        self.assertEqual(tp.__typestr__, b"{_XBarStruct=^f^f^@f}")
        o = tp()
        self.assertHasAttr(o, "e")
        self.assertHasAttr(o, "f")
        self.assertHasAttr(o, "g")
        self.assertHasAttr(o, "h")

    def testEmbeddedFields(self):
        """Struct values crossing the ObjC bridge keep their field values."""
        tp = objc.createStructType("BarStruct", b'{FooStruct="first"i"second"i}', None)
        v = OC_StructTest.createWithFirst_andSecond_(1, 2)
        self.assertIsInstance(v, tp)
        x = OC_StructTest.sumFields_(v)
        self.assertEqual(x, v.first + v.second)
        self.assertEqual(v.first, 1)
        self.assertEqual(v.second, 2)
        self.assertHasAttr(objc.ivar, "BarStruct")
        v = objc.ivar.BarStruct()
        self.assertEqual(v.__typestr__, b"{FooStruct=ii}")

    def testStructCallback(self):
        """
        Regression test for an issue reported on the PyObjC mailinglist.
        """
        tp = objc.createStructType("FooStruct", b'{FooStruct="first"i"second"i}', None)
        StructArrayDelegate = objc.informal_protocol(
            "StructArrayDelegate",
            [
                objc.selector(
                    None, b"arrayOf4Structs:", signature=b"@@:[4{FooStruct=ii}]"
                ),
            ],
        )

        class OC_PyStruct(NSObject):
            def arrayOf4Structs_(self, value):
                return value

        self.assertEqual(
            OC_PyStruct.arrayOf4Structs_.signature,
            b"@@:[4{FooStruct=" + objc._C_INT + objc._C_INT + b"}]",
        )
        o = OC_PyStruct.alloc().init()
        v = OC_StructTest.callArrayOf4Structs_(o)
        self.assertEqual(len(v), 4)
        # BUG FIX: was `range(3)`, which skipped the type check on v[3]
        # even though all four values are asserted below.
        for i in range(len(v)):
            self.assertIsInstance(v[i], tp)
        self.assertEqual(v[0], tp(1, 2))
        self.assertEqual(v[1], tp(3, 4))
        self.assertEqual(v[2], tp(5, 6))
        self.assertEqual(v[3], tp(7, 8))
if __name__ == "__main__":
    # Run the test suite when this module is executed directly.
    main()
|
fta | majorityvotegate | """Majority vote gate item definition."""
from math import pi
from gaphas.geometry import Rectangle
from gaphor.core.modeling import DrawContext
from gaphor.diagram.presentation import (
Classified,
ElementPresentation,
from_package_str,
)
from gaphor.diagram.shapes import Box, IconBox, Text, stroke
from gaphor.diagram.support import represents
from gaphor.diagram.text import FontStyle, FontWeight
from gaphor.RAAML import raaml
from gaphor.RAAML.fta.constants import DEFAULT_FTA_MAJOR, DEFAULT_FTA_MINOR
from gaphor.UML.recipes import stereotypes_str
DEFAULT_WIDTH = 40
@represents(raaml.MAJORITY_VOTE)
class MajorityVoteItem(Classified, ElementPresentation):
    """Diagram presentation item for a RAAML FTA majority vote gate."""

    def __init__(self, diagram, id=None):
        super().__init__(diagram, id, width=DEFAULT_WIDTH, height=DEFAULT_FTA_MAJOR)
        # Re-render whenever the element's name or its package's name changes.
        self.watch("subject[NamedElement].name").watch(
            "subject[NamedElement].namespace.name"
        )

    def update_shapes(self, event=None):
        """Rebuild the composite shape: the gate icon plus stereotype, name,
        and owning-package labels."""
        self.shape = IconBox(
            Box(
                draw=draw_majority_vote_gate,
            ),
            Text(
                text=lambda: stereotypes_str(
                    self.subject, [self.diagram.gettext("Majority Vote Gate")]
                ),
            ),
            Text(
                text=lambda: self.subject.name or "",
                width=lambda: self.width - 4,
                style={
                    "font-weight": FontWeight.BOLD,
                    "font-style": FontStyle.NORMAL,
                },
            ),
            Text(
                text=lambda: from_package_str(self),
                style={"font-size": "x-small"},
            ),
        )
def draw_majority_vote_gate(box, context: DrawContext, bounding_box: Rectangle):
    """Render a majority vote gate: two side walls joined by a tall top arc
    and a shallow bottom arc, with an "m" label centered inside."""
    cr = context.cairo
    x_left = 0
    x_right = bounding_box.width
    top_y = bounding_box.height / 3.0
    bottom_y = bounding_box.height
    span = x_right - x_left

    def half_ellipse(y, ry):
        # Trace the upper half of an ellipse of width `span` and height `ry`,
        # horizontally centered, with its flat side at height `y`.
        cr.move_to(x_left, y)
        cr.save()
        cr.translate(x_left + span / 2.0, y)
        cr.scale(span / 2.0, ry / 2.0)
        cr.arc(0.0, 0.0, 1.0, pi, 0)
        cr.restore()

    # Side walls (left, then right)
    cr.move_to(x_left, bottom_y)
    cr.line_to(x_left, top_y)
    cr.move_to(x_right, bottom_y)
    cr.line_to(x_right, top_y)

    # Domed top, then the shallow bottom arc
    half_ellipse(top_y, bounding_box.height * 2.0 / 3.0)
    half_ellipse(bottom_y, bounding_box.height / 4.0)

    # Pick a font size for the "m" label that scales with the gate's
    # aspect ratio (very tall, very wide, or roughly proportional).
    text = "m"
    if bounding_box.height > 3 * bounding_box.width:
        cr.set_font_size(32 * bounding_box.width / DEFAULT_WIDTH)
    elif bounding_box.width > 3 * bounding_box.height:
        cr.set_font_size(40 * bounding_box.height / DEFAULT_FTA_MAJOR)
    else:
        cr.set_font_size(
            17
            * (bounding_box.width + bounding_box.height)
            / (DEFAULT_FTA_MINOR + DEFAULT_FTA_MAJOR)
        )

    # Center the glyph using its measured extents.
    x_bearing, y_bearing, text_width, text_height, _, _ = cr.text_extents(text)
    cr.move_to(
        bounding_box.width / 2.0 - (text_width / 2 + x_bearing),
        bounding_box.height / 2.0 - (text_height / 2 + y_bearing),
    )
    cr.show_text(text)
    stroke(context, fill=True)
|
archivebox | main | __package__ = "archivebox"
import os
import platform
import shutil
import sys
from datetime import date, datetime
from pathlib import Path
from typing import IO, Dict, Iterable, List, Optional, Union
from crontab import CronSlices, CronTab
from django.db.models import QuerySet
from django.utils import timezone
from .cli import (
archive_cmds,
display_first,
list_subcommands,
main_cmds,
meta_cmds,
run_subcommand,
)
from .config import (
ALLOWED_IN_OUTPUT_DIR,
ANSI,
ARCHIVE_DIR,
ARCHIVE_DIR_NAME,
ARCHIVEBOX_BINARY,
CHROME_BINARY,
CHROME_VERSION,
CODE_LOCATIONS,
COMMIT_HASH,
CONFIG,
CONFIG_FILE,
DATA_LOCATIONS,
DEBUG,
DEPENDENCIES,
ENFORCE_ATOMIC_WRITES,
EXTERNAL_LOCATIONS,
HTML_INDEX_FILENAME,
IN_DOCKER,
IS_TTY,
JSON_INDEX_FILENAME,
LOGS_DIR,
MERCURY_VERSION,
NODE_VERSION,
ONLY_NEW,
OUTPUT_DIR,
OUTPUT_PERMISSIONS,
PACKAGE_DIR,
PGID,
PUID,
PYTHON_BINARY,
READABILITY_VERSION,
SEARCH_BACKEND_ENGINE,
SINGLEFILE_VERSION,
SOURCES_DIR,
SQL_INDEX_FILENAME,
TIMEZONE,
USER,
USER_CONFIG,
VERSION,
YOUTUBEDL_BINARY,
YOUTUBEDL_VERSION,
ConfigDict,
check_data_folder,
check_dependencies,
get_real_name,
hint,
load_all_config,
setup_django,
stderr,
write_config_file,
)
from .extractors import archive_link, archive_links, ignore_methods
from .index import (
dedupe_links,
fix_invalid_folder_locations,
get_archived_folders,
get_corrupted_folders,
get_duplicate_folders,
get_indexed_folders,
get_invalid_folders,
get_orphaned_folders,
get_present_folders,
get_unarchived_folders,
get_unrecognized_folders,
get_valid_folders,
load_main_index,
parse_links_from_source,
snapshot_filter,
write_link_details,
write_main_index,
)
from .index.csv import links_to_csv
from .index.html import generate_index_from_links
from .index.json import (
generate_json_index_from_links,
parse_json_links_details,
parse_json_main_index,
)
from .index.schema import Link
from .index.sql import apply_migrations, get_admins, remove_from_sql_main_index
from .logging_util import (
TERM_WIDTH,
TimedProgress,
log_crawl_started,
log_importing_started,
log_list_finished,
log_list_started,
log_removal_finished,
log_removal_started,
printable_config,
printable_dependency_version,
printable_filesize,
printable_folder_status,
printable_folders,
)
from .parsers import parse_links_memory, save_file_as_source, save_text_as_source
from .search import flush_search_index, index_links
from .system import CRON_COMMENT, dedupe_cron_jobs, get_dir_size
from .system import run as run_shell
from .util import enforce_types # type: ignore
@enforce_types
def help(out_dir: Path = OUTPUT_DIR) -> None:
    """Print the ArchiveBox help message and usage"""
    all_subcommands = list_subcommands()
    # Command summaries grouped in display order: meta commands, main
    # commands, archive commands, then everything not in display_first.
    COMMANDS_HELP_TEXT = (
        "\n ".join(
            f"{cmd.ljust(20)} {summary}"
            for cmd, summary in all_subcommands.items()
            if cmd in meta_cmds
        )
        + "\n\n "
        + "\n ".join(
            f"{cmd.ljust(20)} {summary}"
            for cmd, summary in all_subcommands.items()
            if cmd in main_cmds
        )
        + "\n\n "
        + "\n ".join(
            f"{cmd.ljust(20)} {summary}"
            for cmd, summary in all_subcommands.items()
            if cmd in archive_cmds
        )
        + "\n\n "
        + "\n ".join(
            f"{cmd.ljust(20)} {summary}"
            for cmd, summary in all_subcommands.items()
            if cmd not in display_first
        )
    )
    # An initialized collection (index.sqlite3 present) gets the full usage
    # text; otherwise show first-run guidance instead.
    if (Path(out_dir) / SQL_INDEX_FILENAME).exists():
        print(
            """{green}ArchiveBox v{}: The self-hosted internet archive.{reset}
{lightred}Active data directory:{reset}
{}
{lightred}Usage:{reset}
archivebox [command] [--help] [--version] [...args]
{lightred}Commands:{reset}
{}
{lightred}Example Use:{reset}
mkdir my-archive; cd my-archive/
archivebox init
archivebox status
archivebox add https://example.com/some/page
archivebox add --depth=1 ~/Downloads/bookmarks_export.html
archivebox list --sort=timestamp --csv=timestamp,url,is_archived
archivebox schedule --every=day https://example.com/some/feed.rss
archivebox update --resume=15109948213.123
{lightred}Documentation:{reset}
https://github.com/ArchiveBox/ArchiveBox/wiki
""".format(VERSION, out_dir, COMMANDS_HELP_TEXT, **ANSI)
        )
    else:
        print("{green}Welcome to ArchiveBox v{}!{reset}".format(VERSION, **ANSI))
        print()
        if IN_DOCKER:
            print(
                "When using Docker, you need to mount a volume to use as your data dir:"
            )
            print(" docker run -v /some/path:/data archivebox ...")
            print()
        print("To import an existing archive (from a previous version of ArchiveBox):")
        print(
            " 1. cd into your data dir OUTPUT_DIR (usually ArchiveBox/output) and run:"
        )
        print(" 2. archivebox init")
        print()
        print("To start a new archive:")
        print(" 1. Create an empty directory, then cd into it and run:")
        print(" 2. archivebox init")
        print()
        print("For more information, see the documentation here:")
        print(" https://github.com/ArchiveBox/ArchiveBox/wiki")
@enforce_types
def version(quiet: bool = False, out_dir: Path = OUTPUT_DIR) -> None:
    """Print the ArchiveBox version and dependency information"""
    print(VERSION)
    if not quiet:
        # Example output:
        # 0.6.3
        # ArchiveBox v0.6.3 Cpython Linux Linux-4.19.121-linuxkit-x86_64-with-glibc2.28 x86_64 (in Docker) (in TTY)
        # DEBUG=False IN_DOCKER=True IS_TTY=True TZ=UTC FS_ATOMIC=True FS_REMOTE=False FS_PERMS=644 501:20 SEARCH_BACKEND=ripgrep
        p = platform.uname()
        print(
            "ArchiveBox v{}".format(VERSION),
            *((COMMIT_HASH[:7],) if COMMIT_HASH else ()),
            sys.implementation.name.title(),
            p.system,
            platform.platform(),
            p.machine,
        )
        # Remote/mounted output dirs affect atomic-write behavior, so report them.
        OUTPUT_IS_REMOTE_FS = (
            DATA_LOCATIONS["OUTPUT_DIR"]["is_mount"]
            or DATA_LOCATIONS["ARCHIVE_DIR"]["is_mount"]
        )
        print(
            f"DEBUG={DEBUG}",
            f"IN_DOCKER={IN_DOCKER}",
            f"IS_TTY={IS_TTY}",
            f"TZ={TIMEZONE}",
            # f'DB=django.db.backends.sqlite3 (({CONFIG["SQLITE_JOURNAL_MODE"]})', # add this if we have more useful info to show eventually
            f"FS_ATOMIC={ENFORCE_ATOMIC_WRITES}",
            f"FS_REMOTE={OUTPUT_IS_REMOTE_FS}",
            f"FS_PERMS={OUTPUT_PERMISSIONS} {PUID}:{PGID}",
            f"SEARCH_BACKEND={SEARCH_BACKEND_ENGINE}",
        )
        print()
        print("{white}[i] Dependency versions:{reset}".format(**ANSI))
        for name, dependency in DEPENDENCIES.items():
            print(printable_dependency_version(name, dependency))
            # add a newline between core dependencies and extractor dependencies for easier reading
            if name == "ARCHIVEBOX_BINARY":
                print()
        print()
        print("{white}[i] Source-code locations:{reset}".format(**ANSI))
        for name, folder in CODE_LOCATIONS.items():
            print(printable_folder_status(name, folder))
        print()
        print("{white}[i] Secrets locations:{reset}".format(**ANSI))
        for name, folder in EXTERNAL_LOCATIONS.items():
            print(printable_folder_status(name, folder))
        print()
        # Only list data locations when the output dir looks like a valid
        # collection; otherwise just print the header.
        if DATA_LOCATIONS["OUTPUT_DIR"]["is_valid"]:
            print("{white}[i] Data locations:{reset}".format(**ANSI))
            for name, folder in DATA_LOCATIONS.items():
                print(printable_folder_status(name, folder))
        else:
            print()
            print("{white}[i] Data locations:{reset}".format(**ANSI))
        print()
        check_dependencies()
@enforce_types
def run(
    subcommand: str,
    subcommand_args: Optional[List[str]],
    stdin: Optional[IO] = None,
    out_dir: Path = OUTPUT_DIR,
) -> None:
    """Run a given ArchiveBox subcommand with the given list of args"""
    # Thin dispatcher: all work happens in cli.run_subcommand, executed with
    # the data directory as its working context.
    run_subcommand(
        subcommand=subcommand,
        subcommand_args=subcommand_args,
        stdin=stdin,
        pwd=out_dir,
    )
@enforce_types
def init(
    force: bool = False,
    quick: bool = False,
    setup: bool = False,
    out_dir: Path = OUTPUT_DIR,
) -> None:
    """Initialize a new ArchiveBox collection in the current directory"""
    # Phases: sanity-check the folder, create the dir structure and config,
    # run DB migrations, then reconcile links from legacy/JSON indexes.
    from core.models import Snapshot

    out_dir.mkdir(exist_ok=True)
    # "Empty" means nothing beyond files allowed to coexist with a collection.
    is_empty = not len(set(os.listdir(out_dir)) - ALLOWED_IN_OUTPUT_DIR)
    if (out_dir / JSON_INDEX_FILENAME).exists():
        stderr(
            "[!] This folder contains a JSON index. It is deprecated, and will no longer be kept up to date automatically.",
            color="lightyellow",
        )
        stderr(
            " You can run `archivebox list --json --with-headers > static_index.json` to manually generate it.",
            color="lightyellow",
        )
    existing_index = (out_dir / SQL_INDEX_FILENAME).exists()
    if is_empty and not existing_index:
        print(
            "{green}[+] Initializing a new ArchiveBox v{} collection...{reset}".format(
                VERSION, **ANSI
            )
        )
        print(
            "{green}----------------------------------------------------------------------{reset}".format(
                **ANSI
            )
        )
    elif existing_index:
        # TODO: properly detect and print the existing version in current index as well
        print(
            "{green}[^] Verifying and updating existing ArchiveBox collection to v{}...{reset}".format(
                VERSION, **ANSI
            )
        )
        print(
            "{green}----------------------------------------------------------------------{reset}".format(
                **ANSI
            )
        )
    else:
        # Non-empty folder without an index: refuse unless --force was given.
        if force:
            stderr(
                "[!] This folder appears to already have files in it, but no index.sqlite3 is present.",
                color="lightyellow",
            )
            stderr(
                " Because --force was passed, ArchiveBox will initialize anyway (which may overwrite existing files)."
            )
        else:
            stderr(
                (
                    "{red}[X] This folder appears to already have files in it, but no index.sqlite3 present.{reset}\n\n"
                    " You must run init in a completely empty directory, or an existing data folder.\n\n"
                    " {lightred}Hint:{reset} To import an existing data folder make sure to cd into the folder first, \n"
                    " then run and run 'archivebox init' to pick up where you left off.\n\n"
                    " (Always make sure your data folder is backed up first before updating ArchiveBox)"
                ).format(out_dir, **ANSI)
                # NOTE(review): the positional out_dir arg is unused — this
                # template has no positional {} placeholder; confirm intent.
            )
            raise SystemExit(2)
    if existing_index:
        print(
            "\n{green}[*] Verifying archive folder structure...{reset}".format(**ANSI)
        )
    else:
        print("\n{green}[+] Building archive folder structure...{reset}".format(**ANSI))
    # Create the standard subfolders and write out the default config file.
    print(
        f" + ./{ARCHIVE_DIR.relative_to(OUTPUT_DIR)}, ./{SOURCES_DIR.relative_to(OUTPUT_DIR)}, ./{LOGS_DIR.relative_to(OUTPUT_DIR)}..."
    )
    Path(SOURCES_DIR).mkdir(exist_ok=True)
    Path(ARCHIVE_DIR).mkdir(exist_ok=True)
    Path(LOGS_DIR).mkdir(exist_ok=True)
    print(f" + ./{CONFIG_FILE.relative_to(OUTPUT_DIR)}...")
    write_config_file({}, out_dir=out_dir)
    if (out_dir / SQL_INDEX_FILENAME).exists():
        print(
            "\n{green}[*] Verifying main SQL index and running any migrations needed...{reset}".format(
                **ANSI
            )
        )
    else:
        print(
            "\n{green}[+] Building main SQL index and running initial migrations...{reset}".format(
                **ANSI
            )
        )
    DATABASE_FILE = out_dir / SQL_INDEX_FILENAME
    for migration_line in apply_migrations(out_dir):
        print(f" {migration_line}")
    assert DATABASE_FILE.exists()
    print()
    print(f" √ ./{DATABASE_FILE.relative_to(OUTPUT_DIR)}")
    # from django.contrib.auth.models import User
    # if IS_TTY and not User.objects.filter(is_superuser=True).exists():
    # print('{green}[+] Creating admin user account...{reset}'.format(**ANSI))
    # call_command("createsuperuser", interactive=True)
    print()
    print(
        "{green}[*] Checking links from indexes and archive folders (safe to Ctrl+C)...{reset}".format(
            **ANSI
        )
    )
    all_links = Snapshot.objects.none()
    pending_links: Dict[str, Link] = {}
    if existing_index:
        all_links = load_main_index(out_dir=out_dir, warn=False)
        print(
            " √ Loaded {} links from existing main index.".format(all_links.count())
        )
    if quick:
        print(" > Skipping full snapshot directory check (quick mode)")
    else:
        # Full reconciliation pass; interruptible, resumed by re-running init.
        try:
            # Links in data folders that dont match their timestamp
            fixed, cant_fix = fix_invalid_folder_locations(out_dir=out_dir)
            if fixed:
                print(
                    " {lightyellow}√ Fixed {} data directory locations that didn't match their link timestamps.{reset}".format(
                        len(fixed), **ANSI
                    )
                )
            if cant_fix:
                print(
                    " {lightyellow}! Could not fix {} data directory locations due to conflicts with existing folders.{reset}".format(
                        len(cant_fix), **ANSI
                    )
                )
            # Links in JSON index but not in main index
            orphaned_json_links = {
                link.url: link
                for link in parse_json_main_index(out_dir)
                if not all_links.filter(url=link.url).exists()
            }
            if orphaned_json_links:
                pending_links.update(orphaned_json_links)
                print(
                    " {lightyellow}√ Added {} orphaned links from existing JSON index...{reset}".format(
                        len(orphaned_json_links), **ANSI
                    )
                )
            # Links in data dir indexes but not in main index
            orphaned_data_dir_links = {
                link.url: link
                for link in parse_json_links_details(out_dir)
                if not all_links.filter(url=link.url).exists()
            }
            if orphaned_data_dir_links:
                pending_links.update(orphaned_data_dir_links)
                print(
                    " {lightyellow}√ Added {} orphaned links from existing archive directories.{reset}".format(
                        len(orphaned_data_dir_links), **ANSI
                    )
                )
            # Links in invalid/duplicate data dirs
            invalid_folders = {
                folder: link
                for folder, link in get_invalid_folders(
                    all_links, out_dir=out_dir
                ).items()
            }
            if invalid_folders:
                print(
                    " {lightyellow}! Skipped adding {} invalid link data directories.{reset}".format(
                        len(invalid_folders), **ANSI
                    )
                )
                print(
                    " X "
                    + "\n X ".join(
                        f"./{Path(folder).relative_to(OUTPUT_DIR)} {link}"
                        for folder, link in invalid_folders.items()
                    )
                )
                print()
                print(
                    " {lightred}Hint:{reset} For more information about the link data directories that were skipped, run:".format(
                        **ANSI
                    )
                )
                print(" archivebox status")
                print(" archivebox list --status=invalid")
        except (KeyboardInterrupt, SystemExit):
            stderr()
            stderr(
                "[x] Stopped checking archive directories due to Ctrl-C/SIGTERM",
                color="red",
            )
            stderr(
                " Your archive data is safe, but you should re-run `archivebox init` to finish the process later."
            )
            stderr()
            stderr(
                " {lightred}Hint:{reset} In the future you can run a quick init without checking dirs like so:".format(
                    **ANSI
                )
            )
            stderr(" archivebox init --quick")
            raise SystemExit(1)
    write_main_index(list(pending_links.values()), out_dir=out_dir)
    print(
        "\n{green}----------------------------------------------------------------------{reset}".format(
            **ANSI
        )
    )
    if existing_index:
        print(
            "{green}[√] Done. Verified and updated the existing ArchiveBox collection.{reset}".format(
                **ANSI
            )
        )
    else:
        # TODO: allow creating new supersuer via env vars on first init
        # if config.HTTP_USER and config.HTTP_PASS:
        # from django.contrib.auth.models import User
        # User.objects.create_superuser(HTTP_USER, '', HTTP_PASS)
        print(
            "{green}[√] Done. A new ArchiveBox collection was initialized ({} links).{reset}".format(
                len(all_links) + len(pending_links), **ANSI
            )
        )
    # Rename any legacy flat-file indexes out of the way.
    json_index = out_dir / JSON_INDEX_FILENAME
    html_index = out_dir / HTML_INDEX_FILENAME
    index_name = f"{date.today()}_index_old"
    if json_index.exists():
        json_index.rename(f"{index_name}.json")
    if html_index.exists():
        html_index.rename(f"{index_name}.html")
    if setup:
        run_subcommand("setup", pwd=out_dir)
    if Snapshot.objects.count() < 25:  # hide the hints for experienced users
        print()
        print(
            " {lightred}Hint:{reset} To view your archive index, run:".format(**ANSI)
        )
        print(" archivebox server # then visit http://127.0.0.1:8000")
        print()
        print(" To add new links, you can run:")
        print(" archivebox add < ~/some/path/to/list_of_links.txt")
        print()
        print(" For more usage and examples, run:")
        print(" archivebox help")
@enforce_types
def status(out_dir: Path = OUTPUT_DIR) -> None:
    """Print out some info and statistics about the archive collection

    Reports, in order: main index size, SQL vs JSON link counts, data-dir
    size and per-status folder counts, recent logins/changes, and the 10
    most recently updated snapshots. Read-only: prints to stdout only.
    """
    check_data_folder(out_dir=out_dir)
    from core.models import Snapshot
    from django.contrib.auth import get_user_model
    User = get_user_model()
    # --- Section 1: main index files in the collection root ---
    print("{green}[*] Scanning archive main index...{reset}".format(**ANSI))
    print(ANSI["lightyellow"], f" {out_dir}/*", ANSI["reset"])
    num_bytes, num_dirs, num_files = get_dir_size(
        out_dir, recursive=False, pattern="index."
    )
    size = printable_filesize(num_bytes)
    print(f" Index size: {size} across {num_files} files")
    print()
    links = load_main_index(out_dir=out_dir)
    num_sql_links = links.count()
    num_link_details = sum(1 for link in parse_json_links_details(out_dir=out_dir))
    print(
        f" > SQL Main Index: {num_sql_links} links".ljust(36),
        f"(found in {SQL_INDEX_FILENAME})",
    )
    print(
        f" > JSON Link Details: {num_link_details} links".ljust(36),
        f"(found in {ARCHIVE_DIR_NAME}/*/index.json)",
    )
    print()
    # --- Section 2: per-snapshot data directories, bucketed by status ---
    print("{green}[*] Scanning archive data directories...{reset}".format(**ANSI))
    print(ANSI["lightyellow"], f" {ARCHIVE_DIR}/*", ANSI["reset"])
    num_bytes, num_dirs, num_files = get_dir_size(ARCHIVE_DIR)
    size = printable_filesize(num_bytes)
    print(f" Size: {size} across {num_files} files in {num_dirs} directories")
    print(ANSI["black"])
    num_indexed = len(get_indexed_folders(links, out_dir=out_dir))
    num_archived = len(get_archived_folders(links, out_dir=out_dir))
    num_unarchived = len(get_unarchived_folders(links, out_dir=out_dir))
    print(f" > indexed: {num_indexed}".ljust(36), f"({get_indexed_folders.__doc__})")
    print(
        f" > archived: {num_archived}".ljust(36),
        f"({get_archived_folders.__doc__})",
    )
    print(
        f" > unarchived: {num_unarchived}".ljust(36),
        f"({get_unarchived_folders.__doc__})",
    )
    num_present = len(get_present_folders(links, out_dir=out_dir))
    num_valid = len(get_valid_folders(links, out_dir=out_dir))
    print()
    print(f" > present: {num_present}".ljust(36), f"({get_present_folders.__doc__})")
    print(f" > valid: {num_valid}".ljust(36), f"({get_valid_folders.__doc__})")
    duplicate = get_duplicate_folders(links, out_dir=out_dir)
    orphaned = get_orphaned_folders(links, out_dir=out_dir)
    corrupted = get_corrupted_folders(links, out_dir=out_dir)
    unrecognized = get_unrecognized_folders(links, out_dir=out_dir)
    # merge the four dicts so a folder in several buckets is only counted once
    num_invalid = len({**duplicate, **orphaned, **corrupted, **unrecognized})
    print(
        f" > invalid: {num_invalid}".ljust(36), f"({get_invalid_folders.__doc__})"
    )
    print(
        f" > duplicate: {len(duplicate)}".ljust(36),
        f"({get_duplicate_folders.__doc__})",
    )
    print(
        f" > orphaned: {len(orphaned)}".ljust(36),
        f"({get_orphaned_folders.__doc__})",
    )
    print(
        f" > corrupted: {len(corrupted)}".ljust(36),
        f"({get_corrupted_folders.__doc__})",
    )
    print(
        f" > unrecognized: {len(unrecognized)}".ljust(36),
        f"({get_unrecognized_folders.__doc__})",
    )
    print(ANSI["reset"])
    # contextual hints, only shown when the matching bucket is non-empty
    if num_indexed:
        print(
            " {lightred}Hint:{reset} You can list link data directories by status like so:".format(
                **ANSI
            )
        )
        print(
            " archivebox list --status=<status> (e.g. indexed, corrupted, archived, etc.)"
        )
    if orphaned:
        print(
            " {lightred}Hint:{reset} To automatically import orphaned data directories into the main index, run:".format(
                **ANSI
            )
        )
        print(" archivebox init")
    if num_invalid:
        print(
            " {lightred}Hint:{reset} You may need to manually remove or fix some invalid data directories, afterwards make sure to run:".format(
                **ANSI
            )
        )
        print(" archivebox init")
    print()
    # --- Section 3: user accounts and recent activity ---
    print(
        "{green}[*] Scanning recent archive changes and user logins:{reset}".format(
            **ANSI
        )
    )
    print(ANSI["lightyellow"], f" {LOGS_DIR}/*", ANSI["reset"])
    users = get_admins().values_list("username", flat=True)
    print(f' UI users {len(users)}: {", ".join(users)}')
    last_login = User.objects.order_by("last_login").last()
    if last_login:
        print(
            f" Last UI login: {last_login.username} @ {str(last_login.last_login)[:16]}"
        )
    last_updated = Snapshot.objects.order_by("updated").last()
    if last_updated:
        print(f" Last changes: {str(last_updated.updated)[:16]}")
    if not users:
        print()
        print(
            " {lightred}Hint:{reset} You can create an admin user by running:".format(
                **ANSI
            )
        )
        print(" archivebox manage createsuperuser")
    print()
    # 10 most recently updated snapshots, one truncated line each
    for snapshot in links.order_by("-updated")[:10]:
        if not snapshot.updated:
            continue
        print(
            ANSI["black"],
            (
                f" > {str(snapshot.updated)[:16]} "
                f'[{snapshot.num_outputs} {("X", "√")[snapshot.is_archived]} {printable_filesize(snapshot.archive_size)}] '
                f'"{snapshot.title}": {snapshot.url}'
            )[: TERM_WIDTH()],  # truncate each row to the terminal width
            ANSI["reset"],
        )
    print(ANSI["black"], " ...", ANSI["reset"])
@enforce_types
def oneshot(url: str, extractors: str = "", out_dir: Path = OUTPUT_DIR):
    """
    Create a single URL archive folder with an index.json and index.html, and all the archive method outputs.
    You can run this to archive single pages without needing to create a whole collection with archivebox init.
    """
    oneshot_link, _ = parse_links_memory([url])
    # BUGFIX: the old check only rejected >1 parsed links; when the parser
    # found none, oneshot_link[0] below crashed with an IndexError instead
    # of printing a usable error. Require exactly one link.
    if len(oneshot_link) != 1:
        stderr("[X] You should pass a single url to the oneshot command", color="red")
        raise SystemExit(2)
    # skip the 'title' extractor by default unless explicit extractors were given
    methods = extractors.split(",") if extractors else ignore_methods(["title"])
    archive_link(oneshot_link[0], out_dir=out_dir, methods=methods)
    return oneshot_link
@enforce_types
def add(
    urls: Union[str, List[str]],
    tag: str = "",
    depth: int = 0,
    update: bool = not ONLY_NEW,
    update_all: bool = False,
    index_only: bool = False,
    overwrite: bool = False,
    # duplicate: bool=False,  # TODO: reuse the logic from admin.py resnapshot to allow adding multiple snapshots by appending timestamp automatically
    init: bool = False,
    extractors: str = "",
    parser: str = "auto",
    out_dir: Path = OUTPUT_DIR,
) -> List[Link]:
    """Add a new URL or list of URLs to your archive

    Saves the raw input to ./sources as a write-ahead log, parses it into
    Links (optionally crawling each one level deeper with depth=1), dedupes
    against the existing index, runs the archive extractors, then applies
    any requested tags. Returns the full main index afterwards.
    """
    from core.models import Tag

    assert depth in (0, 1), "Depth must be 0 or 1 (depth >1 is not supported yet)"
    extractors = extractors.split(",") if extractors else []

    if init:
        run_subcommand("init", stdin=None, pwd=out_dir)

    # Load list of links from the existing index
    check_data_folder(out_dir=out_dir)
    check_dependencies()
    new_links: List[Link] = []
    all_links = load_main_index(out_dir=out_dir)

    log_importing_started(urls=urls, depth=depth, index_only=index_only)
    if isinstance(urls, str):
        # save verbatim stdin to sources
        write_ahead_log = save_text_as_source(
            urls, filename="{ts}-import.txt", out_dir=out_dir
        )
    elif isinstance(urls, list):
        # save verbatim args to sources
        write_ahead_log = save_text_as_source(
            "\n".join(urls), filename="{ts}-import.txt", out_dir=out_dir
        )
    new_links += parse_links_from_source(write_ahead_log, root_url=None, parser=parser)

    # If we're going one level deeper, download each link and look for more links
    new_links_depth = []
    if new_links and depth == 1:
        log_crawl_started(new_links)
        for new_link in new_links:
            try:
                downloaded_file = save_file_as_source(
                    new_link.url,
                    filename=f"{new_link.timestamp}-crawl-{new_link.domain}.txt",
                    out_dir=out_dir,
                )
                new_links_depth += parse_links_from_source(
                    downloaded_file, root_url=new_link.url
                )
            except Exception as err:
                # BUGFIX: this message was a plain string missing the f-prefix,
                # so it printed the literal text "{new_link.url}" instead of the URL.
                stderr(
                    f"[!] Failed to get contents of URL {new_link.url}",
                    err,
                    color="red",
                )

    # dedupe by URL within this import (last occurrence wins), then against the index
    imported_links = list(
        {link.url: link for link in (new_links + new_links_depth)}.values()
    )
    new_links = dedupe_links(all_links, imported_links)

    write_main_index(links=new_links, out_dir=out_dir)
    all_links = load_main_index(out_dir=out_dir)

    if index_only:
        # mock archive all the links using the fake index_only extractor method in order to update their state
        if overwrite:
            archive_links(
                imported_links,
                overwrite=overwrite,
                methods=["index_only"],
                out_dir=out_dir,
            )
        else:
            archive_links(
                new_links, overwrite=False, methods=["index_only"], out_dir=out_dir
            )
    else:
        # fully run the archive extractor methods for each link
        archive_kwargs = {
            "out_dir": out_dir,
        }
        if extractors:
            archive_kwargs["methods"] = extractors

        stderr()
        ts = datetime.now(timezone.utc).strftime("%Y-%m-%d %H:%M:%S")
        # BUGFIX (three branches below): the counts were passed both inside the
        # f-string and again as a bare positional argument, so stderr printed
        # each count twice. The stray positional args are removed.
        if update:
            stderr(
                f"[*] [{ts}] Archiving + updating {len(imported_links)}/{len(all_links)}",
                "URLs from added set...",
                color="green",
            )
            archive_links(imported_links, overwrite=overwrite, **archive_kwargs)
        elif update_all:
            stderr(
                f"[*] [{ts}] Archiving + updating {len(all_links)}/{len(all_links)}",
                "URLs from entire library...",
                color="green",
            )
            archive_links(all_links, overwrite=overwrite, **archive_kwargs)
        elif overwrite:
            stderr(
                f"[*] [{ts}] Archiving + overwriting {len(imported_links)}/{len(all_links)}",
                "URLs from added set...",
                color="green",
            )
            archive_links(imported_links, overwrite=True, **archive_kwargs)
        elif new_links:
            stderr(
                f"[*] [{ts}] Archiving {len(new_links)}/{len(all_links)} URLs from added set...",
                color="green",
            )
            archive_links(new_links, overwrite=False, **archive_kwargs)

    # add any tags to imported links
    tags = [
        Tag.objects.get_or_create(name=name.strip())[0]
        for name in tag.split(",")
        if name.strip()
    ]
    if tags:
        for link in imported_links:
            snapshot = link.as_snapshot()
            snapshot.tags.add(*tags)
            snapshot.tags_str(nocache=True)
            snapshot.save()
            # print(f' √ Tagged {len(imported_links)} Snapshots with {len(tags)} tags {tags_str}')
    return all_links
@enforce_types
def remove(
    filter_str: Optional[str] = None,
    filter_patterns: Optional[List[str]] = None,
    filter_type: str = "exact",
    snapshots: Optional[QuerySet] = None,
    after: Optional[float] = None,
    before: Optional[float] = None,
    yes: bool = False,
    delete: bool = False,
    out_dir: Path = OUTPUT_DIR,
) -> List[Link]:
    """Remove the specified URLs from the archive

    Selects Snapshots via an explicit queryset or filter patterns, then
    removes them from the SQL index (and, with delete=True, also deletes
    their data directories on disk). Exits with status 2 on bad arguments
    and status 1 when nothing matched. Returns the remaining main index.
    """
    check_data_folder(out_dir=out_dir)
    # Validate filter inputs only when no pre-selected queryset was passed:
    # exactly one of filter_str / filter_patterns must be provided.
    if snapshots is None:
        if filter_str and filter_patterns:
            stderr(
                "[X] You should pass either a pattern as an argument, "
                "or pass a list of patterns via stdin, but not both.\n",
                color="red",
            )
            raise SystemExit(2)
        elif not (filter_str or filter_patterns):
            stderr(
                "[X] You should pass either a pattern as an argument, "
                "or pass a list of patterns via stdin.",
                color="red",
            )
            stderr()
            hint(
                (
                    "To remove all urls you can run:",
                    'archivebox remove --filter-type=regex ".*"',
                )
            )
            stderr()
            raise SystemExit(2)
        elif filter_str:
            # one pattern per line of the argument string
            filter_patterns = [ptn.strip() for ptn in filter_str.split("\n")]
    list_kwargs = {
        "filter_patterns": filter_patterns,
        "filter_type": filter_type,
        "after": after,
        "before": before,
    }
    if snapshots:
        list_kwargs["snapshots"] = snapshots
    log_list_started(filter_patterns, filter_type)
    timer = TimedProgress(360, prefix=" ")
    try:
        # resolve the final selection of snapshots to remove
        snapshots = list_links(**list_kwargs)
    finally:
        timer.end()
    if not snapshots.exists():
        log_removal_finished(0, 0)
        raise SystemExit(1)
    log_links = [link.as_link() for link in snapshots]
    log_list_finished(log_links)
    # prompts for confirmation unless yes=True (handled inside the logger)
    log_removal_started(log_links, yes=yes, delete=delete)
    timer = TimedProgress(360, prefix=" ")
    try:
        for snapshot in snapshots:
            if delete:
                # destructive: wipe the snapshot's data directory from disk
                shutil.rmtree(snapshot.as_link().link_dir, ignore_errors=True)
    finally:
        timer.end()
    # count before deleting the rows, then purge from search + SQL index
    to_remove = snapshots.count()
    flush_search_index(snapshots=snapshots)
    remove_from_sql_main_index(snapshots=snapshots, out_dir=out_dir)
    all_snapshots = load_main_index(out_dir=out_dir)
    log_removal_finished(all_snapshots.count(), to_remove)
    return all_snapshots
@enforce_types
def update(
    resume: Optional[float] = None,
    only_new: bool = ONLY_NEW,
    index_only: bool = False,
    overwrite: bool = False,
    filter_patterns_str: Optional[str] = None,
    filter_patterns: Optional[List[str]] = None,
    filter_type: Optional[str] = None,
    status: Optional[str] = None,
    after: Optional[str] = None,
    before: Optional[str] = None,
    extractors: str = "",
    out_dir: Path = OUTPUT_DIR,
) -> List[Link]:
    """Import any new links from subscriptions and retry any previously failed/skipped links"""
    # NOTE(review): filter_patterns_str is accepted but never consumed here
    # (unlike list_all, which splits it into filter_patterns) — confirm
    # whether it should be processed before Step 1.
    check_data_folder(out_dir=out_dir)
    check_dependencies()
    new_links: List[Link] = []  # TODO: Remove input argument: only_new
    extractors = extractors.split(",") if extractors else []
    # Step 1: Filter for selected_links
    matching_snapshots = list_links(
        filter_patterns=filter_patterns,
        filter_type=filter_type,
        before=before,
        after=after,
    )
    matching_folders = list_folders(
        links=matching_snapshots,
        status=status,
        out_dir=out_dir,
    )
    # drop folders whose Link could not be resolved (value is None)
    all_links = [link for link in matching_folders.values() if link]
    if index_only:
        # only rewrite the per-link index files, skip running extractors
        for link in all_links:
            write_link_details(link, out_dir=out_dir, skip_sql_index=True)
        index_links(all_links, out_dir=out_dir)
        return all_links
    # Step 2: Run the archive methods for each link
    # NOTE(review): new_links is never appended to above, so with
    # only_new=True this always archives nothing (see the TODO on new_links).
    to_archive = new_links if only_new else all_links
    if resume:
        # timestamps are stored as strings, so compare lexicographically
        to_archive = [link for link in to_archive if link.timestamp >= str(resume)]
    if not to_archive:
        stderr("")
        stderr(f"[√] Nothing found to resume after {resume}", color="green")
        return all_links
    archive_kwargs = {
        "out_dir": out_dir,
    }
    if extractors:
        archive_kwargs["methods"] = extractors
    archive_links(to_archive, overwrite=overwrite, **archive_kwargs)
    # Step 4: Re-write links index with updated titles, icons, and resources
    all_links = load_main_index(out_dir=out_dir)
    return all_links
@enforce_types
def list_all(
    filter_patterns_str: Optional[str] = None,
    filter_patterns: Optional[List[str]] = None,
    filter_type: str = "exact",
    status: Optional[str] = None,
    after: Optional[float] = None,
    before: Optional[float] = None,
    sort: Optional[str] = None,
    csv: Optional[str] = None,
    json: bool = False,
    html: bool = False,
    with_headers: bool = False,
    out_dir: Path = OUTPUT_DIR,
) -> Iterable[Link]:
    """List, filter, and export information about archive entries"""
    check_data_folder(out_dir=out_dir)

    # Patterns may arrive as an argument or via stdin, but not both.
    if filter_patterns and filter_patterns_str:
        stderr(
            "[X] You should either pass filter patterns as an arguments "
            "or via stdin, but not both.\n",
            color="red",
        )
        raise SystemExit(2)
    if filter_patterns_str:
        filter_patterns = filter_patterns_str.split("\n")

    # Select matching snapshots, optionally re-ordered by the sort field.
    matching = list_links(
        filter_patterns=filter_patterns,
        filter_type=filter_type,
        before=before,
        after=after,
    )
    if sort:
        matching = matching.order_by(sort)

    folder_map = list_folders(
        links=matching,
        status=status,
        out_dir=out_dir,
    )

    # Render in the first requested format: json > html > csv > plain text.
    if json:
        rendered = generate_json_index_from_links(folder_map.values(), with_headers)
    elif html:
        rendered = generate_index_from_links(folder_map.values(), with_headers)
    elif csv:
        rendered = links_to_csv(
            folder_map.values(), cols=csv.split(","), header=with_headers
        )
    else:
        rendered = printable_folders(folder_map, with_headers=with_headers)

    print(rendered)
    return folder_map
@enforce_types
def list_links(
    snapshots: Optional[QuerySet] = None,
    filter_patterns: Optional[List[str]] = None,
    filter_type: str = "exact",
    after: Optional[float] = None,
    before: Optional[float] = None,
    out_dir: Path = OUTPUT_DIR,
) -> Iterable[Link]:
    """Select Snapshots from the index, narrowed by timestamp bounds and filter patterns."""
    check_data_folder(out_dir=out_dir)

    # Start from the caller's queryset, falling back to the whole main index.
    selection = snapshots if snapshots else load_main_index(out_dir=out_dir)

    # Timestamp window: half-open interval [after, before).
    if after is not None:
        selection = selection.filter(timestamp__gte=after)
    if before is not None:
        selection = selection.filter(timestamp__lt=before)

    # Then apply the user's pattern filter, if any.
    if filter_patterns:
        selection = snapshot_filter(selection, filter_patterns, filter_type)

    if not selection:
        stderr(
            "[!] No Snapshots matched your filters:",
            filter_patterns,
            f"({filter_type})",
            color="lightyellow",
        )
    return selection
@enforce_types
def list_folders(
    links: List[Link], status: str, out_dir: Path = OUTPUT_DIR
) -> Dict[str, Optional[Link]]:
    """Map archive data folders to their Link (or None) for one status category.

    Raises:
        ValueError: if `status` is not one of the recognized category names.
    """
    check_data_folder(out_dir=out_dir)
    STATUS_FUNCTIONS = {
        "indexed": get_indexed_folders,
        "archived": get_archived_folders,
        "unarchived": get_unarchived_folders,
        "present": get_present_folders,
        "valid": get_valid_folders,
        "invalid": get_invalid_folders,
        "duplicate": get_duplicate_folders,
        "orphaned": get_orphaned_folders,
        "corrupted": get_corrupted_folders,
        "unrecognized": get_unrecognized_folders,
    }
    # BUGFIX: look the handler up *before* calling it. Previously the call
    # itself was inside the try, so a KeyError raised within a status
    # function was misreported as "Status not recognized."
    try:
        get_folders = STATUS_FUNCTIONS[status]
    except KeyError:
        raise ValueError("Status not recognized.")
    return get_folders(links, out_dir=out_dir)
@enforce_types
def setup(out_dir: Path = OUTPUT_DIR) -> None:
    """Automatically install all ArchiveBox dependencies and extras

    Steps (each skipped when already satisfied):
      1. init the collection dir + django DB
      2. create an admin user (interactive)
      3. install youtube_dl via pip
      4. install chromium via playwright
      5. install single-file/readability/mercury via npm
    Exits with status 1 on the first failed install step.
    """
    if not (out_dir / ARCHIVE_DIR_NAME).exists():
        run_subcommand("init", stdin=None, pwd=out_dir)
    setup_django(out_dir=out_dir, check_db=True)
    from core.models import User

    if not User.objects.filter(is_superuser=True).exists():
        stderr("\n[+] Creating new admin user for the Web UI...", color="green")
        run_subcommand("manage", subcommand_args=["createsuperuser"], pwd=out_dir)

    stderr(
        "\n[+] Installing enabled ArchiveBox dependencies automatically...",
        color="green",
    )

    stderr("\n Installing YOUTUBEDL_BINARY automatically using pip...")
    if YOUTUBEDL_VERSION:
        print(f"{YOUTUBEDL_VERSION} is already installed", YOUTUBEDL_BINARY)
    else:
        try:
            run_shell(
                [
                    PYTHON_BINARY,
                    "-m",
                    "pip",
                    "install",
                    "--upgrade",
                    "--no-cache-dir",
                    "--no-warn-script-location",
                    "youtube_dl",
                ],
                capture_output=False,
                cwd=out_dir,
            )
            pkg_info = run_shell(
                [
                    PYTHON_BINARY,
                    "-m",
                    "pip",
                    "show",
                    "youtube_dl",
                ],
                capture_output=True,
                text=True,
                cwd=out_dir,
            ).stdout
            # BUGFIX: with text=True, stdout is already a str, so the old
            # unconditional .stdout.decode() raised AttributeError. Guard for
            # bytes the same way the CHROME_BINARY branch below does.
            if isinstance(pkg_info, bytes):
                pkg_info = pkg_info.decode()
            pkg_path = pkg_info.split("Location: ")[-1].split("\n", 1)[0]
            NEW_YOUTUBEDL_BINARY = Path(pkg_path) / "youtube_dl" / "__main__.py"
            # NOTE(review): 0o777 is world-writable; 0o755 would suffice for
            # an executable — confirm the permissive mode is intentional.
            os.chmod(NEW_YOUTUBEDL_BINARY, 0o777)
            assert (
                NEW_YOUTUBEDL_BINARY.exists()
            ), f"youtube_dl must exist inside {pkg_path}"
            config(
                f"YOUTUBEDL_BINARY={NEW_YOUTUBEDL_BINARY}", set=True, out_dir=out_dir
            )
        except BaseException as e:  # lgtm [py/catch-base-exception]
            stderr(f"[X] Failed to install python packages: {e}", color="red")
            raise SystemExit(1)

    stderr("\n Installing CHROME_BINARY automatically using playwright...")
    if CHROME_VERSION:
        print(f"{CHROME_VERSION} is already installed", CHROME_BINARY)
    else:
        try:
            run_shell(
                [
                    PYTHON_BINARY,
                    "-m",
                    "pip",
                    "install",
                    "--upgrade",
                    "--no-cache-dir",
                    "--no-warn-script-location",
                    "playwright",
                ],
                capture_output=False,
                cwd=out_dir,
            )
            run_shell(
                [PYTHON_BINARY, "-m", "playwright", "install", "chromium"],
                capture_output=False,
                cwd=out_dir,
            )
            # ask playwright where it put the chromium executable
            proc = run_shell(
                [
                    PYTHON_BINARY,
                    "-c",
                    "from playwright.sync_api import sync_playwright; print(sync_playwright().start().chromium.executable_path)",
                ],
                capture_output=True,
                text=True,
                cwd=out_dir,
            )
            NEW_CHROME_BINARY = (
                proc.stdout.decode().strip()
                if isinstance(proc.stdout, bytes)
                else proc.stdout.strip()
            )
            assert NEW_CHROME_BINARY and len(
                NEW_CHROME_BINARY
            ), "CHROME_BINARY must contain a path"
            config(f"CHROME_BINARY={NEW_CHROME_BINARY}", set=True, out_dir=out_dir)
        except BaseException as e:  # lgtm [py/catch-base-exception]
            stderr(
                f"[X] Failed to install chromium using playwright: {e.__class__.__name__} {e}",
                color="red",
            )
            raise SystemExit(1)

    stderr(
        "\n Installing SINGLEFILE_BINARY, READABILITY_BINARY, MERCURY_BINARY automatically using npm..."
    )
    if not NODE_VERSION:
        stderr(
            "[X] You must first install node using your system package manager",
            color="red",
        )
        hint(
            [
                "curl -sL https://deb.nodesource.com/setup_15.x | sudo -E bash -",
                "or to disable all node-based modules run: archivebox config --set USE_NODE=False",
            ]
        )
        raise SystemExit(1)
    if all((SINGLEFILE_VERSION, READABILITY_VERSION, MERCURY_VERSION)):
        # BUGFIX: message previously misspelled MERCURY_BINARY as "MERCURURY_BINARY"
        print(
            "SINGLEFILE_BINARY, READABILITY_BINARY, and MERCURY_BINARY are already installed"
        )
    else:
        try:
            # clear out old npm package locations
            # NOTE(review): "package_lock.json" (underscore) looks like a typo
            # for npm's "package-lock.json" — confirm which filename is produced
            # before changing the cleanup list.
            paths = (
                out_dir / "package.json",
                out_dir / "package_lock.json",
                out_dir / "node_modules",
            )
            for path in paths:
                if path.is_dir():
                    shutil.rmtree(path, ignore_errors=True)
                elif path.is_file():
                    os.remove(path)
            shutil.copyfile(
                PACKAGE_DIR / "package.json", out_dir / "package.json"
            )  # copy the js requirements list from the source install into the data dir
            # lets blindly assume that calling out to npm via shell works reliably cross-platform 🤡 (until proven otherwise via support tickets)
            run_shell(
                [
                    "npm",
                    "install",
                    "--prefix",
                    str(out_dir),  # force it to put the node_modules dir in this folder
                    "--force",  # overwrite any existing node_modules
                    "--no-save",  # don't bother saving updating the package.json or package-lock.json file
                    "--no-audit",  # don't bother checking for newer versions with security vuln fixes
                    "--no-fund",  # hide "please fund our project" messages
                    "--loglevel",
                    "error",  # only show erros (hide warn/info/debug) during installation
                    # these args are written in blood, change with caution
                ],
                capture_output=False,
                cwd=out_dir,
            )
            os.remove(out_dir / "package.json")
        except BaseException as e:  # lgtm [py/catch-base-exception]
            stderr(f"[X] Failed to install npm packages: {e}", color="red")
            hint(f"Try deleting {out_dir}/node_modules and running it again")
            raise SystemExit(1)

    stderr("\n[√] Set up ArchiveBox and its dependencies successfully.", color="green")
    run_shell(
        [PYTHON_BINARY, ARCHIVEBOX_BINARY, "--version"],
        capture_output=False,
        cwd=out_dir,
    )
@enforce_types
def config(
    config_options_str: Optional[str] = None,
    config_options: Optional[List[str]] = None,
    get: bool = False,
    set: bool = False,
    reset: bool = False,
    out_dir: Path = OUTPUT_DIR,
) -> None:
    """Get and set your ArchiveBox project configuration values

    Modes (mutually exclusive): --get (or no args) prints values, --set
    writes KEY=VALUE pairs to the config file, --reset is unimplemented.
    Always exits via SystemExit; the exit code signals success/failure.
    """
    check_data_folder(out_dir=out_dir)
    # options may come as an argument string or a pre-split list, not both
    if config_options and config_options_str:
        stderr(
            "[X] You should either pass config values as an arguments "
            "or via stdin, but not both.\n",
            color="red",
        )
        raise SystemExit(2)
    elif config_options_str:
        config_options = config_options_str.split("\n")
    config_options = config_options or []
    no_args = not (get or set or reset or config_options)
    matching_config: ConfigDict = {}
    if get or no_args:
        # GET mode (also the default with no args at all)
        if config_options:
            config_options = [get_real_name(key) for key in config_options]
            matching_config = {
                key: CONFIG[key] for key in config_options if key in CONFIG
            }
            failed_config = [key for key in config_options if key not in CONFIG]
            if failed_config:
                stderr()
                stderr("[X] These options failed to get", color="red")
                stderr(" {}".format("\n ".join(config_options)))
                raise SystemExit(1)
        else:
            matching_config = CONFIG
        print(printable_config(matching_config))
        # exit code 0 only when at least one key matched
        raise SystemExit(not matching_config)
    elif set:
        new_config = {}
        failed_options = []
        for line in config_options:
            # skip comments and blank lines
            if line.startswith("#") or not line.strip():
                continue
            if "=" not in line:
                stderr("[X] Config KEY=VALUE must have an = sign in it", color="red")
                stderr(f" {line}")
                raise SystemExit(2)
            raw_key, val = line.split("=", 1)
            raw_key = raw_key.upper().strip()
            key = get_real_name(raw_key)
            if key != raw_key:
                stderr(
                    f"[i] Note: The config option {raw_key} has been renamed to {key}, please use the new name going forwards.",
                    color="lightyellow",
                )
            if key in CONFIG:
                new_config[key] = val.strip()
            else:
                failed_options.append(line)
        if new_config:
            before = CONFIG
            # BUGFIX: was hard-coded to the global OUTPUT_DIR, ignoring the
            # out_dir argument and writing the config file into the wrong
            # collection when a custom out_dir was passed.
            matching_config = write_config_file(new_config, out_dir=out_dir)
            after = load_all_config()
            print(printable_config(matching_config))
            # report derived options that changed as a side effect of the set
            side_effect_changes: ConfigDict = {}
            for key, val in after.items():
                if (
                    key in USER_CONFIG
                    and (before[key] != after[key])
                    and (key not in matching_config)
                ):
                    side_effect_changes[key] = after[key]
            if side_effect_changes:
                stderr()
                stderr(
                    "[i] Note: This change also affected these other options that depended on it:",
                    color="lightyellow",
                )
                print(
                    " {}".format(
                        printable_config(side_effect_changes, prefix=" ")
                    )
                )
        if failed_options:
            stderr()
            stderr("[X] These options failed to set (check for typos):", color="red")
            stderr(" {}".format("\n ".join(failed_options)))
            raise SystemExit(1)
    elif reset:
        stderr("[X] This command is not implemented yet.", color="red")
        stderr(" Please manually remove the relevant lines from your config file:")
        stderr(f" {CONFIG_FILE}")
        raise SystemExit(2)
    else:
        stderr(
            "[X] You must pass either --get or --set, or no arguments to get the whole config.",
            color="red",
        )
        stderr(" archivebox config")
        stderr(" archivebox config --get SOME_KEY")
        stderr(" archivebox config --set SOME_KEY=SOME_VALUE")
        raise SystemExit(2)
@enforce_types
def schedule(
    add: bool = False,
    show: bool = False,
    clear: bool = False,
    foreground: bool = False,
    run_all: bool = False,
    quiet: bool = False,
    every: Optional[str] = None,
    depth: int = 0,
    overwrite: bool = False,
    update: bool = not ONLY_NEW,
    import_path: Optional[str] = None,
    out_dir: Path = OUTPUT_DIR,
):
    """Set ArchiveBox to regularly import URLs at specific times using cron

    Modes: --clear removes all ArchiveBox jobs; --every/--add installs a new
    cron job; --show lists the current jobs; --foreground/--run-all executes
    the scheduled jobs directly instead of leaving them to cron.
    """
    check_data_folder(out_dir=out_dir)
    Path(LOGS_DIR).mkdir(exist_ok=True)
    cron = CronTab(user=True)
    cron = dedupe_cron_jobs(cron)
    if clear:
        # remove every job tagged with our comment marker, then persist
        print(cron.remove_all(comment=CRON_COMMENT))
        cron.write()
        raise SystemExit(0)
    existing_jobs = list(cron.find_comment(CRON_COMMENT))
    if every or add:
        every = every or "day"
        # shell-quote values containing spaces for the crontab command line
        quoted = lambda s: f'"{s}"' if (s and " " in str(s)) else str(s)
        cmd = [
            "cd",
            quoted(out_dir),
            "&&",
            quoted(ARCHIVEBOX_BINARY),
            *(
                [
                    "add",
                    *(["--overwrite"] if overwrite else []),
                    *(["--update"] if update else []),
                    f"--depth={depth}",
                    f'"{import_path}"',
                ]
                if import_path
                else ["update"]
            ),
            ">>",
            quoted(Path(LOGS_DIR) / "schedule.log"),
            "2>&1",
        ]
        new_job = cron.new(command=" ".join(cmd), comment=CRON_COMMENT)
        if every in ("minute", "hour", "day", "month", "year"):
            # python-crontab shorthand: new_job.every().day(), etc.
            set_every = getattr(new_job.every(), every)
            set_every()
        elif CronSlices.is_valid(every):
            # raw 5-field cron expression, e.g. "0/5 * * * *"
            new_job.setall(every)
        else:
            stderr(
                "{red}[X] Got invalid timeperiod for cron task.{reset}".format(**ANSI)
            )
            # NOTE(review): the accepted shorthands above also include "year",
            # but the message below omits it — confirm and update the text.
            stderr(" It must be one of minute/hour/day/month")
            stderr(" or a quoted cron-format schedule like:")
            stderr(
                " archivebox init --every=day --depth=1 https://example.com/some/rss/feed.xml"
            )
            stderr(
                ' archivebox init --every="0/5 * * * *" --depth=1 https://example.com/some/rss/feed.xml'
            )
            raise SystemExit(1)
        cron = dedupe_cron_jobs(cron)
        cron.write()
        total_runs = sum(j.frequency_per_year() for j in cron)
        existing_jobs = list(cron.find_comment(CRON_COMMENT))
        print()
        print(
            "{green}[√] Scheduled new ArchiveBox cron job for user: {} ({} jobs are active).{reset}".format(
                USER, len(existing_jobs), **ANSI
            )
        )
        # mark the freshly added job with ">" in the listing
        print(
            "\n".join(
                f" > {cmd}" if str(cmd) == str(new_job) else f" {cmd}"
                for cmd in existing_jobs
            )
        )
        if total_runs > 60 and not quiet:
            stderr()
            stderr(
                "{lightyellow}[!] With the current cron config, ArchiveBox is estimated to run >{} times per year.{reset}".format(
                    total_runs, **ANSI
                )
            )
            stderr(" Congrats on being an enthusiastic internet archiver! 👌")
            stderr()
            stderr(
                " Make sure you have enough storage space available to hold all the data."
            )
            stderr(
                " Using a compressed/deduped filesystem like ZFS is recommended if you plan on archiving a lot."
            )
            stderr("")
    elif show:
        if existing_jobs:
            print("\n".join(str(cmd) for cmd in existing_jobs))
        else:
            stderr(
                "{red}[X] There are no ArchiveBox cron jobs scheduled for your user ({}).{reset}".format(
                    USER, **ANSI
                )
            )
            stderr(" To schedule a new job, run:")
            stderr(
                " archivebox schedule --every=[timeperiod] --depth=1 https://example.com/some/rss/feed.xml"
            )
        raise SystemExit(0)
    # re-read the crontab so foreground/run_all see the just-written state
    cron = CronTab(user=True)
    cron = dedupe_cron_jobs(cron)
    existing_jobs = list(cron.find_comment(CRON_COMMENT))
    if foreground or run_all:
        if not existing_jobs:
            stderr(
                "{red}[X] You must schedule some jobs first before running in foreground mode.{reset}".format(
                    **ANSI
                )
            )
            stderr(
                " archivebox schedule --every=hour --depth=1 https://example.com/some/rss/feed.xml"
            )
            raise SystemExit(1)
        print(
            "{green}[*] Running {} ArchiveBox jobs in foreground task scheduler...{reset}".format(
                len(existing_jobs), **ANSI
            )
        )
        if run_all:
            # run each job once, immediately, printing progress per job
            try:
                for job in existing_jobs:
                    sys.stdout.write(
                        f' > {job.command.split("/archivebox ")[0].split(" && ")[0]}\n'
                    )
                    sys.stdout.write(
                        f' > {job.command.split("/archivebox ")[-1].split(" >> ")[0]}'
                    )
                    sys.stdout.flush()
                    job.run()
                    sys.stdout.write(
                        f'\r √ {job.command.split("/archivebox ")[-1]}\n'
                    )
            except KeyboardInterrupt:
                print("\n{green}[√] Stopped.{reset}".format(**ANSI))
                raise SystemExit(1)
        if foreground:
            # block here and let python-crontab's scheduler fire the jobs
            try:
                for job in existing_jobs:
                    print(
                        f' > {job.command.split("/archivebox ")[-1].split(" >> ")[0]}'
                    )
                for result in cron.run_scheduler():
                    print(result)
            except KeyboardInterrupt:
                print("\n{green}[√] Stopped.{reset}".format(**ANSI))
                raise SystemExit(1)
@enforce_types
def server(
    runserver_args: Optional[List[str]] = None,
    reload: bool = False,
    debug: bool = False,
    init: bool = False,
    quick_init: bool = False,
    createsuperuser: bool = False,
    out_dir: Path = OUTPUT_DIR,
) -> None:
    """Run the ArchiveBox HTTP server"""
    runserver_args = runserver_args or []

    # optionally bootstrap the collection before serving it
    if init:
        run_subcommand("init", stdin=None, pwd=out_dir)
        print()
    elif quick_init:
        run_subcommand("init", subcommand_args=["--quick"], stdin=None, pwd=out_dir)
        print()

    if createsuperuser:
        run_subcommand("manage", subcommand_args=["createsuperuser"], pwd=out_dir)
        print()

    # setup config for django runserver
    from . import config

    config.SHOW_PROGRESS = False
    config.DEBUG = config.DEBUG or debug

    check_data_folder(out_dir=out_dir)

    from django.contrib.auth.models import User
    from django.core.management import call_command

    print("{green}[+] Starting ArchiveBox webserver...{reset}".format(**ANSI))
    print(" > Logging errors to ./logs/errors.log")
    if not User.objects.filter(is_superuser=True).exists():
        print(
            "{lightyellow}[!] No admin users exist yet, you will not be able to edit links in the UI.{reset}".format(
                **ANSI
            )
        )
        print()
        print(" To create an admin user, run:")
        print(" archivebox manage createsuperuser")
        print()

    # fallback to serving staticfiles insecurely with django when DEBUG=False
    if not config.DEBUG:
        runserver_args.append("--insecure")  # TODO: serve statics w/ nginx instead

    # toggle autoreloading when archivebox code changes (it's on by default)
    if not reload:
        runserver_args.append("--noreload")

    # CLEANUP: removed a duplicated second assignment of config.SHOW_PROGRESS
    # and config.DEBUG that repeated the identical statements from above.
    call_command("runserver", *runserver_args)
@enforce_types
def manage(args: Optional[List[str]] = None, out_dir: Path = OUTPUT_DIR) -> None:
    """Run an ArchiveBox Django management command"""
    check_data_folder(out_dir=out_dir)
    from django.core.management import execute_from_command_line

    # createsuperuser prompts interactively, which requires a TTY inside docker
    wants_interactive = bool(args) and "createsuperuser" in args
    if wants_interactive and (IN_DOCKER and not IS_TTY):
        stderr(
            "[!] Warning: you need to pass -it to use interactive commands in docker",
            color="lightyellow",
        )
        stderr(
            " docker run -it archivebox manage {}".format(" ".join(args or ["..."])),
            color="lightyellow",
        )
        stderr()

    # default to the django "help" command when no args were given
    execute_from_command_line([f"{ARCHIVEBOX_BINARY} manage", *(args or ["help"])])
@enforce_types
def shell(out_dir: Path = OUTPUT_DIR) -> None:
    """Enter an interactive ArchiveBox Django shell"""
    check_data_folder(out_dir=out_dir)

    from django.core.management import call_command

    # shell_plus comes from django-extensions (auto-imports project models)
    call_command("shell_plus")
|
filter | qa_freq_xlating_fft_filter | #!/usr/bin/env python
#
# Copyright 2008,2010,2012,2013 Free Software Foundation, Inc.
#
# This file is part of GNU Radio
#
# SPDX-License-Identifier: GPL-3.0-or-later
#
#
import cmath
import math
from gnuradio import blocks, filter, gr, gr_unittest
def fir_filter(x, taps, decim=1):
y = []
x2 = (len(taps) - 1) * [
0,
] + x
for i in range(0, len(x), decim):
yi = 0
for j in range(len(taps)):
yi += taps[len(taps) - 1 - j] * x2[i + j]
y.append(yi)
return y
def sig_source_s(samp_rate, freq, amp, N):
    """Generate N samples of a short-int sine wave at `freq` Hz.

    NOTE: the amplitude is hard-coded to 100; the `amp` argument is kept
    only for signature parity with the other sources and is ignored.
    """
    omega = 2.0 * math.pi * freq
    return [int(100 * math.sin(omega * (float(n) / samp_rate))) for n in range(N)]
def sig_source_c(samp_rate, freq, amp, N):
    """Generate N samples of a unit-amplitude complex exponential at `freq` Hz.

    NOTE: `amp` is accepted for signature parity but ignored (amplitude is 1).
    """
    samples = []
    omega = 2.0 * math.pi * freq
    for n in range(N):
        theta = omega * (float(n) / samp_rate)
        samples.append(complex(math.cos(theta), math.sin(theta)))
    return samples
def mix(lo, data):
    """Element-wise product of two equal-length sequences (LO mixing)."""
    return [osc * sample for osc, sample in zip(lo, data)]
class test_freq_xlating_filter(gr_unittest.TestCase):
    """QA for freq_xlating_fft_filter_ccc: each test compares the block's
    output against a pure-Python reference (mix down with a complex LO,
    then FIR-filter with the same taps)."""

    def setUp(self):
        # fresh flowgraph per test
        self.tb = gr.top_block()

    def tearDown(self):
        self.tb = None

    def generate_ccf_source(self):
        # "ccf" case: complex in/out with real (float) low-pass taps
        self.fs = fs = 1
        self.fc = fc = 0.3
        self.bw = bw = 0.1
        self.taps = filter.firdes.low_pass(1, fs, bw, bw / 4)
        times = list(range(1024))
        # slow complex exponential sweeping below the center frequency
        self.src_data = [
            cmath.exp(-2j * cmath.pi * fc / fs * (t / 100.0)) for t in times
        ]

    def generate_ccc_source(self):
        # "ccc" case: same source, but complex band-pass taps
        self.fs = fs = 1
        self.fc = fc = 0.3
        self.bw = bw = 0.1
        self.taps = filter.firdes.complex_band_pass(1, fs, -bw / 2, bw / 2, bw / 4)
        times = list(range(1024))
        self.src_data = [
            cmath.exp(-2j * cmath.pi * fc / fs * (t / 100.0)) for t in times
        ]

    def assert_fft_ok(self, expected_result, result_data):
        # the block may emit fewer samples than the reference; compare the overlap
        expected_result = expected_result[: len(result_data)]
        self.assertComplexTuplesAlmostEqual(expected_result, result_data, places=5)

    def test_fft_filter_ccf_001(self):
        # float taps, no decimation
        self.generate_ccf_source()
        decim = 1
        lo = sig_source_c(self.fs, -self.fc, 1, len(self.src_data))
        despun = mix(lo, self.src_data)
        expected_data = fir_filter(despun, self.taps, decim)
        src = blocks.vector_source_c(self.src_data)
        op = filter.freq_xlating_fft_filter_ccc(decim, self.taps, self.fc, self.fs)
        dst = blocks.vector_sink_c()
        self.tb.connect(src, op, dst)
        self.tb.run()
        result_data = dst.data()
        self.assert_fft_ok(expected_data, result_data)

    def test_fft_filter_ccf_002(self):
        # float taps, decimate by 4
        self.generate_ccf_source()
        decim = 4
        lo = sig_source_c(self.fs, -self.fc, 1, len(self.src_data))
        despun = mix(lo, self.src_data)
        expected_data = fir_filter(despun, self.taps, decim)
        src = blocks.vector_source_c(self.src_data)
        op = filter.freq_xlating_fft_filter_ccc(decim, self.taps, self.fc, self.fs)
        dst = blocks.vector_sink_c()
        self.tb.connect(src, op, dst)
        self.tb.run()
        result_data = dst.data()
        self.assert_fft_ok(expected_data, result_data)

    def test_fft_filter_ccc_001(self):
        # complex taps, no decimation
        self.generate_ccc_source()
        decim = 1
        lo = sig_source_c(self.fs, -self.fc, 1, len(self.src_data))
        despun = mix(lo, self.src_data)
        expected_data = fir_filter(despun, self.taps, decim)
        src = blocks.vector_source_c(self.src_data)
        op = filter.freq_xlating_fft_filter_ccc(decim, self.taps, self.fc, self.fs)
        dst = blocks.vector_sink_c()
        self.tb.connect(src, op, dst)
        self.tb.run()
        result_data = dst.data()
        self.assert_fft_ok(expected_data, result_data)

    def test_fft_filter_ccc_002(self):
        # complex taps, decimate by 4
        self.generate_ccc_source()
        decim = 4
        lo = sig_source_c(self.fs, -self.fc, 1, len(self.src_data))
        despun = mix(lo, self.src_data)
        expected_data = fir_filter(despun, self.taps, decim)
        src = blocks.vector_source_c(self.src_data)
        op = filter.freq_xlating_fft_filter_ccc(decim, self.taps, self.fc, self.fs)
        dst = blocks.vector_sink_c()
        self.tb.connect(src, op, dst)
        self.tb.run()
        result_data = dst.data()
        self.assert_fft_ok(expected_data, result_data)
# Entry point: run this QA suite through GNU Radio's unittest wrapper.
if __name__ == "__main__":
    gr_unittest.run(test_freq_xlating_filter)
|
autokey | common | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (C) 2011 Chris Dekter
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import os
# --- XDG base directories (fall back to the spec's defaults when unset) ---
XDG_CONFIG_HOME = os.environ.get("XDG_CONFIG_HOME", os.path.expanduser("~/.config"))
# Runtime dir falls back to cache dir, as a fallback is suggested by the spec
XDG_CACHE_HOME = os.environ.get("XDG_CACHE_HOME", os.path.expanduser("~/.cache"))
XDG_DATA_HOME = os.environ.get("XDG_DATA_HOME", os.path.expanduser("~/.local/share"))

# --- autokey-specific paths derived from the XDG directories ---
CONFIG_DIR = os.path.join(XDG_CONFIG_HOME, "autokey")
RUN_DIR = os.path.join(os.environ.get("XDG_RUNTIME_DIR", XDG_CACHE_HOME), "autokey")
DATA_DIR = os.path.join(XDG_DATA_HOME, "autokey")
# The desktop file to start autokey during login is placed here
AUTOSTART_DIR = os.path.join(XDG_CONFIG_HOME, "autostart")
# PID file used to detect an already-running instance.
LOCK_FILE = os.path.join(RUN_DIR, "autokey.pid")

# --- application metadata ---
APP_NAME = "autokey"
CATALOG = ""
VERSION = "0.96.0"
HOMEPAGE = "https://github.com/autokey/autokey"
AUTHOR = "Chris Dekter"
AUTHOR_EMAIL = "cdekter@gmail.com"
MAINTAINER = "GuoCi"
MAINTAINER_EMAIL = "guociz@gmail.com"
BUG_EMAIL = "guociz@gmail.com"
FAQ_URL = "https://github.com/autokey/autokey/wiki/FAQ"
API_URL = "https://autokey.github.io/"
HELP_URL = "https://github.com/autokey/autokey/wiki/Troubleshooting"
BUG_URL = HOMEPAGE + "/issues"

# --- icon names (looked up in the system icon theme) ---
ICON_FILE = "autokey"
ICON_FILE_NOTIFICATION = "autokey-status"
ICON_FILE_NOTIFICATION_DARK = "autokey-status-dark"
ICON_FILE_NOTIFICATION_ERROR = "autokey-status-error"

# GUI toolkit selector; other modules branch on this (see macro.py).
USING_QT = False
|
digital | qa_meas_evm_cc | #!/usr/bin/env python
#
# Copyright 2020 Free Software Foundation, Inc.
#
# This file is part of GNU Radio
#
# SPDX-License-Identifier: GPL-3.0-or-later
#
#
import random
import numpy
from gnuradio import blocks, channels, digital, gr, gr_unittest
class qa_meas_evm_cc(gr_unittest.TestCase):
    """QA for digital.meas_evm_cc (error-vector-magnitude measurement)."""

    def setUp(self):
        # Fixed seed keeps the random symbol streams reproducible.
        random.seed(987654)
        self.tb = gr.top_block()
        self.num_data = num_data = 1000

    def tearDown(self):
        self.tb = None

    def test_qpsk(self):
        """Unimpaired QPSK symbols must measure exactly 0% EVM."""
        expected_result = list(numpy.zeros((self.num_data,)))
        self.cons = cons = digital.constellation_qpsk().base()
        self.data = data = [
            random.randrange(len(cons.points())) for x in range(self.num_data)
        ]
        self.symbols = symbols = numpy.squeeze([cons.map_to_points_v(i) for i in data])
        evm = digital.meas_evm_cc(cons, digital.evm_measurement_t.EVM_PERCENT)
        vso = blocks.vector_source_c(symbols, False, 1, [])
        vsi = blocks.vector_sink_f()
        self.tb.connect(vso, evm, vsi)
        self.tb.run()
        output_data = vsi.data()
        self.assertEqual(expected_result, output_data)

    def test_qpsk_nonzeroevm(self):
        """Scaling the symbols must produce a nonzero EVM."""
        expected_result = list(numpy.zeros((self.num_data,)))
        self.cons = cons = digital.constellation_qpsk().base()
        self.data = data = [
            random.randrange(len(cons.points())) for x in range(self.num_data)
        ]
        self.symbols = symbols = numpy.squeeze([cons.map_to_points_v(i) for i in data])
        evm = digital.meas_evm_cc(cons, digital.evm_measurement_t.EVM_PERCENT)
        vso = blocks.vector_source_c(symbols, False, 1, [])
        mc = blocks.multiply_const_cc(3.0 + 2.0j)
        vsi = blocks.vector_sink_f()
        self.tb.connect(vso, mc, evm, vsi)
        self.tb.run()
        output_data = vsi.data()
        self.assertNotEqual(expected_result, output_data)

    def test_qpsk_channel(self):
        """EVM of QPSK through a mildly noisy channel stays within (0, 50)%."""
        upper_bound = list(50.0 * numpy.ones((self.num_data,)))
        lower_bound = list(numpy.zeros((self.num_data,)))
        self.cons = cons = digital.constellation_qpsk().base()
        self.data = data = [
            random.randrange(len(cons.points())) for x in range(self.num_data)
        ]
        self.symbols = symbols = numpy.squeeze([cons.map_to_points_v(i) for i in data])
        chan = channels.channel_model(
            noise_voltage=0.1,
            frequency_offset=0.0,
            epsilon=1.0,
            taps=[1.0 + 0.0j],
            noise_seed=0,
            block_tags=False,
        )
        evm = digital.meas_evm_cc(cons, digital.evm_measurement_t.EVM_PERCENT)
        vso = blocks.vector_source_c(symbols, False, 1, [])
        # (An unused multiply_const_cc that was never connected was removed.)
        vsi = blocks.vector_sink_f()
        self.tb.connect(vso, chan, evm, vsi)
        self.tb.run()
        output_data = vsi.data()
        # NOTE(review): these compare Python lists lexicographically (the
        # first differing element decides); an element-wise bound check may
        # have been intended. Behaviour kept as-is.
        self.assertLess(output_data, upper_bound)
        self.assertGreater(output_data, lower_bound)

    def test_qam16_channel(self):
        """EVM of 16-QAM through a mildly noisy channel stays within (0, 50)%."""
        upper_bound = list(50.0 * numpy.ones((self.num_data,)))
        lower_bound = list(numpy.zeros((self.num_data,)))
        self.cons = cons = digital.constellation_16qam().base()
        self.data = data = [
            random.randrange(len(cons.points())) for x in range(self.num_data)
        ]
        self.symbols = symbols = numpy.squeeze([cons.map_to_points_v(i) for i in data])
        chan = channels.channel_model(
            noise_voltage=0.1,
            frequency_offset=0.0,
            epsilon=1.0,
            taps=[1.0 + 0.0j],
            noise_seed=0,
            block_tags=False,
        )
        evm = digital.meas_evm_cc(cons, digital.evm_measurement_t.EVM_PERCENT)
        vso = blocks.vector_source_c(symbols, False, 1, [])
        # (An unused multiply_const_cc that was never connected was removed.)
        vsi = blocks.vector_sink_f()
        self.tb.connect(vso, chan, evm, vsi)
        self.tb.run()
        output_data = vsi.data()
        # NOTE(review): lexicographic list comparison — see test_qpsk_channel.
        self.assertLess(output_data, upper_bound)
        self.assertGreater(output_data, lower_bound)
# Entry point: run this QA suite through GNU Radio's unittest wrapper.
if __name__ == "__main__":
    gr_unittest.run(qa_meas_evm_cc)
|
frescobaldi-app | bugreport | # This file is part of the Frescobaldi project, http://www.frescobaldi.org/
#
# Copyright (c) 2008 - 2014 by Wilbert Berendsen
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
# See http://www.gnu.org/licenses/ for more information.
"""
Function for helping the user to report bugs.
"""
import appinfo
import debuginfo
import helpers
from PyQt5.QtCore import QUrl, QUrlQuery
def new_github_issue(title, body):
    """
    Open a web browser on a page to create a new issue on GitHub.

    Information about the versions of Frescobaldi and its dependencies is
    appended to the body.

    The body is sent in the query part of a URL, so keep it reasonably
    short; as of this writing GitHub imposes a limit of around 6000
    characters.
    """
    full_body = "{}\n\n{}".format(body, debuginfo.version_info_string())
    issue_url = QUrl(appinfo.issues_url)
    params = QUrlQuery()
    for key, value in (("title", title), ("body", full_body)):
        params.addQueryItem(key, value)
    issue_url.setQuery(params)
    helpers.openUrl(issue_url)
def email(subject, body, recipient=None):
    """Open the e-mail composer with the given subject and body.

    The subject is tagged with the application name and version, and
    version information is prepended to the body. Falls back to the
    maintainer's address when *recipient* is not given.
    """
    tagged_subject = f"[{appinfo.appname} {appinfo.version}] {subject}"
    full_body = "{}\n\n{}\n\n".format(debuginfo.version_info_string("\n"), body)
    # Empty/None recipient falls back to the maintainer (truthiness check).
    address = recipient or appinfo.maintainer_email
    mail_url = QUrl("mailto:" + address)
    params = QUrlQuery()
    for key, value in (("subject", tagged_subject), ("body", full_body)):
        params.addQueryItem(key, value)
    mail_url.setQuery(params)
    helpers.openUrl(mail_url, "email")
|
config | environment | # encoding: utf-8
"""CKAN environment configuration"""
from __future__ import annotations
import logging
import os
import warnings
from typing import Union, cast
import ckan.authz as authz
import ckan.lib.app_globals as app_globals
import ckan.lib.helpers as helpers
import ckan.lib.plugins as lib_plugins
import ckan.lib.search as search
import ckan.logic as logic
import ckan.model as model
import ckan.plugins as p
import pytz
import sqlalchemy.exc
from ckan.common import CKANConfig, config, config_declaration
from ckan.exceptions import CkanConfigurationException
from ckan.lib.i18n import build_js_translations
from ckan.lib.redis import is_redis_available
from ckan.lib.webassets_tools import register_core_assets, webassets_init
from ckan.types import Config
from sqlalchemy import engine_from_config, inspect
# Module-level logger for environment/configuration setup.
log = logging.getLogger(__name__)

# Suppress benign warning 'Unbuilt egg for setuptools'
# NOTE(review): this silences every UserWarning process-wide, not just the
# setuptools one — confirm that is intended.
warnings.simplefilter("ignore", UserWarning)
def load_environment(conf: Union[Config, CKANConfig]):
    """
    Configure the Pylons environment via the ``pylons.config`` object. This
    code should only need to be run once.
    """
    # Record the config file path in the environment.
    # NOTE(review): presumably read back elsewhere via CKAN_CONFIG — confirm.
    os.environ["CKAN_CONFIG"] = cast(str, conf["__file__"])
    valid_base_public_folder_names = ["public"]
    static_files = conf.get("ckan.base_public_folder", "public")
    conf["ckan.base_public_folder"] = static_files
    if static_files not in valid_base_public_folder_names:
        raise CkanConfigurationException(
            "You provided an invalid value for ckan.base_public_folder. "
            'Possible value is: "public".'
        )
    log.info("Loading static files from %s" % static_files)
    # Initialize main CKAN config object
    config.update(conf)
    # Setup the SQLAlchemy database engine
    # Suppress a couple of sqlalchemy warnings
    msgs = [
        "^Unicode type received non-unicode bind param value",
        "^Did not recognize type 'BIGINT' of column 'size'",
        "^Did not recognize type 'tsvector' of column 'search_vector'",
    ]
    for msg in msgs:
        warnings.filterwarnings("ignore", msg, sqlalchemy.exc.SAWarning)
    # load all CKAN plugins
    p.load_all()
    # Check Redis availability
    if not is_redis_available():
        # Logged as critical but not fatal: startup continues without Redis.
        log.critical("Could not connect to Redis.")
    app_globals.reset()
    # Build JavaScript translations. Must be done after plugins have
    # been loaded.
    build_js_translations()
# A mapping of config settings that can be overridden by env vars.
# Keys are CKAN config options; values are the environment variables whose
# (non-empty) values take precedence — applied in update_config() below.
# Note: Do not remove the following lines, they are used in the docs
# Start CONFIG_FROM_ENV_VARS
CONFIG_FROM_ENV_VARS: dict[str, str] = {
    "sqlalchemy.url": "CKAN_SQLALCHEMY_URL",
    "ckan.datastore.write_url": "CKAN_DATASTORE_WRITE_URL",
    "ckan.datastore.read_url": "CKAN_DATASTORE_READ_URL",
    "ckan.redis.url": "CKAN_REDIS_URL",
    "solr_url": "CKAN_SOLR_URL",
    "solr_user": "CKAN_SOLR_USER",
    "solr_password": "CKAN_SOLR_PASSWORD",
    "ckan.site_id": "CKAN_SITE_ID",
    "ckan.site_url": "CKAN_SITE_URL",
    "ckan.storage_path": "CKAN_STORAGE_PATH",
    "ckan.datapusher.url": "CKAN_DATAPUSHER_URL",
    "smtp.server": "CKAN_SMTP_SERVER",
    "smtp.starttls": "CKAN_SMTP_STARTTLS",
    "smtp.user": "CKAN_SMTP_USER",
    "smtp.password": "CKAN_SMTP_PASSWORD",
    "smtp.mail_from": "CKAN_SMTP_MAIL_FROM",
    "ckan.max_resource_size": "CKAN_MAX_UPLOAD_SIZE_MB",
}
# End CONFIG_FROM_ENV_VARS
def update_config() -> None:
    """This code needs to be run when the config is changed to take those
    changes into account. It is called whenever a plugin is loaded as the
    plugin might have changed the config values (for instance it might
    change ckan.site_url)

    Raises CkanConfigurationException / RuntimeError for invalid settings.
    """
    # read envvars before config declarations in order to apply normalization
    # to the values, when declarations loaded
    for option in CONFIG_FROM_ENV_VARS:
        from_env = os.environ.get(CONFIG_FROM_ENV_VARS[option], None)
        if from_env:
            config[option] = from_env

    config_declaration.setup()
    config_declaration.make_safe(config)
    config_declaration.normalize(config)

    # these are collections of all template/public paths registered by
    # extensions. Each call to `tk.add_template_directory` or
    # `tk.add_public_directory` updates these collections. We have to reset
    # them in order to remove templates/public files that came from plugins
    # that were once enabled but are disabled right now.
    config["plugin_template_paths"] = []
    config["plugin_public_paths"] = []

    # initialize webassets environment because plugins will register assets
    # inside IConfigured.update_config
    webassets_init()

    for plugin in p.PluginImplementations(p.IConfigurer):
        # must do update in place as this does not work:
        # config = plugin.update_config(config)
        plugin.update_config(config)

    # register core assets here, giving plugins an opportunity to override core
    # assets inside IConfigurer.update_config
    register_core_assets()

    _, errors = config_declaration.validate(config)
    if errors:
        if config.get("config.mode") == "strict":
            msg = "\n".join(
                "{}: {}".format(key, "; ".join(issues))
                for key, issues in errors.items()
            )
            msg = "Invalid configuration values provided:\n" + msg
            raise CkanConfigurationException(msg)
        else:
            # Non-strict mode: log each invalid value but keep going.
            for key, issues in errors.items():
                log.warning(
                    "Invalid value for %s (%s): %s",
                    key,
                    config.get(key),
                    "; ".join(issues),
                )

    root = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))

    site_url = config.get("ckan.site_url")
    if not site_url:
        raise RuntimeError(
            "ckan.site_url is not configured and it must have a value."
            " Please amend your .ini file."
        )
    if not site_url.lower().startswith("http"):
        raise RuntimeError(
            "ckan.site_url should be a full URL, including the schema "
            "(http or https)"
        )
    # Remove the trailing slash from site_url if present
    config["ckan.site_url"] = site_url.rstrip("/")

    display_timezone = config.get("ckan.display_timezone")
    if (
        display_timezone
        and display_timezone != "server"
        and display_timezone not in pytz.all_timezones
    ):
        raise CkanConfigurationException(
            "ckan.display_timezone is not 'server' or a valid timezone"
        )

    # Init SOLR settings and check if the schema is compatible
    # from ckan.lib.search import SolrSettings, check_solr_schema_version
    # lib.search is imported here as we need the config enabled and parsed
    search.SolrSettings.init(
        config.get("solr_url"), config.get("solr_user"), config.get("solr_password")
    )
    search.check_solr_schema_version()

    lib_plugins.reset_package_plugins()
    lib_plugins.register_package_plugins()
    lib_plugins.reset_group_plugins()
    lib_plugins.register_group_plugins()

    # initialise the globals
    app_globals.app_globals._init()

    helpers.load_plugin_helpers()

    # Templates and CSS loading from configuration
    valid_base_templates_folder_names = ["templates"]
    templates = config.get("ckan.base_templates_folder")
    config["ckan.base_templates_folder"] = templates
    if templates not in valid_base_templates_folder_names:
        # Fixed a stray double quote in this user-facing error message.
        raise CkanConfigurationException(
            "You provided an invalid value for ckan.base_templates_folder. "
            'Possible value is: "templates".'
        )

    jinja2_templates_path = os.path.join(root, templates)
    # Lazy %-style logging args: the message is only built when emitted.
    log.info("Loading templates from %s", jinja2_templates_path)
    template_paths = [jinja2_templates_path]

    extra_template_paths = config.get("extra_template_paths")
    if "plugin_template_paths" in config:
        template_paths = config["plugin_template_paths"] + template_paths
    if extra_template_paths:
        # must be first for them to override defaults
        template_paths = extra_template_paths.split(",") + template_paths
    config["computed_template_paths"] = template_paths

    # Enable pessimistic disconnect handling (added in SQLAlchemy 1.2)
    # to eliminate database errors due to stale pooled connections
    config.setdefault("sqlalchemy.pool_pre_ping", True)

    # Initialize SQLAlchemy
    engine = engine_from_config(config)
    model.init_model(engine)

    for plugin in p.PluginImplementations(p.IConfigurable):
        plugin.configure(config)

    # clear other caches
    logic.clear_actions_cache()
    logic.clear_validators_cache()
    authz.clear_auth_functions_cache()

    # Here we create the site user if they are not already in the database
    user_table_exists = False
    try:
        user_table_exists = inspect(engine).has_table("user")
    except sqlalchemy.exc.OperationalError:
        log.debug("DB user table does not exist")

    if user_table_exists:
        try:
            logic.get_action("get_site_user")({"ignore_auth": True}, {})
        except sqlalchemy.exc.ProgrammingError as e:
            if "UndefinedColumn" in repr(e.orig):
                log.debug("Old user model detected")
            else:
                raise
        except sqlalchemy.exc.IntegrityError:
            # Race condition, user already exists.
            log.debug("Site user already exists")

    # Close current session and open database connections to ensure a clean
    # environment even if an error occurs later on
    model.Session.remove()
    model.Session.bind.dispose()
|
invesalius | project | # --------------------------------------------------------------------------
# Software: InVesalius - Software de Reconstrucao 3D de Imagens Medicas
# Copyright: (C) 2001 Centro de Pesquisas Renato Archer
# Homepage: http://www.softwarepublico.gov.br
# Contact: invesalius@cti.gov.br
# License: GNU - GPL 2 (LICENSE.txt/LICENCA.txt)
# --------------------------------------------------------------------------
# Este programa e software livre; voce pode redistribui-lo e/ou
# modifica-lo sob os termos da Licenca Publica Geral GNU, conforme
# publicada pela Free Software Foundation; de acordo com a versao 2
# da Licenca.
#
# Este programa eh distribuido na expectativa de ser util, mas SEM
# QUALQUER GARANTIA; sem mesmo a garantia implicita de
# COMERCIALIZACAO ou de ADEQUACAO A QUALQUER PROPOSITO EM
# PARTICULAR. Consulte a Licenca Publica Geral GNU para obter mais
# detalhes.
# --------------------------------------------------------------------------
import datetime
import glob
import os
import plistlib
import shutil
import sys
import tarfile
import tempfile
import invesalius.constants as const
import invesalius.data.polydata_utils as pu
import invesalius.version as version
import numpy as np
import wx
from invesalius import inv_paths
from invesalius.data import imagedata_utils
from invesalius.presets import Presets
from invesalius.pubsub import pub as Publisher
from invesalius.utils import Singleton, TwoWaysDictionary, debug, decode, touch
from vtkmodules.vtkCommonCore import vtkFileOutputWindow, vtkOutputWindow
# win32api is optional: when available on Windows it is used below (see
# Compress/Extract) to obtain short 8.3 paths for temp files.
if sys.platform == "win32":
    try:
        import win32api

        _has_win32api = True
    except ImportError:
        _has_win32api = False
else:
    _has_win32api = False
# Only one project will be initialized per time. Therefore, we use
# Singleton design pattern for implementing it
class Project(metaclass=Singleton):
    """In-memory model of an InVesalius project: patient metadata, the image
    matrix, masks, surfaces and measurements, plus (de)serialization to the
    .inv3 format (a tar archive of plist files plus matrix.dat)."""

    def __init__(self):
        # Patient/ acquistion information
        self.name = ""
        self.modality = ""
        self.original_orientation = ""
        self.window = ""
        self.level = ""
        self.affine = ""

        # Masks (vtkImageData)
        self.mask_dict = TwoWaysDictionary()

        # Surfaces are (vtkPolyData)
        self.surface_dict = {}
        self.last_surface_index = -1

        # Measurements
        self.measurement_dict = {}

        # TODO: Future ++
        self.annotation_dict = {}

        self.compress = False

        # InVesalius related data
        # So we can find bugs and reproduce user-related problems
        self.invesalius_version = version.get_svn_revision()
        self.presets = Presets()
        self.threshold_modes = self.presets.thresh_ct
        self.threshold_range = ""
        self.raycasting_preset = ""

        # NOTE(review): self.spacing and self.matrix_* are NOT initialized
        # here; they are assigned by load_from_folder()/callers. Saving a
        # freshly created Project would raise AttributeError — confirm all
        # save paths set them first.

        # self.surface_quality_list = ["Low", "Medium", "High", "Optimal *",
        # "Custom"i]
        # TOOD: define how we will relate this quality possibilities to
        # values set as decimate / smooth
        # TODO: Future +
        # Allow insertion of new surface quality modes

    def Close(self):
        # NOTE(review): `del attr` deletes only the local name, not the
        # instance attribute; the actual reset is done by re-running
        # __init__ below.
        for name in self.__dict__:
            attr = getattr(self, name)
            del attr
        self.__init__()

    def AddMask(self, mask):
        """
        Insert new mask (Mask) into project data.

        input
            @ mask: Mask associated to mask
        output
            @ index: index of item that was inserted
        """
        index = len(self.mask_dict)
        self.mask_dict[index] = mask
        mask.index = index
        return index

    def RemoveMask(self, index):
        # Rebuild the dictionary without the removed mask, renumbering the
        # remaining entries so indices stay contiguous.
        new_dict = TwoWaysDictionary()
        new_index = 0
        for i in self.mask_dict:
            if i == index:
                mask = self.mask_dict[i]
                mask.cleanup()
            else:
                new_dict[new_index] = self.mask_dict[i]
                # NOTE(review): this writes an int into the old dict, which
                # is discarded immediately below, and the mask object's own
                # .index attribute is never updated — verify intent.
                self.mask_dict[i] = new_index
                new_index += 1
        self.mask_dict = new_dict

    def GetMask(self, index):
        """Return the mask stored at integer *index*."""
        return self.mask_dict[index]

    def AddSurface(self, surface):
        """Append *surface* and return the index it was stored under."""
        # self.last_surface_index = surface.index
        index = len(self.surface_dict)
        self.surface_dict[index] = surface
        return index

    def ChangeSurface(self, surface):
        """Replace the stored surface that carries the same index."""
        index = surface.index
        self.surface_dict[index] = surface

    def RemoveSurface(self, index):
        # Drop the surface at *index* and shift the following entries down,
        # keeping each surface's .index attribute in sync.
        new_dict = {}
        for i in self.surface_dict:
            if i < index:
                new_dict[i] = self.surface_dict[i]
            if i > index:
                new_dict[i - 1] = self.surface_dict[i]
                new_dict[i - 1].index = i - 1
        self.surface_dict = new_dict

    def AddMeasurement(self, measurement):
        """Append *measurement* and return the index it was stored under."""
        index = len(self.measurement_dict)
        measurement.index = index
        self.measurement_dict[index] = measurement
        return index

    def ChangeMeasurement(self, measurement):
        """Replace the stored measurement that carries the same index."""
        index = measurement.index
        self.measurement_dict[index] = measurement

    def RemoveMeasurement(self, index):
        # Same renumbering scheme as RemoveSurface.
        new_dict = {}
        for i in self.measurement_dict:
            if i < index:
                new_dict[i] = self.measurement_dict[i]
            if i > index:
                new_dict[i - 1] = self.measurement_dict[i]
                new_dict[i - 1].index = i - 1
        self.measurement_dict = new_dict

    def SetAcquisitionModality(self, type_=None):
        """Set self.modality and select the matching threshold presets
        (MRI or CT). Defaults to the current modality when *type_* is None."""
        if type_ is None:
            type_ = self.modality

        if type_ == "MRI":
            self.threshold_modes = self.presets.thresh_mri
        elif type_ == "CT":
            self.threshold_modes = self.presets.thresh_ct
        else:
            debug("Different Acquisition Modality!!!")
        self.modality = type_

    def SetRaycastPreset(self, label):
        """Load the named raycasting preset plist and broadcast it."""
        # NOTE(review): RAYCASTING_PRESETS_DIRECTORY is neither defined nor
        # imported in this module's visible scope (likely meant to be
        # const.RAYCASTING_PRESETS_DIRECTORY) — as written this raises
        # NameError; confirm.
        path = os.path.join(RAYCASTING_PRESETS_DIRECTORY, label + ".plist")
        with open(path, "r+b") as f:
            preset = plistlib.load(f, fmt=plistlib.FMT_XML)
        Publisher.sendMessage("Set raycasting preset", preset)

    def GetMeasuresDict(self):
        """Return all measurements as {str(index): dict} for plist storage."""
        measures = {}
        d = self.measurement_dict
        for i in d:
            m = d[i]
            measures[str(m.index)] = m.get_as_dict()
        return measures

    def SavePlistProject(self, dir_, filename, compress=False):
        """Serialize the project to dir_/filename (.inv3): a tar archive
        containing main.plist, per-mask/surface plists, measurements.plist
        and the raw matrix.dat."""
        dir_temp = decode(tempfile.mkdtemp(), const.FS_ENCODE)
        self.compress = compress

        filename_tmp = os.path.join(dir_temp, "matrix.dat")
        # filelist maps a source path on disk -> its name inside the archive.
        filelist = {}

        project = {
            # Format info
            "format_version": const.INVESALIUS_ACTUAL_FORMAT_VERSION,
            "invesalius_version": const.INVESALIUS_VERSION,
            "date": datetime.datetime.now().isoformat(),
            "compress": self.compress,
            # case info
            "name": self.name,  # patient's name
            "modality": self.modality,  # CT, RMI, ...
            "orientation": self.original_orientation,
            "window_width": self.window,
            "window_level": self.level,
            "scalar_range": self.threshold_range,
            "spacing": self.spacing,
            "affine": self.affine,
        }

        # Saving the matrix containing the slices
        matrix = {
            "filename": "matrix.dat",
            "shape": self.matrix_shape,
            "dtype": self.matrix_dtype,
        }
        project["matrix"] = matrix
        filelist[self.matrix_filename] = "matrix.dat"
        # shutil.copyfile(self.matrix_filename, filename_tmp)

        # Saving the masks
        masks = {}
        for index in self.mask_dict:
            masks[str(index)] = self.mask_dict[index].SavePlist(dir_temp, filelist)
        project["masks"] = masks

        # Saving the surfaces
        surfaces = {}
        for index in self.surface_dict:
            surfaces[str(index)] = self.surface_dict[index].SavePlist(
                dir_temp, filelist
            )
        project["surfaces"] = surfaces

        # Saving the measurements
        measurements = self.GetMeasuresDict()
        measurements_filename = "measurements.plist"
        temp_mplist = tempfile.mktemp()
        with open(temp_mplist, "w+b") as f:
            plistlib.dump(measurements, f)
        filelist[temp_mplist] = measurements_filename
        project["measurements"] = measurements_filename

        # Saving the annotations (empty in this version)
        project["annotations"] = {}

        # Saving the main plist
        temp_plist = tempfile.mktemp()
        with open(temp_plist, "w+b") as f:
            plistlib.dump(project, f)
        filelist[temp_plist] = "main.plist"

        # Compressing and generating the .inv3 file
        path = os.path.join(dir_, filename)
        Compress(dir_temp, path, filelist, compress)

        # Removing the temp folder.
        shutil.rmtree(dir_temp)

        # Remove the temporary plist files created outside dir_temp above.
        for f in filelist:
            if filelist[f].endswith(".plist"):
                os.remove(f)

    def OpenPlistProject(self, filename):
        """Extract an .inv3 archive to a temp folder and load it."""
        if not const.VTK_WARNING:
            # Redirect VTK warnings to a log file instead of popup windows.
            log_path = os.path.join(inv_paths.USER_LOG_DIR, "vtkoutput.txt")
            fow = vtkFileOutputWindow()
            fow.SetFileName(log_path.encode(const.FS_ENCODE))
            ow = vtkOutputWindow()
            ow.SetInstance(fow)

        filelist = Extract(filename, tempfile.mkdtemp())
        dirpath = os.path.abspath(os.path.split(filelist[0])[0])
        self.load_from_folder(dirpath)

    def load_from_folder(self, dirpath):
        """
        Loads invesalius3 project files from dirpath.
        """
        # Imported here to avoid import cycles with the data modules.
        import invesalius.data.mask as msk
        import invesalius.data.measures as ms
        import invesalius.data.surface as srf

        # Opening the main file from invesalius 3 project
        main_plist = os.path.join(dirpath, "main.plist")
        with open(main_plist, "r+b") as f:
            project = plistlib.load(f, fmt=plistlib.FMT_XML)

        format_version = project["format_version"]
        if format_version > const.INVESALIUS_ACTUAL_FORMAT_VERSION:
            from invesalius.gui.dialogs import ImportOldFormatInvFile

            ImportOldFormatInvFile()

        # case info
        self.name = project["name"]
        self.modality = project["modality"]
        self.original_orientation = project["orientation"]
        self.window = project["window_width"]
        self.level = project["window_level"]
        self.threshold_range = project["scalar_range"]
        self.spacing = project["spacing"]
        # Older projects may lack the "compress" key; default to True.
        self.compress = project.get("compress", True)

        # Opening the matrix containing the slices
        filepath = os.path.join(dirpath, project["matrix"]["filename"])
        self.matrix_filename = filepath
        self.matrix_shape = project["matrix"]["shape"]
        self.matrix_dtype = project["matrix"]["dtype"]

        if project.get("affine", ""):
            self.affine = project["affine"]

        # Opening the masks
        self.mask_dict = TwoWaysDictionary()
        # Keys are stringified ints; sort numerically to keep mask order.
        for index in sorted(project.get("masks", []), key=lambda x: int(x)):
            filename = project["masks"][index]
            filepath = os.path.join(dirpath, filename)
            m = msk.Mask()
            m.spacing = self.spacing
            m.OpenPList(filepath)
            m.index = len(self.mask_dict)
            self.mask_dict[m.index] = m

        # Opening the surfaces
        self.surface_dict = {}
        for index in sorted(project.get("surfaces", []), key=lambda x: int(x)):
            filename = project["surfaces"][index]
            filepath = os.path.join(dirpath, filename)
            s = srf.Surface(int(index))
            s.OpenPList(filepath)
            self.surface_dict[s.index] = s

        # Opening the measurements
        self.measurement_dict = {}
        measures_file = os.path.join(
            dirpath, project.get("measurements", "measurements.plist")
        )
        if os.path.exists(measures_file):
            with open(measures_file, "r+b") as f:
                measurements = plistlib.load(f, fmt=plistlib.FMT_XML)
            for index in measurements:
                # Density measurements deserialize into a dedicated class.
                if measurements[index]["type"] in (
                    const.DENSITY_ELLIPSE,
                    const.DENSITY_POLYGON,
                ):
                    measure = ms.DensityMeasurement()
                else:
                    measure = ms.Measurement()
                measure.Load(measurements[index])
                self.measurement_dict[int(index)] = measure

    def create_project_file(
        self,
        name,
        spacing,
        modality,
        orientation,
        window_width,
        window_level,
        image,
        affine="",
        folder=None,
    ):
        """Write a minimal main.plist + matrix.dat pair for *image* into
        *folder* (a fresh temp dir when None)."""
        if folder is None:
            folder = tempfile.mkdtemp()
        if not os.path.exists(folder):
            os.mkdir(folder)
        image_file = os.path.join(folder, "matrix.dat")
        image_mmap = imagedata_utils.array2memmap(image, image_file)
        matrix = {
            "filename": "matrix.dat",
            "shape": image.shape,
            "dtype": str(image.dtype),
        }
        project = {
            # Format info
            "format_version": const.INVESALIUS_ACTUAL_FORMAT_VERSION,
            "invesalius_version": const.INVESALIUS_VERSION,
            "date": datetime.datetime.now().isoformat(),
            "compress": True,
            # case info
            "name": name,  # patient's name
            "modality": modality,  # CT, RMI, ...
            "orientation": orientation,
            "window_width": window_width,
            "window_level": window_level,
            "scalar_range": (int(image.min()), int(image.max())),
            "spacing": spacing,
            "affine": affine,
            "matrix": matrix,
        }
        path = os.path.join(folder, "main.plist")
        with open(path, "w+b") as f:
            plistlib.dump(project, f)

    def export_project(self, filename, save_masks=True):
        """Dispatch export by file extension (.hdf5/.h5 or .nii/.nii.gz).

        NOTE(review): filenames with any other extension are silently
        ignored — confirm callers pre-validate the extension.
        """
        if filename.lower().endswith(".hdf5") or filename.lower().endswith(".h5"):
            self.export_project_to_hdf5(filename, save_masks)
        elif filename.lower().endswith(".nii") or filename.lower().endswith(".nii.gz"):
            self.export_project_to_nifti(filename, save_masks)

    def export_project_to_hdf5(self, filename, save_masks=True):
        """Export the image, metadata and (optionally) all masks to HDF5."""
        import h5py
        import invesalius.data.slice_ as slc

        s = slc.Slice()
        with h5py.File(filename, "w") as f:
            f["image"] = s.matrix
            f["spacing"] = s.spacing
            f["invesalius_version"] = const.INVESALIUS_VERSION
            f["date"] = datetime.datetime.now().isoformat()
            f["compress"] = self.compress
            f["name"] = self.name  # patient's name
            f["modality"] = self.modality  # CT, RMI, ...
            f["orientation"] = self.original_orientation
            f["window_width"] = self.window
            f["window_level"] = self.level
            f["scalar_range"] = self.threshold_range
            if save_masks:
                for index in self.mask_dict:
                    mask = self.mask_dict[index]
                    s.do_threshold_to_all_slices(mask)
                    key = "masks/{}".format(index)
                    f[key + "/name"] = mask.name
                    # Skip the 1-voxel border kept internally by Mask.
                    f[key + "/matrix"] = mask.matrix[1:, 1:, 1:]
                    f[key + "/colour"] = mask.colour[:3]
                    f[key + "/opacity"] = mask.opacity
                    f[key + "/threshold_range"] = mask.threshold_range
                    f[key + "/edition_threshold_range"] = mask.edition_threshold_range
                    f[key + "/visible"] = mask.is_shown
                    f[key + "/edited"] = mask.was_edited

    def export_project_to_nifti(self, filename, save_masks=True):
        """Export the image (and optionally each mask) as NIfTI files."""
        import invesalius.data.slice_ as slc
        import nibabel as nib

        s = slc.Slice()
        img_nifti = nib.Nifti1Image(np.swapaxes(np.fliplr(s.matrix), 0, 2), None)
        img_nifti.header.set_zooms(s.spacing)
        img_nifti.header.set_dim_info(slice=0)
        nib.save(img_nifti, filename)
        if save_masks:
            for index in self.mask_dict:
                mask = self.mask_dict[index]
                s.do_threshold_to_all_slices(mask)
                mask_nifti = nib.Nifti1Image(
                    np.swapaxes(np.fliplr(mask.matrix), 0, 2), None
                )
                mask_nifti.header.set_zooms(s.spacing)
                # Split base name and extension so the mask name can be
                # inserted before the suffix.
                if filename.lower().endswith(".nii"):
                    basename = filename[:-4]
                    ext = filename[-4::]
                elif filename.lower().endswith(".nii.gz"):
                    basename = filename[:-7]
                    ext = filename[-7::]
                else:
                    ext = ".nii"
                    basename = filename
                nib.save(
                    mask_nifti,
                    "{}_mask_{}_{}{}".format(basename, mask.index, mask.name, ext),
                )
def Compress(folder, filename, filelist, compress=False):
    """Pack the project files into a single .inv3 (tar) archive.

    Args:
        folder: temp folder whose basename becomes the top-level directory
            inside the archive.
        filename: final path of the .inv3 file.
        filelist: dict mapping a source path on disk -> its name inside the
            archive.
        compress: gzip-compress the archive when True.
    """
    tmpdir_ = os.path.basename(folder)
    # mkstemp (instead of the racy mktemp) creates the file safely; tarfile
    # then reopens and truncates it.
    fd, temp_inv3 = tempfile.mkstemp()
    os.close(fd)
    if _has_win32api:
        # Short (8.3) paths avoid trouble with non-ASCII temp dirs on Windows.
        temp_inv3 = win32api.GetShortPathName(temp_inv3)
        temp_inv3 = decode(temp_inv3, const.FS_ENCODE)
    mode = "w:gz" if compress else "w"
    # Context manager guarantees the archive is closed even on error.
    with tarfile.open(temp_inv3, mode) as tar:
        for src in filelist:
            tar.add(src, arcname=os.path.join(tmpdir_, filelist[src]))
    shutil.move(temp_inv3, filename)
def Extract(filename, folder):
    """Extract the .inv3 archive *filename* into *folder*.

    Returns:
        List of extracted file paths.
    """
    if _has_win32api:
        folder = win32api.GetShortPathName(folder)
    folder = decode(folder, const.FS_ENCODE)

    filelist = []
    # Context managers ensure the tar and both file handles are closed even
    # when an extraction error occurs (the original leaked them on error).
    with tarfile.open(filename, "r") as tar:
        # Create the archive's top-level project directory.
        idir = decode(os.path.split(tar.getnames()[0])[0], "utf8")
        os.mkdir(os.path.join(folder, idir))
        for member in tar.getmembers():
            if not member.isfile():
                # Only the top-level dir is needed; non-file members would
                # have made extractfile() return None and crash before.
                continue
            fname = os.path.join(folder, decode(member.name, "utf-8"))
            # NOTE(review): member.name is trusted here; a hostile archive
            # with ".." components could escape *folder* — consider
            # validating the paths if untrusted files are ever opened.
            with tar.extractfile(member) as fsrc, open(fname, "wb") as fdst:
                shutil.copyfileobj(fsrc, fdst)
            filelist.append(fname)
    return filelist
def Extract_(filename, folder):
    """Extract a gzip-compressed tar archive into *folder*.

    :param filename: path of the ``.tar.gz`` archive.
    :param folder: destination directory (must exist).
    :return: list of the extracted paths, one per archive member, in
        archive order.
    """
    # The context manager closes the archive even if extraction raises,
    # which the original open()/close() pair did not guarantee.
    with tarfile.open(filename, "r:gz") as tar:
        # NOTE(review): extractall() trusts member names; a crafted archive
        # could escape *folder* — unchanged from the original behaviour.
        tar.extractall(folder)
        return [os.path.join(folder, name) for name in tar.getnames()]
import datetime
import shlex
from abc import abstractmethod
from autokey import common
from autokey.model.key import KEY_SPLIT_RE, Key
# Toolkit-specific plumbing: AutoKey runs under either Qt or GTK, chosen at
# import time via common.USING_QT. The Qt branch also defines the local
# localisation helper `_` and a QAction subclass used by MacroManager.
if common.USING_QT:
    from PyQt5.QtWidgets import QAction

    def _(text: str, args: tuple = None):
        """localisation function, currently returns the identity. If args are given, those are used to format
        text using the old-style % formatting."""
        if args:
            text = text % args
        return text

    class MacroAction(QAction):
        # Menu action that invokes callback(macro) when the user picks the
        # macro from the menu.

        def __init__(self, menu, macro, callback):
            super(MacroAction, self).__init__(macro.TITLE, menu)
            self.macro = macro
            self.callback = callback
            self.triggered.connect(self.on_triggered)

        def on_triggered(self):
            self.callback(self.macro)

else:
    from gi.repository import Gtk
# Escape any escaped angle brackets
def encode_escaped_brackets(s):
    """Replace backslash-escaped angle brackets in *s* with non-printing
    placeholder characters so the macro parser never mistakes them for
    real tag delimiters.

    ``\\<`` becomes the ASCII record separator (0x1E) and ``\\>`` the unit
    separator (0x1F); decode_escaped_brackets() reverses this. A literal
    ``\\>`` sequence needs an extra backslash; a trailing literal ``\\``
    before ``>`` is not supported.
    """
    placeholders = ((r"\<", chr(0x1E)), (r"\>", chr(0x1F)))
    for escaped, placeholder in placeholders:
        s = s.replace(escaped, placeholder)
    return s
def decode_escaped_brackets(s):
    """Undo encode_escaped_brackets(): turn the non-printing placeholder
    characters (0x1E / 0x1F) back into literal angle brackets."""
    return s.replace(chr(0x1E), "<").replace(chr(0x1F), ">")
def sections_decode_escaped_brackets(sections):
    """Decode the bracket placeholder characters back into literal angle
    brackets for every string in *sections*, mutating the list in place."""
    for idx in range(len(sections)):
        # Inline of decode_escaped_brackets(): 0x1E -> '<', 0x1F -> '>'.
        sections[idx] = sections[idx].replace(chr(0x1E), "<").replace(chr(0x1F), ">")
# This must be passed a string containing only one macro.
def extract_tag(s):
    """Return the text between ``<`` and ``>`` in *s*.

    If *s* contains no complete tag, it is returned unchanged. Raises
    TypeError when *s* is not a string.
    """
    if not isinstance(s, str):
        raise TypeError
    pieces = []
    for chunk in s.split("<"):
        if ">" in chunk:
            pieces.append(chunk.split(">")[0])
    return "".join(pieces) if pieces else s
def split_key_val(s):
    """Parse shell-style ``key=value`` pairs from *s* into a dict.

    Tokens are split like shell arguments, so quoted values may contain
    spaces. Each token must contain ``=``; only the first ``=`` separates
    key from value.
    """
    tokens = shlex.split(s)
    return {key: value for key, value in (token.split("=", 1) for token in tokens)}
class MacroManager:
    """Owns one instance of every macro type and applies them to
    expansion text."""

    def __init__(self, engine):
        # Engine-backed macros (script/system) receive the scripting engine.
        self.macros = [
            ScriptMacro(engine),
            DateMacro(),
            FileContentsMacro(),
            CursorMacro(),
            SystemMacro(engine),
        ]

    def get_menu(self, callback, menu=None):
        """Build a toolkit menu listing every macro.

        Under Qt the given *menu* is populated with actions; under GTK a new
        Gtk.Menu is created. Either way the resulting menu is returned, and
        picking an entry invokes callback(macro).
        """
        if common.USING_QT:
            for macro in self.macros:
                menu.addAction(MacroAction(menu, macro, callback))
        else:
            menu = Gtk.Menu()
            for macro in self.macros:
                item = Gtk.MenuItem(macro.TITLE)
                item.connect("activate", callback, macro)
                menu.append(item)
            menu.show_all()
        return menu

    # Split expansion.string, expand and process its macros, then
    # replace with the results.
    def process_expansion_macros(self, content):
        """Split *content* around <...> macro tokens, let each macro expand
        the tokens it owns, and join the pieces back into one string."""
        # The key-splitting regex happens to delimit macro tokens as well.
        sections = KEY_SPLIT_RE.split(encode_escaped_brackets(content))
        for macro in self.macros:
            sections = macro.process(sections)
        return "".join(sections)
class AbstractMacro:
    """Base class for ``<tag ...>`` macros embedded in expansion text.

    Subclasses provide ID (the tag name), TITLE (menu label) and ARGS
    (list of ``(name, description)`` tuples) and implement do_process().
    """

    @property
    @abstractmethod
    def ID(self):
        pass

    @property
    @abstractmethod
    def TITLE(self):
        pass

    @property
    @abstractmethod
    def ARGS(self):
        pass

    def get_token(self):
        """Return the template token inserted into the text when this macro
        is picked from the menu, e.g. ``<script name= args=>``.

        Only argument names are emitted; the user fills in the values.
        """
        ret = "<%s" % self.ID
        ret += "".join(" " + name + "=" for name, _description in self.ARGS)
        ret += ">"
        return ret

    def _get_args(self, macro):
        """Parse ``key=value`` pairs from *macro* and validate them.

        :raises ValueError: when a mandatory argument from ARGS is missing
            or an argument not declared in ARGS is present.
        """
        args = split_key_val(macro)
        expected_args = [arg[0] for arg in self.ARGS]
        for arg in expected_args:
            if arg not in args:
                raise ValueError(
                    "Missing mandatory argument '{}' for macro '{}'".format(
                        arg, self.ID
                    )
                )
        for arg in args:
            if arg not in expected_args:
                raise ValueError(
                    "Unexpected argument '{}' for macro '{}'".format(arg, self.ID)
                )
        return args

    def _extract_macro(self, section):
        """Split one ``<type rest>`` section into ``(type, rest)``."""
        content = extract_tag(section)
        content = decode_escaped_brackets(content)
        # The macro type is space-separated from the arguments; cursor
        # macros have no arguments and therefore no space.
        if " " in content:
            macro_type, macro = content.split(" ", 1)
        else:
            macro_type, macro = (content, "")
        return macro_type, macro

    def process(self, sections):
        """Expand every section whose macro type equals this macro's ID.

        :return: the (possibly replaced) list of sections.
        """
        for i, section in enumerate(sections):
            if KEY_SPLIT_RE.match(section):
                macro_type, _macro = self._extract_macro(sections[i])
                if macro_type == self.ID:
                    # do_process gets the whole list plus the index because
                    # some macros (cursor) act on neighbouring sections.
                    sections = self.do_process(sections, i)
        return sections

    @abstractmethod
    def do_process(self, sections, i):
        """Return an updated *sections* list with section *i* expanded."""
        return sections
class CursorMacro(AbstractMacro):
    """Macro that repositions the text cursor after expansion."""

    ID = "cursor"
    TITLE = _("Position cursor")
    ARGS = []

    def do_process(self, sections, i):
        # Append one <left> keypress per character after the macro so the
        # cursor ends up where the <cursor> token sat, then blank the token.
        try:
            tail = "".join(sections[i + 1:])
            sections.append(Key.LEFT * len(tail))
            sections[i] = ""
        except IndexError:
            pass
        return sections
class ScriptMacro(AbstractMacro):
    """Macro that runs another AutoKey script and inserts its return
    value in place of the token."""

    ID = "script"
    TITLE = _("Run script")
    ARGS = [("name", _("Name")), ("args", _("Arguments (comma separated)"))]

    def __init__(self, engine):
        self.engine = engine

    def do_process(self, sections, i):
        _macro_type, macro = self._extract_macro(sections[i])
        self.engine.run_script_from_macro(self._get_args(macro))
        # The script's return value replaces the macro token.
        sections[i] = self.engine._get_return_value()
        return sections
class SystemMacro(AbstractMacro):
    """Macro that runs a system command and inserts its return value in
    place of the token."""

    ID = "system"
    TITLE = _("Run system command")
    ARGS = [
        (
            "command",
            _("Command to be executed (including any arguments) - e.g. 'ls -l'"),
        ),
    ]

    def __init__(self, engine):
        self.engine = engine

    def do_process(self, sections, i):
        _macro_type, macro = self._extract_macro(sections[i])
        self.engine.run_system_command_from_macro(self._get_args(macro))
        sections[i] = self.engine._get_return_value()
        return sections
class DateMacro(AbstractMacro):
    """Macro that inserts the current date/time."""

    ID = "date"
    TITLE = _("Insert date")
    ARGS = [("format", _("Format"))]

    def do_process(self, sections, i):
        _macro_type, macro = self._extract_macro(sections[i])
        # The user supplies a strftime-style format string.
        fmt = self._get_args(macro)["format"]
        sections[i] = datetime.datetime.now().strftime(fmt)
        return sections
class FileContentsMacro(AbstractMacro):
    """Macro that inserts the contents of a text file."""

    ID = "file"
    TITLE = _("Insert file contents")
    ARGS = [("name", _("File name"))]

    def do_process(self, sections, i):
        _macro_type, macro = self._extract_macro(sections[i])
        path = self._get_args(macro)["name"]
        with open(path, "r") as macro_file:
            sections[i] = macro_file.read()
        return sections
# -*- coding: utf-8 -*-
from ..base.xfs_downloader import XFSDownloader
class DropDownload(XFSDownloader):
    # Plugin metadata consumed by pyLoad's plugin manager.
    __name__ = "DropDownload"
    __type__ = "downloader"
    __version__ = "0.03"
    __status__ = "testing"

    # Matches drop.download file links (12-character file id).
    __pattern__ = r"https?://(?:www\.)?drop\.download/\w{12}"
    # User-configurable options: (key, type, label, default).
    __config__ = [
        ("enabled", "bool", "Activated", True),
        ("use_premium", "bool", "Use premium account if available", True),
        ("fallback", "bool", "Fallback to free download if premium fails", True),
        ("chk_filesize", "bool", "Check file size", True),
        ("max_wait", "int", "Reconnect if waiting time is greater than minutes", 10),
    ]

    __description__ = """Drop.download downloader plugin"""
    __license__ = "GPLv3"
    __authors__ = [("GammaC0de", "nitzo2001[AT]yahoo[DOT]com")]

    PLUGIN_DOMAIN = "drop.download"
    # Extracts the direct download URL from the download page.
    LINK_PATTERN = r'<a href="(https://s\d+\.drop\.download.+?)"'

    def setup(self):
        # Downloader capabilities: simultaneous downloads allowed, resumable
        # transfers, and no chunk limit (-1 means unlimited).
        self.multi_dl = True
        self.resume_download = True
        self.chunk_limit = -1
# SPDX-FileCopyrightText: Florian Bruhin (The Compiler) <mail@qutebrowser.org>
#
# SPDX-License-Identifier: GPL-3.0-or-later
"""Different ways of showing notifications to the user.
Our notification implementation consists of two different parts:
- NotificationBridgePresenter, the object we set as notification presenter on
QWebEngineProfiles on startup.
- Adapters (subclassing from AbstractNotificationAdapter) which get called by the bridge
and contain the code to show notifications using different means (e.g. a systray icon
or DBus).
Adapters are initialized lazily when the bridge gets the first notification. This makes
sure we don't block while e.g. talking to DBus during startup, but only when needed.
If an adapter raises Error during __init__, the bridge assumes that it's unavailable and
tries the next one in a list of candidates.
Useful test pages:
- https://tests.peter.sh/notification-generator/
- https://www.bennish.net/web-notifications.html
- https://web-push-book.gauntface.com/demos/notification-examples/
- tests/end2end/data/javascript/notifications.html
"""
import dataclasses
import functools
import html
import itertools
import os
import signal
import subprocess
from typing import TYPE_CHECKING, Any, Dict, Iterator, List, Optional, Type
from qutebrowser.qt import machinery
from qutebrowser.qt.core import (
QByteArray,
QMetaType,
QObject,
QProcess,
Qt,
QTimer,
QUrl,
QVariant,
pyqtSignal,
pyqtSlot,
)
from qutebrowser.qt.dbus import (
QDBus,
QDBusArgument,
QDBusConnection,
QDBusError,
QDBusInterface,
QDBusMessage,
QDBusServiceWatcher,
)
from qutebrowser.qt.gui import QIcon, QImage, QPixmap
from qutebrowser.qt.widgets import QSystemTrayIcon
if TYPE_CHECKING:
# putting these behind TYPE_CHECKING also means this module is importable
# on installs that don't have these
from qutebrowser.qt.webenginecore import QWebEngineNotification, QWebEngineProfile
from qutebrowser.config import config
from qutebrowser.misc import objects
from qutebrowser.qt import sip
from qutebrowser.utils import (
debug,
log,
message,
objreg,
qtutils,
resources,
urlutils,
utils,
)
# Module-global bridge instance, created by init() unless the "qt"
# presenter is configured (in which case it stays None).
bridge: Optional["NotificationBridgePresenter"] = None
def init() -> None:
    """Initialize the DBus notification presenter, if applicable.

    If the user doesn't want a notification presenter or it's not supported,
    this method does nothing.

    Always succeeds, but might log an error.
    """
    global bridge
    if config.val.content.notifications.presenter == "qt":
        # In theory, we could somehow postpone the install if the user switches to "qt"
        # at a later point in time. However, doing so is probably too complex compared
        # to its usefulness.
        return
    bridge = NotificationBridgePresenter()
# Base class for all notification errors; adapters raise it in __init__ to
# signal they are unavailable, causing the bridge to try the next candidate.
class Error(Exception):

    """Raised when something goes wrong with notifications."""
class DBusError(Error):

    """Raised when there was an error coming from DBus."""

    # Error names considered transient/expected: they are reported but do
    # not mark the error fatal (is_fatal below), so the adapter survives.
    _NON_FATAL_ERRORS = {
        # notification daemon is gone
        "org.freedesktop.DBus.Error.NoReply",

        # https://gitlab.gnome.org/GNOME/gnome-flashback/-/blob/3.40.0/gnome-flashback/libnotifications/nd-daemon.c#L178-187
        # Exceeded maximum number of notifications
        "org.freedesktop.Notifications.MaxNotificationsExceeded",

        # https://bugs.kde.org/show_bug.cgi?id=409157
        # https://github.com/KDE/plasma-workspace/blob/v5.21.4/libnotificationmanager/server_p.cpp#L227-L237
        # Created too many similar notifications in quick succession
        "org.freedesktop.Notifications.Error.ExcessNotificationGeneration",

        # From https://crashes.qutebrowser.org/view/b8c9838a
        # Process org.freedesktop.Notifications received signal 5
        # probably when notification daemon crashes?
        "org.freedesktop.DBus.Error.Spawn.ChildSignaled",

        # https://crashes.qutebrowser.org/view/f76f58ae
        # Process org.freedesktop.Notifications exited with status 1
        "org.freedesktop.DBus.Error.Spawn.ChildExited",

        # https://crashes.qutebrowser.org/view/8889d0b5
        # Could not activate remote peer.
        "org.freedesktop.DBus.Error.NameHasNoOwner",

        # https://crashes.qutebrowser.org/view/de62220a
        # after "Notification daemon did quit!"
        "org.freedesktop.DBus.Error.UnknownObject",

        # notmuch-sha1-ef7b6e9e79e5f2f6cba90224122288895c1fe0d8
        "org.freedesktop.DBus.Error.ServiceUnknown",
    }

    def __init__(self, msg: QDBusMessage) -> None:
        """Wrap a DBus error reply.

        Extracts the error name/message from *msg* and flags the error as
        fatal unless the name is listed in _NON_FATAL_ERRORS.
        """
        assert msg.type() == QDBusMessage.MessageType.ErrorMessage
        self.error = msg.errorName()
        self.error_message = msg.errorMessage()
        self.is_fatal = self.error not in self._NON_FATAL_ERRORS
        text = f"{self.error}: {self.error_message}"
        super().__init__(text)
class AbstractNotificationAdapter(QObject):

    """An adapter taking notifications and displaying them.

    This can happen via different mechanisms, e.g. a system tray icon or DBus.
    """

    # A short name for the adapter, shown in errors. Should be the same as the
    # associated content.notification.presenter setting.
    NAME: str

    # Emitted by the adapter when the notification with the given ID was closed or
    # clicked by the user.
    close_id = pyqtSignal(int)
    click_id = pyqtSignal(int)

    # Emitted by the adapter when an error occurred, which should result in the adapter
    # getting swapped out (potentially initializing the same adapter again, or using a
    # different one if that fails).
    error = pyqtSignal(str)

    # Emitted when all currently shown notifications should be considered gone
    # (e.g. the notification server disappeared).
    clear_all = pyqtSignal()

    def present(
        self,
        qt_notification: "QWebEngineNotification",
        *,
        replaces_id: Optional[int],
    ) -> int:
        """Show the given notification.

        If replaces_id is given, replace the currently showing notification with the
        same ID.

        Returns an ID assigned to the new notifications. IDs must be positive (>= 1) and
        must not duplicate any active notification's ID.
        """
        raise NotImplementedError

    def _should_include_origin(self, origin: QUrl) -> bool:
        """Check if the origin is useful to include.

        If we open the page via a file scheme, the origin is QUrl('file:///') which
        doesn't help much.
        """
        # Only show the origin when it has a host and the (URL-pattern-aware)
        # show_origin setting is enabled for it.
        return bool(
            origin.host()
            and config.instance.get("content.notifications.show_origin", url=origin),
        )

    @pyqtSlot(int)
    def on_web_closed(self, notification_id: int) -> None:
        """Called when a notification was closed by the website."""
        raise NotImplementedError
class NotificationBridgePresenter(QObject):

    """Notification presenter which bridges notifications to an adapter.

    Takes care of:
    - Storing currently shown notifications, using an ID returned by the adapter.
    - Initializing a suitable adapter when the first notification is shown.
    - Switching out adapters if the current one emitted its error signal.
    """

    def __init__(self, parent: QObject = None) -> None:
        super().__init__(parent)
        # Maps adapter-assigned notification IDs to their Qt notification
        # objects; entries are removed when a notification closes.
        self._active_notifications: Dict[int, "QWebEngineNotification"] = {}
        self._adapter: Optional[AbstractNotificationAdapter] = None
        # Re-pick the adapter when the presenter setting changes.
        config.instance.changed.connect(self._init_adapter)

    @config.change_filter("content.notifications.presenter")
    def _init_adapter(self) -> None:
        """Initialize the adapter to use based on the config."""
        setting = config.val.content.notifications.presenter
        log.misc.debug(f"Setting up notification adapter ({setting})...")

        if setting == "qt":
            message.error("Can't switch to qt notification presenter at runtime.")
            setting = "auto"

        # Try candidates in order; an adapter signals unavailability by
        # raising Error from its constructor.
        for candidate in self._get_adapter_candidates(setting):
            try:
                self._adapter = candidate()
            except Error as e:
                msg = f"Failed to initialize {candidate.NAME} notification adapter: {e}"
                if candidate.NAME == setting:  # We picked this one explicitly
                    message.error(msg)
                else:  # automatic fallback
                    log.misc.debug(msg)
            else:
                log.misc.debug(f"Initialized {self._adapter.NAME} notification adapter")
                break

        # The "messages" adapter always succeeds, so a candidate list ending
        # with it guarantees we end up with an adapter here.
        assert self._adapter is not None
        self._adapter.click_id.connect(self._on_adapter_clicked)
        self._adapter.close_id.connect(self._on_adapter_closed)
        self._adapter.error.connect(self._on_adapter_error)
        self._adapter.clear_all.connect(self._on_adapter_clear_all)

    def _get_adapter_candidates(
        self,
        setting: str,
    ) -> List[Type[AbstractNotificationAdapter]]:
        """Return the adapter classes to try, in order, for *setting*."""
        candidates: Dict[str, List[Type[AbstractNotificationAdapter]]] = {
            "libnotify": [
                DBusNotificationAdapter,
                SystrayNotificationAdapter,
                MessagesNotificationAdapter,
            ],
            "systray": [
                SystrayNotificationAdapter,
                DBusNotificationAdapter,
                MessagesNotificationAdapter,
            ],
            "herbe": [
                HerbeNotificationAdapter,
                DBusNotificationAdapter,
                SystrayNotificationAdapter,
                MessagesNotificationAdapter,
            ],
            "messages": [MessagesNotificationAdapter],  # always succeeds
        }
        candidates["auto"] = candidates["libnotify"]
        return candidates[setting]

    def install(self, profile: "QWebEngineProfile") -> None:
        """Set the profile to use this bridge as the presenter."""
        profile.setNotificationPresenter(self.present)

    def present(self, qt_notification: "QWebEngineNotification") -> None:
        """Show a notification using the configured adapter.

        Lazily initializes a suitable adapter if none exists yet.

        This should *not* be directly passed to setNotificationPresenter on
        PyQtWebEngine < 5.15 because of a bug in the PyQtWebEngine bindings.
        """
        if self._adapter is None:
            self._init_adapter()
            assert self._adapter is not None

        replaces_id = self._find_replaces_id(qt_notification)
        qtutils.ensure_valid(qt_notification.origin())

        notification_id = self._adapter.present(
            qt_notification, replaces_id=replaces_id
        )
        log.misc.debug(f"New notification ID from adapter: {notification_id}")

        if self._adapter is None:
            # If a fatal error occurred, we replace the adapter via its "error" signal.
            log.misc.debug("Adapter vanished, bailing out")  # type: ignore[unreachable]
            return

        if replaces_id is None:
            if notification_id in self._active_notifications:
                raise Error(f"Got duplicate id {notification_id}")

        qt_notification.show()
        self._active_notifications[notification_id] = qt_notification

        # Tell the adapter when the website closes its notification.
        qt_notification.closed.connect(
            functools.partial(self._adapter.on_web_closed, notification_id)
        )

    def _find_replaces_id(
        self,
        new_notification: "QWebEngineNotification",
    ) -> Optional[int]:
        """Find an existing notification to replace.

        If no notification should be replaced or the notification to be replaced was not
        found, this returns None.
        """
        if not new_notification.tag():
            return None

        log.misc.debug(
            f"Finding notification for tag {new_notification.tag()}, "
            f"origin {new_notification.origin()}"
        )

        # Newer notifications first, so the most recent match wins.
        for notification_id, notification in sorted(
            self._active_notifications.items(), reverse=True
        ):
            if notification.matches(new_notification):
                log.misc.debug(f"Found match: {notification_id}")
                return notification_id

        log.misc.debug("Did not find match")
        return None

    @pyqtSlot(int)
    def _on_adapter_closed(self, notification_id: int) -> None:
        """A notification was closed by the adapter (usually due to the user).

        Accepts unknown notification IDs, as this can be called for notifications from
        other applications (with the DBus adapter).
        """
        log.misc.debug(f"Notification {notification_id} closed by adapter")
        try:
            notification = self._active_notifications.pop(notification_id)
        except KeyError:
            log.misc.debug("Did not find matching notification, ignoring")
            # Notification from a different application
            return

        notification.close()

    @pyqtSlot(int)
    def _on_adapter_clicked(self, notification_id: int) -> None:
        """A notification was clicked by the adapter (usually due to the user).

        Accepts unknown notification IDs, as this can be called for notifications from
        other applications (with the DBus adapter).
        """
        log.misc.debug(f"Notification {notification_id} clicked by adapter")
        try:
            notification = self._active_notifications[notification_id]
        except KeyError:
            # Notification from a different application
            log.misc.debug("Did not find matching notification, ignoring")
            return

        notification.click()
        self._focus_first_matching_tab(notification)

    def _focus_first_matching_tab(self, notification: "QWebEngineNotification") -> None:
        # Focus the first tab (across all windows) whose URL matches the
        # notification's origin, ignoring the path component.
        for win_id in objreg.window_registry:
            tabbedbrowser = objreg.get("tabbed-browser", window=win_id, scope="window")
            for idx, tab in enumerate(tabbedbrowser.widgets()):
                if tab.url().matches(
                    notification.origin(), QUrl.UrlFormattingOption.RemovePath
                ):
                    tabbedbrowser.widget.setCurrentIndex(idx)
                    return
        log.misc.debug(f"No matching tab found for {notification.origin()}")

    def _drop_adapter(self) -> None:
        """Drop the currently active adapter (if any).

        This means we'll reinitialize a new one (including re-testing available options)
        on the next notification.
        """
        if self._adapter:
            log.misc.debug(f"Dropping adapter {self._adapter.NAME}")
            self._adapter.deleteLater()

        self._adapter = None
        self._on_adapter_clear_all()

    @pyqtSlot()
    def _on_adapter_clear_all(self) -> None:
        """Called when the adapter requests clearing all notifications.

        This is currently only done if the DBus notification server was unregistered.

        It's probably safe to assume no notifications exist anymore. Also, this makes
        sure we don't have any duplicate IDs.

        Depending on the system, either the server will automatically be restarted on
        the next notification, or we'll get a (properly handled) NoReply error then.
        """
        for notification_id in list(self._active_notifications):
            self._on_adapter_closed(notification_id)

    @pyqtSlot(str)
    def _on_adapter_error(self, error: str) -> None:
        """A fatal error happened in the adapter.

        This causes us to drop the current adapter and reinit it (or a different one) on
        the next notification.
        """
        if self._adapter is None:
            # Error during setup
            return

        message.error(f"Notification error from {self._adapter.NAME} adapter: {error}")
        self._drop_adapter()
class SystrayNotificationAdapter(AbstractNotificationAdapter):

    """Shows notifications using QSystemTrayIcon.

    This is essentially a reimplementation of QtWebEngine's default implementation:
    https://github.com/qt/qtwebengine/blob/v5.15.2/src/webenginewidgets/api/qwebenginenotificationpresenter.cpp

    It exists because QtWebEngine won't allow us to restore its default presenter, so if
    something goes wrong when trying to e.g. connect to the DBus one, we still want to
    be able to switch back after our presenter is already installed. Also, it's nice if
    users can switch presenters in the config live.
    """

    NAME = "systray"
    NOTIFICATION_ID = 1  # only one concurrent notification supported

    def __init__(self, parent: QObject = None) -> None:
        """Set up the tray icon; raise Error if the platform lacks one."""
        super().__init__(parent)
        if not QSystemTrayIcon.isSystemTrayAvailable():
            raise Error("No system tray available")
        if not QSystemTrayIcon.supportsMessages():
            raise Error("System tray does not support messages")

        self._systray = QSystemTrayIcon(self)
        self._systray.setIcon(objects.qapp.windowIcon())
        self._systray.messageClicked.connect(self._on_systray_clicked)

    def present(
        self,
        qt_notification: "QWebEngineNotification",
        *,
        replaces_id: Optional[int],
    ) -> int:
        """Show the notification as a tray balloon message.

        Always uses the single fixed NOTIFICATION_ID, closing any previous
        message first.
        """
        utils.unused(replaces_id)  # QSystemTray can only show one message
        self.close_id.emit(self.NOTIFICATION_ID)
        self._systray.show()

        icon = self._convert_icon(qt_notification.icon())
        msg = self._format_message(qt_notification.message(), qt_notification.origin())

        self._systray.showMessage(qt_notification.title(), msg, icon)

        return self.NOTIFICATION_ID

    def _convert_icon(self, image: QImage) -> QIcon:
        """Convert a QImage to a QIcon."""
        if image.isNull():
            return QIcon()
        pixmap = QPixmap.fromImage(image, Qt.ImageConversionFlag.NoFormatConversion)
        assert not pixmap.isNull()
        icon = QIcon(pixmap)
        assert not icon.isNull()
        return icon

    def _format_message(self, text: str, origin: QUrl) -> str:
        """Format the message to display."""
        if not self._should_include_origin(origin):
            return text
        return origin.toDisplayString() + "\n\n" + text

    @pyqtSlot()
    def _on_systray_clicked(self) -> None:
        # Only one notification can be showing, so the click maps to it.
        self.click_id.emit(self.NOTIFICATION_ID)

    @pyqtSlot(int)
    def on_web_closed(self, notification_id: int) -> None:
        """Hide the tray message when the website closes its notification."""
        assert notification_id == self.NOTIFICATION_ID, notification_id
        if not sip.isdeleted(self._systray):
            # This can get called during shutdown
            self._systray.hide()
class MessagesNotificationAdapter(AbstractNotificationAdapter):

    """Shows notifications using qutebrowser messages.

    This is mostly used as a fallback if no other method is available. Most notification
    features are not supported.

    Note that it's expected for this adapter to never fail (i.e. not raise Error in
    __init__ and not emit the error signal), as it's used as a "last resort" fallback.
    """

    NAME = "messages"

    def __init__(self, parent: QObject = None) -> None:
        super().__init__(parent)
        # Monotonic ID source starting at 1 (IDs must be positive).
        self._id_gen = itertools.count(1)

    def present(
        self,
        qt_notification: "QWebEngineNotification",
        *,
        replaces_id: Optional[int],
    ) -> int:
        """Show the notification as an in-browser info message.

        Reuses *replaces_id* (the message is replaced via its unique
        ``replace`` key) or allocates a fresh ID.
        """
        markup = self._format_message(qt_notification)
        new_id = replaces_id if replaces_id is not None else next(self._id_gen)

        message.info(markup, replace=f"notifications-{new_id}", rich=True)

        # Faking closing, timing might not be 100% accurate
        QTimer.singleShot(
            config.val.messages.timeout, lambda: self.close_id.emit(new_id)
        )

        return new_id

    @pyqtSlot(int)
    def on_web_closed(self, _notification_id: int) -> None:
        """We can't close messages."""

    def _format_message(self, qt_notification: "QWebEngineNotification") -> str:
        # HTML-escape all website-controlled text since the message is
        # rendered as rich text.
        title = html.escape(qt_notification.title())
        body = html.escape(qt_notification.message())
        hint = "" if qt_notification.icon().isNull() else " (image not shown)"

        if self._should_include_origin(qt_notification.origin()):
            url = html.escape(qt_notification.origin().toDisplayString())
            origin_str = f" from {url}"
        else:
            origin_str = ""

        return (
            f"<i>Notification{origin_str}:{hint}</i><br/><br/>"
            f"<b>{title}</b><br/>"
            f"{body}"
        )
class HerbeNotificationAdapter(AbstractNotificationAdapter):

    """Shows notifications using herbe.

    See https://github.com/dudik/herbe
    """

    NAME = "herbe"

    def __init__(self, parent: QObject = None) -> None:
        """Probe for the herbe binary; raise Error if it's unusable.

        Running `herbe` with no arguments is expected to exit with status 1;
        any other non-zero status or an OSError means it's unavailable.
        """
        super().__init__(parent)
        # Also cleans up potentially hanging semaphores from herbe.
        # https://github.com/dudik/herbe#notifications-dont-show-up
        try:
            subprocess.run(["herbe"], stderr=subprocess.DEVNULL, check=True)
        except OSError as e:
            raise Error(f"herbe error: {e}")
        except subprocess.CalledProcessError as e:
            if e.returncode != 1:
                raise Error(f"herbe exited with status {e.returncode}")

    def present(
        self,
        qt_notification: "QWebEngineNotification",
        *,
        replaces_id: Optional[int],
    ) -> int:
        """Spawn a herbe process for the notification.

        The herbe process ID doubles as the notification ID.
        """
        if replaces_id is not None:
            self.on_web_closed(replaces_id)

        proc = QProcess(self)
        proc.errorOccurred.connect(self._on_error)

        lines = list(self._message_lines(qt_notification))
        proc.start("herbe", lines)

        pid = proc.processId()
        assert pid > 1
        proc.finished.connect(functools.partial(self._on_finished, pid))

        return pid

    def _message_lines(
        self,
        qt_notification: "QWebEngineNotification",
    ) -> Iterator[str]:
        """Get the lines to display for this notification."""
        yield qt_notification.title()

        origin = qt_notification.origin()
        if self._should_include_origin(origin):
            yield origin.toDisplayString()

        yield qt_notification.message()

        if not qt_notification.icon().isNull():
            yield "(icon not shown)"

    def _on_finished(self, pid: int, code: int, status: QProcess.ExitStatus) -> None:
        """Handle a closing herbe process.

        From the GitHub page:

        - "An accepted notification always returns exit code 0."
        - "Dismissed notifications return exit code 2."

        Any other exit status should never happen.

        We ignore CrashExit as SIGUSR1/SIGUSR2 are expected "crashes", and for any other
        signals, we can't do much - emitting self.error would just go use herbe again,
        so there's no point.
        """
        if status == QProcess.ExitStatus.CrashExit:
            pass
        elif code == 0:
            self.click_id.emit(pid)
        elif code == 2:
            pass
        else:
            proc = self.sender()
            assert isinstance(proc, QProcess), proc
            stderr = proc.readAllStandardError()
            raise Error(f"herbe exited with status {code}: {stderr}")

        self.close_id.emit(pid)

    @pyqtSlot(QProcess.ProcessError)
    def _on_error(self, error: QProcess.ProcessError) -> None:
        # Crashed is expected (SIGUSR1/2 are herbe's dismissal mechanism).
        if error == QProcess.ProcessError.Crashed:
            return
        name = debug.qenum_key(QProcess, error)
        self.error.emit(f"herbe process error: {name}")

    @pyqtSlot(int)
    def on_web_closed(self, notification_id: int) -> None:
        """Handle closing the notification from JS.

        From herbe's README:
        "A notification can be dismissed [...] [by] sending a SIGUSR1 signal to it"
        """
        os.kill(notification_id, signal.SIGUSR1)
        # Make sure we immediately remove it from active notifications
        self.close_id.emit(notification_id)
@dataclasses.dataclass
class _ServerQuirks:

    """Quirks for certain DBus notification servers."""

    # Report this spec version to the server instead of the real one.
    spec_version: Optional[str] = None
    # Don't send notification actions (buttons).
    avoid_actions: bool = False
    # Don't embed hyperlinks in the notification body.
    avoid_body_hyperlinks: bool = False
    # HTML-escape the title before sending it.
    escape_title: bool = False
    # Use this hint key for the icon instead of the spec's default.
    icon_key: Optional[str] = None
    # Don't call GetCapabilities at all.
    skip_capabilities: bool = False
    # Server mishandles the replaces_id argument.
    wrong_replaces_id: bool = False
    # Server can't cope with padded image data.
    no_padded_images: bool = False
    # NotificationClosed signal uses a wrong argument type.
    wrong_closes_type: bool = False
@dataclasses.dataclass
class _ServerCapabilities:

    """Notification capabilities supported by the server."""

    actions: bool
    body_markup: bool
    body_hyperlinks: bool
    kde_origin_name: bool

    @classmethod
    def from_list(cls, capabilities: List[str]) -> "_ServerCapabilities":
        """Build capabilities from the raw capability-name list returned by
        the server's GetCapabilities call."""
        # Field order matches the capability names checked here.
        wanted = ("actions", "body-markup", "body-hyperlinks", "x-kde-origin-name")
        return cls(*(name in capabilities for name in wanted))
def _as_uint32(x: int) -> QVariant:
    """Convert the given int to an uint32 for DBus."""
    variant = QVariant(x)

    # Qt 5 and Qt 6 express the target type differently.
    if machinery.IS_QT5:
        uint_type = QVariant.Type.UInt
    else:  # Qt 6
        # FIXME:mypy PyQt6-stubs issue
        uint_type = QMetaType(QMetaType.Type.UInt.value)  # type: ignore[call-overload]

    converted = variant.convert(uint_type)
    assert converted
    return variant
class DBusNotificationAdapter(AbstractNotificationAdapter):
"""Send notifications over DBus.
This is essentially what libnotify does, except using Qt's DBus implementation.
Related specs:
https://developer.gnome.org/notification-spec/
https://specifications.freedesktop.org/notification-spec/notification-spec-latest.html
https://wiki.ubuntu.com/NotificationDevelopmentGuidelines
"""
SERVICE = "org.freedesktop.Notifications"
TEST_SERVICE = "org.qutebrowser.TestNotifications"
PATH = "/org/freedesktop/Notifications"
INTERFACE = "org.freedesktop.Notifications"
SPEC_VERSION = "1.2" # Released in January 2011, still current in March 2021.
NAME = "libnotify"
def __init__(self, parent: QObject = None) -> None:
super().__init__(parent)
if utils.is_windows:
# The QDBusConnection destructor seems to cause error messages (and
# potentially segfaults) on Windows, so we bail out early in that case.
# We still try to get a connection on macOS, since it's theoretically
# possible to run DBus there.
raise Error("libnotify is not supported on Windows")
bus = QDBusConnection.sessionBus()
if not bus.isConnected():
raise Error(
"Failed to connect to DBus session bus: "
+ self._dbus_error_str(bus.lastError())
)
self._watcher = QDBusServiceWatcher(
self.SERVICE,
bus,
QDBusServiceWatcher.WatchModeFlag.WatchForUnregistration,
self,
)
self._watcher.serviceUnregistered.connect(self._on_service_unregistered)
test_service = "test-notification-service" in objects.debug_flags
service = f"{self.TEST_SERVICE}{os.getpid()}" if test_service else self.SERVICE
self.interface = QDBusInterface(service, self.PATH, self.INTERFACE, bus)
if not self.interface.isValid():
raise Error(
"Could not construct a DBus interface: "
+ self._dbus_error_str(self.interface.lastError())
)
connections = [
("NotificationClosed", self._handle_close),
("ActionInvoked", self._handle_action),
]
for name, func in connections:
if not bus.connect(service, self.PATH, self.INTERFACE, name, func):
raise Error(
f"Could not connect to {name}: "
+ self._dbus_error_str(bus.lastError())
)
self._quirks = _ServerQuirks()
self._get_server_info()
if self._quirks.skip_capabilities:
self._capabilities = _ServerCapabilities.from_list([])
else:
self._fetch_capabilities()
@pyqtSlot(str)
def _on_service_unregistered(self) -> None:
"""Make sure we know when the notification daemon exits.
If that's the case, we bail out, as otherwise notifications would fail or the
next start of the server would lead to duplicate notification IDs.
"""
log.misc.debug("Notification daemon did quit!")
self.clear_all.emit()
    def _find_quirks(  # noqa: C901 ("too complex")
        self,
        name: str,
        vendor: str,
        ver: str,
    ) -> Optional[_ServerQuirks]:
        """Find quirks to use based on the server information.

        Returns a _ServerQuirks instance for known-broken servers, or None if
        no quirks are needed. Each branch links to the upstream bug report
        motivating it.
        """
        if (name, vendor) == ("notify-osd", "Canonical Ltd"):
            # Shows a dialog box instead of a notification bubble as soon as a
            # notification has an action (even if only a default one). Dialog boxes are
            # buggy and return a notification with ID 0.
            # https://wiki.ubuntu.com/NotificationDevelopmentGuidelines#Avoiding_actions
            return _ServerQuirks(avoid_actions=True, spec_version="1.1")
        elif (name, vendor) == ("Notification Daemon", "MATE"):
            # Still in active development but doesn't implement spec 1.2:
            # https://github.com/mate-desktop/mate-notification-daemon/issues/132
            quirks = _ServerQuirks(spec_version="1.1")
            if utils.VersionNumber.parse(ver) <= utils.VersionNumber(1, 24):
                # https://github.com/mate-desktop/mate-notification-daemon/issues/118
                quirks.avoid_body_hyperlinks = True
            return quirks
        elif (name, vendor) == ("naughty", "awesome") and ver != "devel":
            # Still in active development but spec 1.0/1.2 support isn't
            # released yet:
            # https://github.com/awesomeWM/awesome/commit/e076bc664e0764a3d3a0164dabd9b58d334355f4
            parsed_version = utils.VersionNumber.parse(ver.lstrip("v"))
            if parsed_version <= utils.VersionNumber(4, 3):
                return _ServerQuirks(spec_version="1.0")
        elif (name, vendor) == ("twmnd", "twmnd"):
            # https://github.com/sboli/twmn/pull/96
            return _ServerQuirks(spec_version="0")
        elif (name, vendor) == ("tiramisu", "Sweets"):
            if utils.VersionNumber.parse(ver) < utils.VersionNumber(2):
                # https://github.com/Sweets/tiramisu/issues/20
                return _ServerQuirks(skip_capabilities=True)
        elif (name, vendor) == ("lxqt-notificationd", "lxqt.org"):
            quirks = _ServerQuirks()
            parsed_version = utils.VersionNumber.parse(ver)
            if parsed_version <= utils.VersionNumber(0, 16):
                # https://github.com/lxqt/lxqt-notificationd/issues/253
                quirks.escape_title = True
            if parsed_version < utils.VersionNumber(0, 16):
                # https://github.com/lxqt/lxqt-notificationd/commit/c23e254a63c39837fb69d5c59c5e2bc91e83df8c
                quirks.icon_key = "image_data"
            return quirks
        elif (name, vendor) == ("haskell-notification-daemon", "abc"):  # aka "deadd"
            return _ServerQuirks(
                # https://github.com/phuhl/linux_notification_center/issues/160
                spec_version="1.0",
                # https://github.com/phuhl/linux_notification_center/issues/161
                wrong_replaces_id=True,
            )
        elif (name, vendor) == ("ninomiya", "deifactor"):
            return _ServerQuirks(
                no_padded_images=True,
                wrong_replaces_id=True,
            )
        elif (name, vendor) == ("Raven", "Budgie Desktop Developers"):
            # Before refactor
            return _ServerQuirks(
                # https://github.com/solus-project/budgie-desktop/issues/2114
                escape_title=True,
                # https://github.com/solus-project/budgie-desktop/issues/2115
                wrong_replaces_id=True,
            )
        elif (name, vendor) == (
            "Budgie Notification Server",
            "Budgie Desktop Developers",
        ):
            # After refactor: https://github.com/BuddiesOfBudgie/budgie-desktop/pull/36
            if utils.VersionNumber.parse(ver) < utils.VersionNumber(10, 6, 2):
                return _ServerQuirks(
                    # https://github.com/BuddiesOfBudgie/budgie-desktop/issues/118
                    wrong_closes_type=True,
                )
        return None
    def _get_server_info(self) -> None:
        """Query notification server information and set quirks."""
        reply = self.interface.call(QDBus.CallMode.BlockWithGui, "GetServerInformation")
        self._verify_message(reply, "ssss", QDBusMessage.MessageType.ReplyMessage)
        name, vendor, ver, spec_version = reply.arguments()
        log.misc.debug(
            f"Connected to notification server: {name} {ver} by {vendor}, "
            f"implementing spec {spec_version}"
        )
        quirks = self._find_quirks(name, vendor, ver)
        if quirks is not None:
            log.misc.debug(f"Enabling quirks {quirks}")
            self._quirks = quirks
        # Warn about unexpected spec versions so incompatibilities get reported.
        expected_spec_versions = [self.SPEC_VERSION]
        if self._quirks.spec_version is not None:
            expected_spec_versions.append(self._quirks.spec_version)
        if spec_version not in expected_spec_versions:
            log.misc.warning(
                f"Notification server ({name} {ver} by {vendor}) implements "
                f"spec {spec_version}, but {'/'.join(expected_spec_versions)} was "
                f"expected. If {name} is up to date, please report a qutebrowser bug."
            )
        # Older spec versions name the image-data hint differently.
        # https://specifications.freedesktop.org/notification-spec/latest/ar01s08.html
        icon_key_overrides = {
            "1.0": "icon_data",
            "1.1": "image_data",
        }
        if spec_version in icon_key_overrides:
            self._quirks.icon_key = icon_key_overrides[spec_version]
def _dbus_error_str(self, error: QDBusError) -> str:
"""Get a string for a DBus error."""
if not error.isValid():
return "Unknown error"
return f"{error.name()} - {error.message()}"
def _verify_message(
self,
msg: QDBusMessage,
expected_signature: str,
expected_type: QDBusMessage.MessageType,
) -> None:
"""Check the signature/type of a received message.
Raises DBusError if the signature doesn't match.
"""
assert expected_type not in [
QDBusMessage.MessageType.ErrorMessage,
QDBusMessage.MessageType.InvalidMessage,
], expected_type
if msg.type() == QDBusMessage.MessageType.ErrorMessage:
raise DBusError(msg)
signature = msg.signature()
if signature != expected_signature:
raise Error(
f"Got a message with signature {signature} but expected "
f"{expected_signature} (args: {msg.arguments()})"
)
typ = msg.type()
if typ != expected_type:
type_str = debug.qenum_key(QDBusMessage, typ)
expected_type_str = debug.qenum_key(QDBusMessage, expected_type)
raise Error(
f"Got a message of type {type_str} but expected {expected_type_str}"
f"(args: {msg.arguments()})"
)
def _verify_notification_id(
self,
notification_id: int,
*,
replaces_id: int,
) -> None:
"""Ensure the returned notification id is valid."""
if replaces_id not in [0, notification_id]:
msg = (
f"Wanted to replace notification {replaces_id} but got new id "
f"{notification_id}."
)
if self._quirks.wrong_replaces_id:
log.misc.debug(msg)
else:
log.misc.error(msg)
if notification_id <= 0:
self.error.emit(f"Got invalid notification id {notification_id}")
def _get_title_arg(self, title: str) -> str:
"""Get the title argument for present()."""
# Titles don't support markup (except with broken servers)
if self._quirks.escape_title:
return html.escape(title, quote=False)
return title
def _get_actions_arg(self) -> QDBusArgument:
"""Get the actions argument for present()."""
actions = []
if self._capabilities.actions:
actions = ["default", "Activate"] # key, name
return QDBusArgument(
actions,
qtutils.extract_enum_val(QMetaType.Type.QStringList),
)
    def _get_hints_arg(self, *, origin_url: QUrl, icon: QImage) -> Dict[str, Any]:
        """Get the hints argument for present().

        Builds the notification-spec hints dict: the origin URL (always as a
        custom key, additionally as x-kde-origin-name where supported),
        desktop-entry, and the notification image.
        """
        origin_url_str = origin_url.toDisplayString()
        hints: Dict[str, Any] = {
            # Include the origin in case the user wants to do different things
            # with different origin's notifications.
            "x-qutebrowser-origin": origin_url_str,
            "desktop-entry": "org.qutebrowser.qutebrowser",
        }
        is_useful_origin = self._should_include_origin(origin_url)
        if self._capabilities.kde_origin_name and is_useful_origin:
            hints["x-kde-origin-name"] = origin_url_str
        if icon.isNull():
            # Fall back to the qutebrowser logo when the page provides no icon.
            filename = "icons/qutebrowser-64x64.png"
            icon = QImage.fromData(resources.read_file_binary(filename))
        # Quirks decide which hint key carries image data on old spec versions.
        key = self._quirks.icon_key or "image-data"
        data = self._convert_image(icon)
        if data is not None:
            hints[key] = data
        return hints
    def _call_notify_wrapper(
        self,
        *,
        appname: str,
        replaces_id: QVariant,
        icon: str,
        title: str,
        body: str,
        actions: QDBusArgument,
        hints: Dict[str, Any],
        timeout: int,
    ) -> Any:
        """Wrapper around the DBus Notify call to use keyword args.

        The positional order below is fixed by the spec's Notify signature.
        """
        return self.interface.call(
            QDBus.CallMode.BlockWithGui,
            "Notify",
            appname,
            replaces_id,
            icon,
            title,
            body,
            actions,
            hints,
            timeout,
        )
    def present(
        self,
        qt_notification: "QWebEngineNotification",
        *,
        replaces_id: Optional[int],
    ) -> int:
        """Shows a notification over DBus.

        Returns the server-assigned notification id, or -1 after a non-fatal
        DBus error (the return value is ignored by the caller in that case).
        """
        if replaces_id is None:
            replaces_id = 0  # 0 is never a valid ID according to the spec
        reply = self._call_notify_wrapper(
            appname="qutebrowser",
            replaces_id=_as_uint32(replaces_id),
            icon="",  # we use image-data and friends instead
            title=self._get_title_arg(qt_notification.title()),
            body=self._format_body(
                body=qt_notification.message(),
                origin_url=qt_notification.origin(),
            ),
            actions=self._get_actions_arg(),
            hints=self._get_hints_arg(
                origin_url=qt_notification.origin(),
                icon=qt_notification.icon(),
            ),
            timeout=-1,  # use default
        )
        try:
            # Expect a single uint32 argument: the notification id.
            self._verify_message(reply, "u", QDBusMessage.MessageType.ReplyMessage)
        except DBusError as e:
            if e.is_fatal:
                raise
            self.error.emit(e.error_message)
            # Return value gets ignored in NotificationBridgePresenter.present
            return -1
        notification_id = reply.arguments()[0]
        self._verify_notification_id(notification_id, replaces_id=replaces_id)
        return notification_id
    def _convert_image(self, qimage: QImage) -> Optional[QDBusArgument]:
        """Convert a QImage to the structure DBus expects.

        Returns the (iiibiiay) image-data struct from the spec, or None when
        the image has row padding and the server can't handle padded images.
        https://specifications.freedesktop.org/notification-spec/latest/ar01s05.html#icons-and-images-formats
        """
        bits_per_color = 8
        has_alpha = qimage.hasAlphaChannel()
        if has_alpha:
            image_format = QImage.Format.Format_RGBA8888
            channel_count = 4
        else:
            image_format = QImage.Format.Format_RGB888
            channel_count = 3
        # Convert in place to the matching byte-ordered RGB(A) format.
        qimage.convertTo(image_format)
        bytes_per_line = qimage.bytesPerLine()
        width = qimage.width()
        height = qimage.height()
        image_data = QDBusArgument()
        image_data.beginStructure()
        image_data.add(width)
        image_data.add(height)
        image_data.add(bytes_per_line)
        image_data.add(has_alpha)
        image_data.add(bits_per_color)
        image_data.add(channel_count)
        size = qimage.sizeInBytes()
        # Despite the spec not mandating this, many notification daemons mandate that
        # the last scanline does not have any padding bytes.
        #
        # Or in the words of dunst:
        #
        #     The image is serialised rowwise pixel by pixel. The rows are aligned by a
        #     spacer full of garbage. The overall data length of data + garbage is
        #     called the rowstride.
        #
        #     Mind the missing spacer at the last row.
        #
        #     len:     |<--------------rowstride---------------->|
        #     len:     |<-width*pixelstride->|
        #     row 1:   |   data for row 1    | spacer of garbage |
        #     row 2:   |   data for row 2    | spacer of garbage |
        #              |         .           | spacer of garbage |
        #              |         .           | spacer of garbage |
        #              |         .           | spacer of garbage |
        #     row n-1: |   data for row n-1  | spacer of garbage |
        #     row n:   |   data for row n    |
        #
        # Source:
        # https://github.com/dunst-project/dunst/blob/v1.6.1/src/icon.c#L292-L309
        padding = bytes_per_line - width * channel_count
        assert 0 <= padding <= 3, (padding, bytes_per_line, width, channel_count)
        size -= padding
        if padding and self._quirks.no_padded_images:
            return None
        bits_ptr = qimage.constBits()
        assert bits_ptr is not None
        # Copy exactly `size` bytes, i.e. without the final row's padding.
        bits = bits_ptr.asstring(size)
        image_data.add(QByteArray(bits))
        image_data.endStructure()
        return image_data
    @pyqtSlot(QDBusMessage)
    def _handle_close(self, msg: QDBusMessage) -> None:
        """Handle NotificationClosed from DBus."""
        try:
            # Spec signature: notification id and close reason, both uint32.
            self._verify_message(msg, "uu", QDBusMessage.MessageType.SignalMessage)
        except Error:
            if not self._quirks.wrong_closes_type:
                raise
            # Buggy servers send the close reason as int32 instead.
            self._verify_message(msg, "ui", QDBusMessage.MessageType.SignalMessage)
        notification_id, _close_reason = msg.arguments()
        self.close_id.emit(notification_id)
@pyqtSlot(QDBusMessage)
def _handle_action(self, msg: QDBusMessage) -> None:
"""Handle ActionInvoked from DBus."""
self._verify_message(msg, "us", QDBusMessage.MessageType.SignalMessage)
notification_id, action_key = msg.arguments()
if action_key == "default":
self.click_id.emit(notification_id)
    @pyqtSlot(int)
    def on_web_closed(self, notification_id: int) -> None:
        """Send CloseNotification if a notification was closed from JS.

        Fire-and-forget: the call does not block and the reply is not checked.
        """
        self.interface.call(
            QDBus.CallMode.NoBlock,
            "CloseNotification",
            _as_uint32(notification_id),
        )
    def _fetch_capabilities(self) -> None:
        """Fetch capabilities from the notification server.

        Quirks may force individual capabilities off even when advertised.
        """
        reply = self.interface.call(
            QDBus.CallMode.BlockWithGui,
            "GetCapabilities",
        )
        # Expect an array of strings.
        self._verify_message(reply, "as", QDBusMessage.MessageType.ReplyMessage)
        caplist = reply.arguments()[0]
        self._capabilities = _ServerCapabilities.from_list(caplist)
        if self._quirks.avoid_actions:
            self._capabilities.actions = False
        if self._quirks.avoid_body_hyperlinks:
            self._capabilities.body_hyperlinks = False
        log.misc.debug(f"Notification server capabilities: {self._capabilities}")
def _format_body(self, body: str, origin_url: QUrl) -> str:
"""Format the body according to the server capabilities.
If the server doesn't support x-kde-origin-name, we include the origin URL as a
prefix. If possible, we hyperlink it.
For both prefix and body, we'll need to HTML escape it if the server supports
body markup.
"""
urlstr = origin_url.toDisplayString()
is_useful_origin = self._should_include_origin(origin_url)
if self._capabilities.kde_origin_name or not is_useful_origin:
prefix = None
elif self._capabilities.body_markup and self._capabilities.body_hyperlinks:
href = html.escape(origin_url.toString(urlutils.FormatOption.ENCODED))
text = html.escape(urlstr, quote=False)
prefix = f'<a href="{href}">{text}</a>'
elif self._capabilities.body_markup:
prefix = html.escape(urlstr, quote=False)
else:
prefix = urlstr
if self._capabilities.body_markup:
body = html.escape(body, quote=False)
if prefix is None:
return body
return prefix + "\n\n" + body
|
fcbt | DistTools | # shell and operating system
import os
verbose = 0
dcount = fcount = 0
maxfileload = 100000
blksize = 1024 * 8
def BuildDistName():
    """Return the full distribution name, e.g. "FreeCAD_V0.1B123".

    Reads ../Version.h, which must contain exactly four lines; each value is
    taken from column 23 up to (excluding) the trailing newline.
    """
    # Use a context manager so the file handle is closed deterministically
    # (the old code leaked it).
    with open("../Version.h", "r") as version_file:
        [FCVersionMajor, FCVersionMinor, FCVersionBuild, FCVersionDisDa] = (
            version_file.readlines()
        )
    return (
        "FreeCAD_V"
        + FCVersionMajor[23:-1]
        + "."
        + FCVersionMinor[23:-1]
        + "B"
        + FCVersionBuild[23:-1]
    )
def BuildSetupName():
    """Return the setup name without build number, e.g. "FreeCAD_V0.1".

    Reads ../Version.h (four lines; values start at column 23).
    """
    # Context manager fixes the file handle leak of the original version.
    with open("../Version.h", "r") as version_file:
        [FCVersionMajor, FCVersionMinor, FCVersionBuild, FCVersionDisDa] = (
            version_file.readlines()
        )
    return "FreeCAD_V" + FCVersionMajor[23:-1] + "." + FCVersionMinor[23:-1]
def GetVersion():
    """Return the "major.minor" version string read from ../Version.h."""
    # Context manager fixes the file handle leak of the original version.
    with open("../Version.h", "r") as version_file:
        [FCVersionMajor, FCVersionMinor, FCVersionBuild, FCVersionDisDa] = (
            version_file.readlines()
        )
    return FCVersionMajor[23:-1] + "." + FCVersionMinor[23:-1]
def GetBuildNbr():
    """Return the build number string read from ../Version.h."""
    # Context manager fixes the file handle leak of the original version.
    with open("../Version.h", "r") as version_file:
        [FCVersionMajor, FCVersionMinor, FCVersionBuild, FCVersionDisDa] = (
            version_file.readlines()
        )
    return FCVersionBuild[23:-1]
def GetBuildDate():
    """Return the release-date string read from ../Version.h."""
    # Context manager fixes the file handle leak of the original version.
    with open("../Version.h", "r") as version_file:
        [FCVersionMajor, FCVersionMinor, FCVersionBuild, FCVersionDisDa] = (
            version_file.readlines()
        )
    return FCVersionDisDa[23:-1]
def EnsureDir(name):
    """Create directory *name* if missing.

    Returns 1 when the directory already existed, 0 when it was created.
    """
    if os.path.isdir(name):
        return 1
    os.mkdir(name)
    return 0
# Regex filters describing files/directories to skip when packaging the
# various distribution flavours. NOTE(review): some patterns look suspicious
# ("^moc_\\.*$" / "^Plugin\\.*$" probably meant ".*" without the backslash)
# but are kept as-is to preserve behavior.

# Source distribution: build artifacts, VCS metadata, docs intermediates.
SrcFilter = [
    "^.*\\.o$",
    "^Debug$",
    "^DebugCmd$",
    "^DebugPy$",
    "^Release$",
    "^ReleaseCmd$",
    "^ReleasePy$",
    "^Attic$",
    "^CVS$",
    "^moc_\\.*$",
    "^.*\\.opt$",
    "^.*\\.ilg$",
    "^.*\\.ps$",
    "^.*\\.ind$",
    "^.*\\.idx$",
    "^.*\\.doc$",
    "^.*\\.dvi$",
    "^.*\\.ncb$",
    "^.*\\.aux$",
    "^.*\\.pdf$",
    "^.*\\.toc$",
    "^.*\\.exe$",
    "^.*\\.png$",
    "^.*\\.bak$",
    "^.*\\.pyc$",
    "^.*\\.dep$",
    "^.*\\.log$",
    "^.*\\.pyd$",
    "^.*\\.ilk$",
    "^.*\\.lib$",
    "^.*\\.pdb$",
    "^.*\\.exp$",
    "^.*\\.bsc$",
    "^.*\\.plg$",
]

# Binary distribution: debug builds and linker side-products.
BinFilter = [
    "^Plugin\\.*$",
    "^Standard\\.*$",
    "^.*\\.xml$",
    "^.*\\.log$",
    "^.*\\.pdb$",
    "^.*\\.ilk$",
    "^.*\\.lib$",
    "^.*\\.exp$",
    "^.*\\.bsc$",
    "^.*CADD.exe$",
    "^.*CADAppD.dll$",
    "^.*CmdD.exe$",
    "^.*BaseD.dll$",
    "^.*CADDCmdPy.dll$",
    "^.*GuiD.dll$",
    "^.*\\.bsc$",
    "^.*\\.FCScript\\..*$",
    "^.*\\.FCParam$",
    "^.*\\.FCScript$",
]

# Library distribution: like BinFilter, but keeps .lib and drops .exe.
LibFilter = [
    "^Plugin\\.*$",
    "^Standard\\.*$",
    "^.*\\.xml$",
    "^.*\\.log$",
    "^.*\\.pdb$",
    "^.*\\.ilk$",
    "^.*\\.exe$",
    "^.*\\.exp$",
    "^.*\\.bsc$",
    "^.*CADD.lib$",
    "^.*CADAppD.lib$",
    "^.*CmdD.lib$",
    "^.*BaseD.lib$",
    "^.*GuiD.lib$",
    "^.*\\.FCScript\\..*$",
    "^.*\\.FCParam$",
]

LibPackFilter = ["^.*\\.o$", "^Debug$"]

# Module distribution: build artifacts, VCS metadata and sources.
ModFilter = [
    "^.*\\.o$",
    "^Debug$",
    "^DebugCmd$",
    "^DebugPy$",
    "^Release$",
    "^ReleaseCmd$",
    "^App$",
    "^Gui$",
    "^CVS$",
    "^Attic$",
    "^.*\\.opt$",
    # Was "^.*_d\.pyd$": "\." is an invalid escape sequence (SyntaxWarning on
    # modern Python). "\\." yields the identical string value.
    "^.*_d\\.pyd$",
    "^.*\\.opt$",
    "^.*\\.ilg$",
    "^.*\\.ps$",
    "^.*\\.ind$",
    "^.*\\.idx$",
    "^.*\\.doc$",
    "^.*\\.dvi$",
    "^.*\\.ncb$",
    "^.*\\.aux$",
    "^.*\\.pdf$",
    "^.*\\.toc$",
    "^.*\\.bak$",
    "^.*\\.pyc$",
    "^.*\\.dep$",
    "^.*\\.log$",
    "^.*\\.ilk$",
    "^.*\\.pdb$",
    "^.*\\.exp$",
    "^.*\\.lib$",
    "^.*\\.ui$",
    "^.*Makefile$",
    "^.*\\.plg$",
]

DocFilter = ["^.*\\.o$", "^Debug$"]
|
autokey | monitor | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (C) 2011 Chris Dekter
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import os.path
import threading
import time
from pyinotify import EventsCodes, Notifier, ProcessEvent, WatchManager
logger = __import__("autokey.logger").logger.get_logger(__name__)

m = EventsCodes.OP_FLAGS
# inotify events we subscribe to: creations, modifications, deletions and
# both halves of a move (moved-to = created, moved-from = removed).
MASK = (
    m["IN_CREATE"]
    | m["IN_MODIFY"]
    | m["IN_DELETE"]
    | m["IN_MOVED_TO"]
    | m["IN_MOVED_FROM"]
)
class Processor(ProcessEvent):
    """Translate pyinotify events into listener callbacks.

    Create/modify/move-to events map to path_created_or_modified(), delete and
    move-from events map to path_removed(). Events are dropped while the
    owning monitor is suspended.
    """

    def __init__(self, monitor, listener):
        ProcessEvent.__init__(self)
        self.listener = listener
        self.monitor = monitor

    def __getEventPath(self, event):
        # An empty name means the event is about the watched path itself.
        if event.name != "":
            path = os.path.join(event.path, event.name)
        else:
            path = event.path
        logger.debug("Reporting %s event at %s", event.maskname, path)
        return path

    def __report_created_or_modified(self, event):
        # Shared handler for all "something appeared/changed" events.
        path = self.__getEventPath(event)
        if not self.monitor.is_suspended():
            self.listener.path_created_or_modified(path)

    def __report_removed(self, event):
        # Shared handler for all "something went away" events.
        path = self.__getEventPath(event)
        if not self.monitor.is_suspended():
            self.listener.path_removed(path)

    def process_IN_MOVED_TO(self, event):
        self.__report_created_or_modified(event)

    def process_IN_CREATE(self, event):
        self.__report_created_or_modified(event)

    def process_IN_MODIFY(self, event):
        self.__report_created_or_modified(event)

    def process_IN_DELETE(self, event):
        self.__report_removed(event)

    def process_IN_MOVED_FROM(self, event):
        self.__report_removed(event)
class FileMonitor(threading.Thread):
    """Daemon thread watching filesystem paths via pyinotify.

    Paths are tracked both in the pyinotify WatchManager and in self.watches
    (a plain list of path strings) for bookkeeping.
    """

    def __init__(self, listener):
        threading.Thread.__init__(self)
        self.__p = Processor(self, listener)
        self.manager = WatchManager()
        self.notifier = Notifier(self.manager, self.__p)
        self.event = threading.Event()
        self.daemon = True  # setDaemon() is deprecated since Python 3.10
        self.watches = []
        self.__isSuspended = False

    def suspend(self):
        """Stop reporting events until unsuspend() takes effect."""
        self.__isSuspended = True

    def unsuspend(self):
        """Re-enable event reporting after a short delay, asynchronously."""
        t = threading.Thread(target=self.__unsuspend)
        t.start()

    def __unsuspend(self):
        time.sleep(1.5)
        self.__isSuspended = False
        # Iterate over a copy: removing from the list being iterated would
        # silently skip the element after each removal.
        for watch in list(self.watches):
            if not os.path.exists(watch):
                logger.debug("Removed stale watch on %s", watch)
                self.watches.remove(watch)

    def is_suspended(self):
        return self.__isSuspended

    def has_watch(self, path):
        return path in self.watches

    def add_watch(self, path):
        logger.debug("Adding watch for %s", path)
        self.manager.add_watch(path, MASK, self.__p)
        self.watches.append(path)

    def remove_watch(self, path):
        logger.debug("Removing watch for %s", path)
        wd = self.manager.get_wd(path)
        self.manager.rm_watch(wd, True)
        self.watches.remove(path)
        # Also drop bookkeeping entries below this path. The previous
        # index-based loop mutated the list while scanning it, which could
        # skip entries; rebuilding the list avoids that.
        self.watches = [w for w in self.watches if not w.startswith(path)]

    def run(self):
        while not self.event.is_set():  # isSet() is deprecated since Python 3.10
            self.notifier.process_events()
            if self.notifier.check_events(1000):
                self.notifier.read_events()
        logger.info("Shutting down file monitor")
        self.notifier.stop()

    def stop(self):
        self.event.set()
        self.join()
|
util | Event | # Based on http://stackoverflow.com/a/2022629
class Event(list):
    """A list of callables that can be fired like a function.

    Handlers registered via once() are removed before they fire; handlers
    appended normally fire on every call.
    """

    def __call__(self, *args, **kwargs):
        # Iterate over a snapshot so removing one-shot handlers is safe.
        for handler in self[:]:
            # hasattr() replaces the original O(n) '"once" in dir(f)' scan.
            if hasattr(handler, "once") and handler in self:
                self.remove(handler)
            handler(*args, **kwargs)

    def __repr__(self):
        return "Event(%s)" % list.__repr__(self)

    def once(self, func, name=None):
        """Register *func* to fire only once.

        When *name* is given, a second once() registration with the same name
        is silently dropped. Returns self for chaining.
        """
        func.once = True
        func.name = None
        if name:  # Dont function with same name twice
            names = [f.name for f in self if hasattr(f, "once")]
            if name not in names:
                func.name = name
                self.append(func)
        else:
            self.append(func)
        return self
if __name__ == "__main__":
    # Manual smoke tests / micro-benchmark for Event.

    def testBenchmark():
        # Registers 1000 handlers under the same "once" name; only the first
        # is kept, so firing prints a single line.
        def say(pre, text):
            print("%s Say: %s" % (pre, text))

        import time

        s = time.time()
        on_changed = Event()
        for i in range(1000):
            on_changed.once(lambda pre: say(pre, "once"), "once")
        print("Created 1000 once in %.3fs" % (time.time() - s))
        on_changed("#1")

    def testUsage():
        # Demonstrates unnamed once, named-once dedup and plain handlers.
        # NOTE(review): defined but never invoked below.
        def say(pre, text):
            print("%s Say: %s" % (pre, text))

        on_changed = Event()
        on_changed.once(lambda pre: say(pre, "once"))
        on_changed.once(lambda pre: say(pre, "once"))
        on_changed.once(lambda pre: say(pre, "namedonce"), "namedonce")
        on_changed.once(lambda pre: say(pre, "namedonce"), "namedonce")
        on_changed.append(lambda pre: say(pre, "always"))
        on_changed("#1")
        on_changed("#2")
        on_changed("#3")

    testBenchmark()
|
neubot | runner_updates | # neubot/runner_updates.py
#
# Copyright (c) 2012 Simone Basso <bassosimone@gmail.com>,
# NEXA Center for Internet & Society at Politecnico di Torino
#
# This file is part of Neubot <http://www.neubot.org/>.
#
# Neubot is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Neubot is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Neubot. If not, see <http://www.gnu.org/licenses/>.
#
""" Available updates information """
# Adapted from runner_lst.py
#
# This component is periodically updated by the rendezvous
# component and keeps track of the update information, so
# that other components can ask it whether there is an update
# and, in case, where to download it from.
#
class RunnerUpdates(object):
    """Keep track of available-updates information.

    Adapted from runner_lst.py: the rendezvous component periodically stores
    update information here, and other components query it to learn whether
    an update exists and where to download it from.
    """

    def __init__(self):
        """Start out with no update information."""
        self.updates = {}

    def update(self, updates):
        """Replace the stored update information.

        For now we just trust whatever the rendezvous passes us.
        """
        self.updates = updates

    def get_update_version(self):
        """Return the available update version (or None)."""
        return self.updates.get("version")

    def get_update_uri(self):
        """Return the available update URI (or None)."""
        return self.updates.get("uri")


# Module-level singleton shared by the rest of the program.
RUNNER_UPDATES = RunnerUpdates()
|
Material | importFCMat | # ***************************************************************************
# * Copyright (c) 2013 Juergen Riegel <FreeCAD@juergen-riegel.net> *
# * *
# * This program is free software; you can redistribute it and/or modify *
# * it under the terms of the GNU Lesser General Public License (LGPL) *
# * as published by the Free Software Foundation; either version 2 of *
# * the License, or (at your option) any later version. *
# * for detail see the LICENCE text file. *
# * *
# * This program is distributed in the hope that it will be useful, *
# * but WITHOUT ANY WARRANTY; without even the implied warranty of *
# * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *
# * GNU Library General Public License for more details. *
# * *
# * You should have received a copy of the GNU Library General Public *
# * License along with this program; if not, write to the Free Software *
# * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 *
# * USA *
# * *
# ***************************************************************************
__title__ = "FreeCAD material card importer"
__author__ = "Juergen Riegel"
__url__ = "https://www.freecad.org"
import os
import FreeCAD
import Material
from materialtools.cardutils import get_material_template
if FreeCAD.GuiUp:
from PySide import QtGui
# to distinguish python built-in open function from the one declared below
if open.__module__ in ["__builtin__", "io"]:
    # Only alias the genuine builtin (py2: __builtin__, py3: io) — presumably
    # so a module reload doesn't rebind pythonopen to our own open() below.
    pythonopen = open
def open(filename):
    "called when freecad wants to open a file"
    # Deliberately shadows the builtin open(); this is FreeCAD's importer
    # entry-point convention. Creates a fresh document named after the file.
    docname = os.path.splitext(os.path.basename(filename))[0]
    doc = FreeCAD.newDocument(docname)
    doc.Label = docname
    FreeCAD.ActiveDocument = doc
    read(filename)
    return doc
def insert(filename, docname):
    "called when freecad wants to import a file"
    try:
        doc = FreeCAD.getDocument(docname)
    except NameError:
        # NOTE(review): FreeCAD.getDocument() apparently raises NameError for
        # a missing document — confirm against the FreeCAD API before changing.
        doc = FreeCAD.newDocument(docname)
    FreeCAD.ActiveDocument = doc
    read(filename)
    return doc
def export(exportList, filename):
    """Called when FreeCAD exports a file; material export is a no-op."""
    return None
def decode(name):
    "decodes encoded strings"
    # Accept str directly: the old code called name.decode() unconditionally,
    # which raises AttributeError on Python 3 str input.
    if isinstance(name, str):
        return name
    try:
        decodedName = name.decode("utf8")
    except UnicodeDecodeError:
        try:
            # Latin-1 maps every byte, so this fallback cannot fail in
            # practice; the error branch is kept for parity with the original.
            decodedName = name.decode("latin1")
        except UnicodeDecodeError:
            FreeCAD.Console.PrintError("Error: Couldn't determine character encoding")
            decodedName = name
    return decodedName
# the reader and writer do not use some Library to read and write the ini file format
# they are implemented here
# thus non standard ini files will be read and written too
# in standard ini file format:
# a = in the value without any encapsulation or string quotes is not allowed (AFAIK)
# https://en.wikipedia.org/wiki/INI_file
# http://www.docuxplorer.com/WebHelp/INI_File_Format.htm
# mainly this parser here is used in FreeCAD
# in the module Material.py is another implementation of reading and writing FCMat files
# the implementation in Material.py uses Pythons ConfigParser module
# in ViewProviderFemMaterial in add_cards_from_a_dir() the parser from Material.py is used
# since this mixture seems to have been there for ages it should not be changed for 0.18
# TODO and FIXME:
# get rid of this mixture
# best might be to switch to a more robust file schema like YAML
# as we had and we might will have problems again and again
# https://github.com/berndhahnebach/FreeCAD_bhb/commits/materialdev
def read(filename):
    """Read a material card through the MaterialManager and return its properties."""
    manager = Material.MaterialManager()
    card = manager.getMaterialByPath(filename)
    return card.Properties
# Metainformation
# first two lines HAVE, REALLY HAVE to be the same (no comment) in any card file !!!!!
# first five lines are the same in any card file
# Line1: card name
# Line2: author and licence
# Line3: information string
# Line4: information link
# Line5: FreeCAD version info or empty
def read_old(filename):
    """Read a FCMat file and return a dictionary from it.

    The reader returns a dictionary in any case, even if the file has
    problems; an empty dict is returned in such a case.
    """
    card_name_file = os.path.splitext(os.path.basename(filename))[0]
    # Context manager ensures the handle is closed (the old code leaked it).
    with pythonopen(filename, encoding="utf8") as f:
        try:
            content = f.readlines()
        except Exception:
            # https://forum.freecad.org/viewtopic.php?f=18&t=56912#p489721
            # older FreeCAD do not write utf-8 for special character on windows
            # I have seen "ISO-8859-15" or "windows-1252"
            # explicit utf-8 writing, https://github.com/FreeCAD/FreeCAD/commit/9a564dd906f
            FreeCAD.Console.PrintError("Error on card loading. File might not utf-8.")
            error_message = "Error on loading. Material file '{}' might not utf-8.".format(
                filename
            )
            FreeCAD.Console.PrintError("{}\n".format(error_message))
            if FreeCAD.GuiUp:
                QtGui.QMessageBox.critical(None, "Error on card reading", error_message)
            return {}
    d = {}
    d["CardName"] = card_name_file  # CardName is the MatCard file name
    for ln, line in enumerate(content):
        ln += (
            1  # enumerate starts with 0, but we would like to have the real line number
        )
        # line numbers are used for CardName and AuthorAndLicense
        # the use of line number is not smart for a data model
        # a wrong user edit could break the file
        # comment
        if line.startswith("#"):
            # a '#' is assumed to be a comment which is ignored
            continue
        # CardName
        if line.startswith(";") and ln == 1:
            v = line.split(";")[1].strip()  # Line 1
            if hasattr(v, "decode"):
                v = v.decode("utf-8")
            card_name_content = v
            if card_name_content != d["CardName"]:
                FreeCAD.Console.PrintLog(
                    "File CardName ( {} ) is not content CardName ( {} )\n".format(
                        card_name_file, card_name_content
                    )
                )
        # AuthorAndLicense
        elif line.startswith(";") and ln == 2:
            v = line.split(";")[1].strip()  # Line 2
            if hasattr(v, "decode"):
                v = v.decode("utf-8")
            d["AuthorAndLicense"] = v
        # rest
        else:
            # ; is a Comment
            # [ is a Section
            if line[0] not in ";[":
                # split once on first occurrence
                # a link could contain a '=' and thus would be split
                k = line.split("=", 1)
                if len(k) == 2:
                    v = k[1].strip()
                    if hasattr(v, "decode"):
                        v = v.decode("utf-8")
                    d[k[0].strip()] = v
    return d
def read2(filename):
    """Read a FCMat file and return a dictionary-of-sections from it.

    The reader returns a dictionary in any case, even if the file has
    problems; an empty dict is returned in such a case.
    """
    card_name_file = os.path.splitext(os.path.basename(filename))[0]
    # Context manager ensures the handle is closed (the old code leaked it).
    with pythonopen(filename, encoding="utf8") as f:
        try:
            content = f.readlines()
        except Exception:
            # https://forum.freecad.org/viewtopic.php?f=18&t=56912#p489721
            # older FreeCAD do not write utf-8 for special character on windows
            # I have seen "ISO-8859-15" or "windows-1252"
            # explicit utf-8 writing, https://github.com/FreeCAD/FreeCAD/commit/9a564dd906f
            FreeCAD.Console.PrintError("Error on card loading. File might not utf-8.")
            error_message = "Error on loading. Material file '{}' might not utf-8.".format(
                filename
            )
            FreeCAD.Console.PrintError("{}\n".format(error_message))
            if FreeCAD.GuiUp:
                QtGui.QMessageBox.critical(None, "Error on card reading", error_message)
            return {}
    # Pre-create the known section groups keys are sorted into.
    d = {}
    d["Meta"] = {}
    d["General"] = {}
    d["Mechanical"] = {}
    d["Fluidic"] = {}
    d["Thermal"] = {}
    d["Electromagnetic"] = {}
    d["Architectural"] = {}
    d["Rendering"] = {}
    d["VectorRendering"] = {}
    d["Cost"] = {}
    d["UserDefined"] = {}
    d["Meta"]["CardName"] = card_name_file  # CardName is the MatCard file name
    section = ""
    for ln, line in enumerate(content):
        # enumerate starts with 0
        # line numbers are used for CardName and AuthorAndLicense
        # the use of line number is not smart for a data model
        # a wrong user edit could break the file
        # comment
        if line.startswith("#"):
            # a '#' is assumed to be a comment which is ignored
            continue
        # CardName
        if line.startswith(";") and ln == 0:
            v = line.split(";")[1].strip()  # Line 1
            if hasattr(v, "decode"):
                v = v.decode("utf-8")
            card_name_content = v
            if card_name_content != d["Meta"]["CardName"]:
                FreeCAD.Console.PrintLog(
                    "File CardName ( {} ) is not content CardName ( {} )\n".format(
                        card_name_file, card_name_content
                    )
                )
        # AuthorAndLicense
        elif line.startswith(";") and ln == 1:
            v = line.split(";")[1].strip()  # Line 2
            if hasattr(v, "decode"):
                v = v.decode("utf-8")
            d["General"]["AuthorAndLicense"] = v  # Move the field to the general group
        # rest
        else:
            # ; is a Comment
            # [ is a Section
            if line[0] == "[":
                # Section header: remember it so following keys land in it.
                line = line[1:]
                k = line.split("]", 1)
                if len(k) >= 2:
                    v = k[0].strip()
                    if hasattr(v, "decode"):
                        v = v.decode("utf-8")
                    section = v
            elif line[0] not in ";":
                # split once on first occurrence
                # a link could contain a '=' and thus would be split
                k = line.split("=", 1)
                if len(k) == 2:
                    v = k[1].strip()
                    if hasattr(v, "decode"):
                        v = v.decode("utf-8")
                    d[section][k[0].strip()] = v
    return d
def write(filename, dictionary, write_group_section=True):
    """Write the given material dictionary to the given FCMat file.

    The dictionary keys are sorted into the section groups defined by the
    material template; keys not found in any template group end up in the
    "UserDefined" section.  The first five header lines (card name,
    author/license, wiki pointers, FreeCAD version) are always written.
    Group sections are written when ``write_group_section`` is True,
    otherwise a single ``[FCMat]`` section is used.
    """
    # sort the data into sections
    contents = []
    user = {}
    header = {}  # stays empty if the template has no "Meta" group (avoids NameError below)
    template_data = get_material_template()
    for group in template_data:
        groupName = list(group)[0]  # group dict has only one key
        contents.append({"keyname": groupName})
        if groupName == "Meta":
            header = contents[-1]
        elif groupName == "UserDefined":
            user = contents[-1]
        for properName in group[groupName]:
            contents[-1][properName] = ""
    # distribute the dictionary values into their template groups;
    # anything the template doesn't know goes to UserDefined
    for k, i in dictionary.items():
        found = False
        for group in contents:
            if not found and k in group:
                group[k] = i
                found = True
        if not found:
            user[k] = i
    # delete empty properties
    # iterating over a dict and changing it is not allowed,
    # thus it is iterated over a list of the keys
    for group in contents:
        for k in list(group):
            if group[k] == "":
                del group[k]
    # card writer
    rev = "{}.{}.{}".format(
        FreeCAD.ConfigGet("BuildVersionMajor"),
        FreeCAD.ConfigGet("BuildVersionMinor"),
        FreeCAD.ConfigGet("BuildRevision"),
    )
    card_name_file = os.path.splitext(os.path.basename(filename))[0]
    if "CardName" not in header:
        # bugfix: the message had a pointless str.format(header) call
        # (the string contains no placeholders)
        error_message = "No card name provided. Card could not be written."
        FreeCAD.Console.PrintError("{}\n".format(error_message))
        if FreeCAD.GuiUp:
            QtGui.QMessageBox.critical(None, "No card name", error_message)
        return
    # use a context manager so the file is closed even if writing raises
    with pythonopen(filename, "w", encoding="utf-8") as f:
        # write header
        # first five lines are the same in any card file, see comment above read def
        if header["CardName"] != card_name_file:
            # CardName is the MatCard file name
            FreeCAD.Console.PrintWarning(
                "The file name {} is not equal to the card name {}. The file name is used.".format(
                    card_name_file, header["CardName"]
                )
            )
        f.write("; " + card_name_file + "\n")
        f.write("; " + header.get("AuthorAndLicense", "no author") + "\n")
        f.write("; information about the content of such cards can be found on the wiki:\n")
        f.write("; https://www.freecad.org/wiki/Material\n")
        f.write("; file created by FreeCAD " + rev + "\n")
        # write standard FCMat section if write group section parameter is set to False
        if write_group_section is False:
            f.write("\n[FCMat]\n")
        # write sections; a section with no contents is not written
        for s in contents:
            if s["keyname"] != "Meta" and len(s) > 1:
                # only write group section if write group section parameter is set to True
                if write_group_section is True:
                    f.write("\n[" + s["keyname"] + "]\n")
                for k, i in s.items():
                    # use only keys which are not empty and the name, even if empty
                    if (k != "keyname" and i != "") or k == "Name":
                        f.write(k + " = " + i + "\n")
# ***** some code examples ***********************************************************************
"""
from materialtools.cardutils import get_source_path as getsrc
from importFCMat import read, write
readmatfile = getsrc() + '/src/Mod/Material/StandardMaterial/Concrete-Generic.FCMat'
writematfile = '/tmp/Concrete-Generic.FCMat'
matdict = read(readmatfile)
matdict
write(writematfile, matdict)
"""
|
vidcutter | videostyle | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
#######################################################################
#
# VidCutter - media cutter & joiner
#
# copyright © 2018 Pete Alexandrou
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
#######################################################################
from functools import partial
from PyQt5.QtCore import QFile, QFileInfo, Qt, QTextStream
from PyQt5.QtGui import QColor, QPalette
from PyQt5.QtWidgets import qApp
try:
    from PyQt5.QtWidgets import QProxyStyle
except ImportError:
    # Older PyQt5 builds shipped without QProxyStyle; emulate it by
    # delegating every QStyle entry point to the application's current style.
    from PyQt5.QtWidgets import QCommonStyle, QStyleFactory
    class QProxyStyle(QCommonStyle):
        # workaround for earlier version of PyQt5 when QProxyStyle did not exist
        def __init__(self):
            # Clone the active style so delegated calls match the app's look.
            self._style = QStyleFactory.create(qApp.style().objectName())
            # Forward the known QStyle virtuals to the cloned style instance.
            # NOTE(review): partial(target) with no bound arguments behaves the
            # same as target itself — presumably used for uniform wrapping; confirm.
            for method in {
                "drawComplexControl",
                "drawControl",
                "drawPrimitive",
                "drawItemPixmap",
                "generatedIconPixmap",
                "hitTestComplexControl",
                "layoutSpacing",
                "pixelMetric",
                "polish",
                "sizeFromContents",
                "standardPixmap",
                "subControlRect",
                "subElementRect",
                "unpolish",
                "itemPixmapRect",
                "itemTextRect",
                "styleHint",
                "drawItemText",
            }:
                target = getattr(self._style, method)
                setattr(self, method, partial(target))
            super().__init__()
class VideoStyle(QProxyStyle):
    """Application base style that disables a few default Qt style hints."""

    # noinspection PyMethodOverriding
    def styleHint(self, hint, option=None, widget=None, returnData=None) -> int:
        proxy = self.proxy()
        disabled_hints = (
            proxy.SH_UnderlineShortcut,
            proxy.SH_DialogButtons_DefaultButton,
            proxy.SH_DialogButtonBox_ButtonsHaveIcons,
        )
        if hint in disabled_hints:
            return 0
        return super(VideoStyle, self).styleHint(hint, option, widget, returnData)

    @staticmethod
    def loadQSS(theme) -> None:
        """Load the QSS stylesheet for *theme* from resources and apply it."""
        path = ":/styles/{}.qss".format(theme)
        if QFileInfo(path).exists():
            stylefile = QFile(path)
            stylefile.open(QFile.ReadOnly | QFile.Text)
            qApp.setStyleSheet(QTextStream(stylefile).readAll())
class VideoStyleLight(VideoStyle):
    """Light color scheme applied to the global application palette."""

    def __init__(self):
        super(VideoStyleLight, self).__init__()
        palette = qApp.palette()
        light_gray = QColor(239, 240, 241)
        dark_text = QColor(49, 54, 59)
        for role, color in (
            (QPalette.Window, light_gray),
            (QPalette.WindowText, dark_text),
            (QPalette.Base, QColor(252, 252, 252)),
            (QPalette.AlternateBase, light_gray),
            (QPalette.ToolTipBase, light_gray),
            (QPalette.ToolTipText, dark_text),
            (QPalette.Text, dark_text),
            (QPalette.Button, light_gray),
            (QPalette.ButtonText, dark_text),
            (QPalette.BrightText, QColor(255, 255, 255)),
            (QPalette.Link, QColor(41, 128, 185)),
        ):
            palette.setColor(role, color)
        # Disabled-state tweaks use the (group, role, color) overload.
        palette.setColor(QPalette.Disabled, QPalette.Light, Qt.white)
        palette.setColor(QPalette.Disabled, QPalette.Shadow, QColor(234, 234, 234))
        qApp.setPalette(palette)
class VideoStyleDark(VideoStyle):
    """Dark color scheme applied to the global application palette."""

    def __init__(self):
        super(VideoStyleDark, self).__init__()
        palette = qApp.palette()
        charcoal = QColor(27, 35, 38)
        off_white = QColor(234, 234, 234)
        near_black = QColor(12, 15, 16)
        for role, color in (
            (QPalette.Window, charcoal),
            (QPalette.WindowText, off_white),
            (QPalette.Base, charcoal),
            (QPalette.AlternateBase, near_black),
            (QPalette.ToolTipBase, charcoal),
            (QPalette.ToolTipText, Qt.white),
            (QPalette.Text, off_white),
            (QPalette.Button, charcoal),
            (QPalette.ButtonText, Qt.white),
            (QPalette.BrightText, QColor(100, 215, 222)),
            (QPalette.Link, QColor(126, 71, 130)),
        ):
            palette.setColor(role, color)
        # Disabled-state tweaks use the (group, role, color) overload.
        palette.setColor(QPalette.Disabled, QPalette.Light, Qt.black)
        palette.setColor(QPalette.Disabled, QPalette.Shadow, near_black)
        qApp.setPalette(palette)
|
schemas | fields | from CTFd.models import Fields, TeamFieldEntries, UserFieldEntries, db, ma
from marshmallow import fields
class FieldSchema(ma.ModelSchema):
    """Serializer for the Fields model; ``id`` is read-only on dump."""

    class Meta:
        model = Fields
        include_fk = True
        dump_only = ("id",)
class UserFieldEntriesSchema(ma.ModelSchema):
    """Serializer for a user's custom-field values.

    Flattens the related Field's name/description/type into this entry
    through Nested lookups on the ``field`` relationship.
    """

    class Meta:
        model = UserFieldEntries
        sqla_session = db.session
        include_fk = True
        load_only = ("id",)
        exclude = ("field", "user", "user_id")
        dump_only = ("user_id", "name", "description", "type")

    # NOTE(review): ("name") is a plain string, not a 1-tuple. Under
    # marshmallow 2.x a string `only` dumps the bare field value, which
    # appears intentional here — confirm before "fixing" to ("name",),
    # since a tuple would change the dumped shape to a dict.
    name = fields.Nested(FieldSchema, only=("name"), attribute="field")
    description = fields.Nested(FieldSchema, only=("description"), attribute="field")
    type = fields.Nested(FieldSchema, only=("field_type"), attribute="field")
class TeamFieldEntriesSchema(ma.ModelSchema):
    """Serializer for a team's custom-field values (mirrors UserFieldEntriesSchema)."""

    class Meta:
        model = TeamFieldEntries
        sqla_session = db.session
        include_fk = True
        load_only = ("id",)
        exclude = ("field", "team", "team_id")
        dump_only = ("team_id", "name", "description", "type")

    # NOTE(review): ("name") etc. are strings, not 1-tuples — marshmallow 2.x
    # string `only` dumps the bare field value; confirm before changing.
    name = fields.Nested(FieldSchema, only=("name"), attribute="field")
    description = fields.Nested(FieldSchema, only=("description"), attribute="field")
    type = fields.Nested(FieldSchema, only=("field_type"), attribute="field")
|
utils | visualization_utils_test | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for image.understanding.object_detection.core.visualization_utils.
Testing with visualization in the following colab:
https://drive.google.com/a/google.com/file/d/0B5HnKS_hMsNARERpU3MtU3I5RFE/view?usp=sharing
"""
import os
import numpy as np
import PIL.Image as Image
import tensorflow as tf
from app.object_detection.utils import visualization_utils
_TESTDATA_PATH = 'object_detection/test_images'
class VisualizationUtilsTest(tf.test.TestCase):
  """Smoke tests: drawing utilities must leave image dimensions unchanged."""

  def create_colorful_test_image(self):
    """This function creates an image that can be used to test vis functions.

    It makes an image composed of four colored rectangles.

    Returns:
      colorful test numpy array image.
    """
    ch255 = np.full([100, 200, 1], 255, dtype=np.uint8)
    ch128 = np.full([100, 200, 1], 128, dtype=np.uint8)
    ch0 = np.full([100, 200, 1], 0, dtype=np.uint8)
    imr = np.concatenate((ch255, ch128, ch128), axis=2)
    img = np.concatenate((ch255, ch255, ch0), axis=2)
    imb = np.concatenate((ch255, ch0, ch255), axis=2)
    imw = np.concatenate((ch128, ch128, ch128), axis=2)
    imu = np.concatenate((imr, img), axis=1)
    imd = np.concatenate((imb, imw), axis=1)
    image = np.concatenate((imu, imd), axis=0)
    return image

  def test_draw_bounding_box_on_image(self):
    test_image = self.create_colorful_test_image()
    test_image = Image.fromarray(test_image)
    width_original, height_original = test_image.size
    ymin = 0.25
    ymax = 0.75
    xmin = 0.4
    xmax = 0.6
    visualization_utils.draw_bounding_box_on_image(test_image, ymin, xmin, ymax,
                                                   xmax)
    width_final, height_final = test_image.size
    self.assertEqual(width_original, width_final)
    self.assertEqual(height_original, height_final)

  def test_draw_bounding_box_on_image_array(self):
    test_image = self.create_colorful_test_image()
    width_original = test_image.shape[0]
    height_original = test_image.shape[1]
    ymin = 0.25
    ymax = 0.75
    xmin = 0.4
    xmax = 0.6
    visualization_utils.draw_bounding_box_on_image_array(
        test_image, ymin, xmin, ymax, xmax)
    width_final = test_image.shape[0]
    height_final = test_image.shape[1]
    self.assertEqual(width_original, width_final)
    self.assertEqual(height_original, height_final)

  def test_draw_bounding_boxes_on_image(self):
    test_image = self.create_colorful_test_image()
    test_image = Image.fromarray(test_image)
    width_original, height_original = test_image.size
    boxes = np.array([[0.25, 0.75, 0.4, 0.6],
                      [0.1, 0.1, 0.9, 0.9]])
    visualization_utils.draw_bounding_boxes_on_image(test_image, boxes)
    width_final, height_final = test_image.size
    self.assertEqual(width_original, width_final)
    self.assertEqual(height_original, height_final)

  def test_draw_bounding_boxes_on_image_array(self):
    test_image = self.create_colorful_test_image()
    width_original = test_image.shape[0]
    height_original = test_image.shape[1]
    boxes = np.array([[0.25, 0.75, 0.4, 0.6],
                      [0.1, 0.1, 0.9, 0.9]])
    visualization_utils.draw_bounding_boxes_on_image_array(test_image, boxes)
    width_final = test_image.shape[0]
    height_final = test_image.shape[1]
    self.assertEqual(width_original, width_final)
    self.assertEqual(height_original, height_final)

  def test_draw_bounding_boxes_on_image_tensors(self):
    """Tests that bounding box utility produces reasonable results."""
    category_index = {1: {'id': 1, 'name': 'dog'}, 2: {'id': 2, 'name': 'cat'}}
    fname = os.path.join(_TESTDATA_PATH, 'image1.jpg')
    image_np = np.array(Image.open(fname))
    images_np = np.stack((image_np, image_np), axis=0)
    with tf.Graph().as_default():
      images_tensor = tf.constant(value=images_np, dtype=tf.uint8)
      boxes = tf.constant([[[0.4, 0.25, 0.75, 0.75], [0.5, 0.3, 0.6, 0.9]],
                           [[0.25, 0.25, 0.75, 0.75], [0.1, 0.3, 0.6, 1.0]]])
      classes = tf.constant([[1, 1], [1, 2]], dtype=tf.int64)
      scores = tf.constant([[0.8, 0.1], [0.6, 0.5]])
      images_with_boxes = (
          visualization_utils.draw_bounding_boxes_on_image_tensors(
              images_tensor,
              boxes,
              classes,
              scores,
              category_index,
              min_score_thresh=0.2))
      with self.test_session() as sess:
        sess.run(tf.global_variables_initializer())
        # Write output images for visualization.
        images_with_boxes_np = sess.run(images_with_boxes)
        self.assertEqual(images_np.shape, images_with_boxes_np.shape)
        for i in range(images_with_boxes_np.shape[0]):
          img_name = 'image_' + str(i) + '.png'
          output_file = os.path.join(self.get_temp_dir(), img_name)
          # Bugfix: was a Python-2-only `print` statement; the function
          # form below is valid under both Python 2 and 3.
          print('Writing output image %d to %s' % (i, output_file))
          image_pil = Image.fromarray(images_with_boxes_np[i, ...])
          image_pil.save(output_file)

  def test_draw_keypoints_on_image(self):
    test_image = self.create_colorful_test_image()
    test_image = Image.fromarray(test_image)
    width_original, height_original = test_image.size
    keypoints = [[0.25, 0.75], [0.4, 0.6], [0.1, 0.1], [0.9, 0.9]]
    visualization_utils.draw_keypoints_on_image(test_image, keypoints)
    width_final, height_final = test_image.size
    self.assertEqual(width_original, width_final)
    self.assertEqual(height_original, height_final)

  def test_draw_keypoints_on_image_array(self):
    test_image = self.create_colorful_test_image()
    width_original = test_image.shape[0]
    height_original = test_image.shape[1]
    keypoints = [[0.25, 0.75], [0.4, 0.6], [0.1, 0.1], [0.9, 0.9]]
    visualization_utils.draw_keypoints_on_image_array(test_image, keypoints)
    width_final = test_image.shape[0]
    height_final = test_image.shape[1]
    self.assertEqual(width_original, width_final)
    self.assertEqual(height_original, height_final)

  def test_draw_mask_on_image_array(self):
    test_image = np.asarray([[[0, 0, 0], [0, 0, 0]],
                             [[0, 0, 0], [0, 0, 0]]], dtype=np.uint8)
    mask = np.asarray([[0, 1],
                       [1, 1]], dtype=np.uint8)
    expected_result = np.asarray([[[0, 0, 0], [0, 0, 127]],
                                  [[0, 0, 127], [0, 0, 127]]], dtype=np.uint8)
    visualization_utils.draw_mask_on_image_array(test_image, mask,
                                                 color='Blue', alpha=.5)
    self.assertAllEqual(test_image, expected_result)

  def test_add_cdf_image_summary(self):
    values = [0.1, 0.2, 0.3, 0.4, 0.42, 0.44, 0.46, 0.48, 0.50]
    visualization_utils.add_cdf_image_summary(values, 'PositiveAnchorLoss')
    cdf_image_summary = tf.get_collection(key=tf.GraphKeys.SUMMARIES)[0]
    with self.test_session():
      cdf_image_summary.eval()
# Allow running this test file directly with `python`.
if __name__ == '__main__':
  tf.test.main()
|
gtk3 | menubar_osx | #
# Copyright (C) 2007-2009 Andrew Resch <andrewresch@gmail.com>
#
# This file is part of Deluge and is licensed under GNU General Public License 3.0, or later, with
# the additional special exception to link portions of this program with the OpenSSL library.
# See LICENSE for more details.
#
from deluge.configmanager import ConfigManager
from gi.repository import Gtk
# macOS keyboard accelerators for main-window menu entries, keyed by GTK
# accel path; <Meta> maps to the Command key on macOS.
macos_main_window_accelmap = {
    "<Deluge-MainWindow>/File/Add Torrent": "<Meta>o",
    "<Deluge-MainWindow>/File/Create Torrent": "<Meta>n",
    "<Deluge-MainWindow>/File/Quit & Shutdown Daemon": "<Meta><Shift>q",
    "<Deluge-MainWindow>/File/Quit": "<Meta>q",
    "<Deluge-MainWindow>/Edit/Preferences": "<Meta>comma",
    "<Deluge-MainWindow>/Edit/Connection Manager": "<Meta>m",
    "<Deluge-MainWindow>/View/Find ...": "<Meta>f",
    "<Deluge-MainWindow>/Help/FAQ": "<Meta>question",
}
def menubar_osx(gtkui, osxapp):
    """Rework the GTK menubar into a macOS-style application menu.

    Re-maps keyboard shortcuts to Command-key equivalents, strips the
    Quit/Preferences/Connection Manager/About items out of their GTK menus,
    hides the in-window menubar and hands it to *osxapp*, then re-inserts
    the stripped items into the global application menu.
    """
    # Change key shortcuts
    for accel_path, accelerator in macos_main_window_accelmap.items():
        accel_key, accel_mods = Gtk.accelerator_parse(accelerator)
        Gtk.AccelMap.change_entry(accel_path, accel_key, accel_mods, True)
    main_builder = gtkui.mainwindow.get_builder()
    menubar = main_builder.get_object("menubar")
    config = ConfigManager("gtk3ui.conf")
    # NOTE(review): menu items are addressed by fixed positions from the
    # .ui file below; these indexes break if the menu layout changes.
    file_menu = main_builder.get_object("menu_file").get_submenu()
    file_items = file_menu.get_children()
    quit_all_item = file_items[3]
    for item in range(2, len(file_items)):  # remove quits
        file_menu.remove(file_items[item])
    menu_widget = main_builder.get_object("menu_edit")
    edit_menu = menu_widget.get_submenu()
    edit_items = edit_menu.get_children()
    pref_item = edit_items[0]
    edit_menu.remove(pref_item)
    conn_item = edit_items[1]
    edit_menu.remove(conn_item)
    # the Edit menu is now empty, so drop it from the menubar entirely
    menubar.remove(menu_widget)
    help_menu = main_builder.get_object("menu_help").get_submenu()
    help_items = help_menu.get_children()
    about_item = help_items[4]
    help_menu.remove(about_item)
    help_menu.remove(help_items[3])  # separator
    # hand the (hidden) menubar over to the macOS global menu
    menubar.hide()
    osxapp.set_menu_bar(menubar)
    # populate app menu
    osxapp.insert_app_menu_item(about_item, 0)
    osxapp.insert_app_menu_item(Gtk.SeparatorMenuItem(), 1)
    osxapp.insert_app_menu_item(pref_item, 2)
    if not config["standalone"]:
        # Connection Manager only makes sense when driving a remote daemon
        osxapp.insert_app_menu_item(conn_item, 3)
    if quit_all_item.get_visible():
        osxapp.insert_app_menu_item(Gtk.SeparatorMenuItem(), 4)
        osxapp.insert_app_menu_item(quit_all_item, 5)
|
PyObjCTest | test_ikscannerdeviceview | from PyObjCTools.TestSupport import *
from Quartz import *
class TestIKScannerDeviceView(TestCase):
    """Checks IKScannerDeviceView constants and method bridging metadata."""

    @min_os_level("10.6")
    def testConstants10_6(self):
        # Transfer-mode and display-mode enum values introduced in 10.6.
        cases = (
            (IKScannerDeviceViewTransferModeFileBased, 0),
            (IKScannerDeviceViewTransferModeMemoryBased, 1),
            (IKScannerDeviceViewDisplayModeSimple, 0),
            (IKScannerDeviceViewDisplayModeAdvanced, 1),
        )
        for actual, expected in cases:
            self.assertEqual(actual, expected)

    @min_os_level("10.6")
    def testMethods10_6(self):
        # Each getter result and setter first argument must be bridged as BOOL.
        bool_accessors = (
            (
                IKScannerDeviceView.hasDisplayModeSimple,
                IKScannerDeviceView.setHasDisplayModeSimple_,
            ),
            (
                IKScannerDeviceView.hasDisplayModeAdvanced,
                IKScannerDeviceView.setHasDisplayModeAdvanced_,
            ),
            (
                IKScannerDeviceView.displaysDownloadsDirectoryControl,
                IKScannerDeviceView.setDisplaysDownloadsDirectoryControl_,
            ),
            (
                IKScannerDeviceView.displaysPostProcessApplicationControl,
                IKScannerDeviceView.setDisplaysPostProcessApplicationControl_,
            ),
        )
        for getter, setter in bool_accessors:
            self.assertResultIsBOOL(getter)
            self.assertArgIsBOOL(setter, 0)
# Run the PyObjC test suite when executed directly.
if __name__ == "__main__":
    main()
|
migrations | 0311_dashboard_template_scope | # Generated by Django 3.2.16 on 2023-03-14 11:26
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add DashboardTemplate.scope and backfill existing rows.

    Templates without a team become 'global'; team-bound ones become 'team'.
    """

    dependencies = [
        ("posthog", "0310_add_starter_dashboard_template"),
    ]

    operations = [
        migrations.AddField(
            # safe to ignore null locking this table it has fewer than 10 items on it
            model_name="dashboardtemplate",
            name="scope",
            field=models.CharField(
                choices=[("team", "Only team"), ("global", "Global")],
                max_length=24,
                null=True,
                blank=True,
            ),
        ),
        migrations.RunSQL(
            # safe to ignore null locking this table it has fewer than 10 items on it
            sql="""
            UPDATE posthog_dashboardtemplate SET scope = 'global' WHERE team_id IS NULL -- not-null-ignore
            """,
            reverse_sql=migrations.RunSQL.noop,
        ),
        migrations.RunSQL(
            # safe to ignore null locking this table it has fewer than 10 items on it
            sql="""
            UPDATE posthog_dashboardtemplate SET scope = 'team' WHERE team_id IS NOT NULL -- not-null-ignore
            """,
            reverse_sql=migrations.RunSQL.noop,
        ),
    ]
|
books | books | """ the good stuff! the books! """
from uuid import uuid4
from bookwyrm import forms, models
from bookwyrm.activitypub import ActivitypubResponse
from bookwyrm.connectors import ConnectorException, connector_manager
from bookwyrm.connectors.abstract_connector import get_image
from bookwyrm.settings import PAGE_LENGTH
from bookwyrm.views.helpers import is_api_request, maybe_redirect_local_path
from django.contrib.auth.decorators import login_required, permission_required
from django.core.paginator import Paginator
from django.db.models import Avg, Q
from django.http import Http404
from django.shortcuts import get_object_or_404, redirect
from django.template.response import TemplateResponse
from django.views import View
from django.views.decorators.http import require_POST
# pylint: disable=no-self-use
class Book(View):
    """a book! this is the stuff"""

    def get(self, request, book_id, **kwargs):
        """info about a book"""
        # API callers get the ActivityPub document instead of HTML
        if is_api_request(request):
            book = get_object_or_404(
                models.Book.objects.select_subclasses(), id=book_id
            )
            return ActivitypubResponse(book.to_activity())

        # which of the viewer's own statuses to show, if any
        # (only meaningful for logged-in users)
        user_statuses = (
            kwargs.get("user_statuses", False)
            if request.user.is_authenticated
            else False
        )

        # it's safe to use this OR because edition and work and subclasses of the same
        # table, so they never have clashing IDs
        book = (
            models.Edition.viewer_aware_objects(request.user)
            .filter(Q(id=book_id) | Q(parent_work__id=book_id))
            .order_by("-edition_rank")
            .select_related("parent_work")
            .prefetch_related("authors", "file_links")
            .first()
        )

        if not book or not book.parent_work:
            raise Http404()

        # NOTE: the walrus binds the whole `not user_statuses and maybe_...`
        # expression, so the redirect only happens when user_statuses is falsy
        if redirect_local_path := not user_statuses and maybe_redirect_local_path(
            request, book
        ):
            return redirect_local_path

        # all reviews for all editions of the book
        reviews = models.Review.privacy_filter(request.user).filter(
            book__parent_work__editions=book
        )

        # the reviews to show
        if user_statuses:
            if user_statuses == "review":
                queryset = book.review_set.select_subclasses()
            elif user_statuses == "comment":
                queryset = book.comment_set
            else:
                queryset = book.quotation_set
            queryset = queryset.filter(user=request.user, deleted=False)
        else:
            # text reviews only; bare ratings are passed separately below
            queryset = reviews.exclude(Q(content__isnull=True) | Q(content=""))
        queryset = queryset.select_related("user").order_by("-published_date")
        paginated = Paginator(queryset, PAGE_LENGTH)

        # lists (visible to this viewer) containing any edition of the work
        lists = models.List.privacy_filter(
            request.user,
        ).filter(
            listitem__approved=True,
            listitem__book__in=book.parent_work.editions.all(),
        )
        data = {
            "book": book,
            "statuses": paginated.get_page(request.GET.get("page")),
            "review_count": reviews.count(),
            # "ratings" are reviews without any text content
            "ratings": reviews.filter(
                Q(content__isnull=True) | Q(content="")
            ).select_related("user")
            if not user_statuses
            else None,
            "rating": reviews.aggregate(Avg("rating"))["rating__avg"],
            "lists": lists,
            "update_error": kwargs.get("update_error", False),
        }

        if request.user.is_authenticated:
            # viewer-specific extras: shelves, readthroughs, own status counts
            data["list_options"] = request.user.list_set.exclude(id__in=data["lists"])
            data["file_link_form"] = forms.FileLinkForm()
            readthroughs = models.ReadThrough.objects.filter(
                user=request.user,
                book=book,
            ).order_by("start_date")
            for readthrough in readthroughs:
                readthrough.progress_updates = (
                    readthrough.progressupdate_set.all().order_by("-updated_date")
                )
            data["readthroughs"] = readthroughs
            data["user_shelfbooks"] = models.ShelfBook.objects.filter(
                user=request.user, book=book
            ).select_related("shelf")
            # shelved copies of *other* editions of the same work
            data["other_edition_shelves"] = models.ShelfBook.objects.filter(
                ~Q(book=book),
                user=request.user,
                book__parent_work=book.parent_work,
            ).select_related("shelf", "book")
            filters = {"user": request.user, "deleted": False}
            data["user_statuses"] = {
                "review_count": book.review_set.filter(**filters).count(),
                "comment_count": book.comment_set.filter(**filters).count(),
                "quotation_count": book.quotation_set.filter(**filters).count(),
            }
        return TemplateResponse(request, "book/book.html", data)
@login_required
@require_POST
def upload_cover(request, book_id):
    """Upload a new cover, either from a posted URL or an uploaded file.

    Redirects back to the book page; ``?cover_error=True`` is appended
    when a cover URL was supplied but could not be fetched.
    """
    book = get_object_or_404(models.Edition, id=book_id)
    book.last_edited_by = request.user

    url = request.POST.get("cover-url")
    if url:
        image = set_cover_from_url(url)
        if image:
            # bugfix: a successfully fetched cover previously still
            # redirected with ?cover_error=True
            book.cover.save(*image)
            return redirect(book.local_path)
        return redirect(f"{book.local_path}?cover_error=True")

    form = forms.CoverForm(request.POST, request.FILES, instance=book)
    if not form.is_valid() or not form.files.get("cover"):
        return redirect(book.local_path)

    book.cover = form.files["cover"]
    book.save()
    return redirect(book.local_path)
def set_cover_from_url(url):
    """Fetch a cover image from *url*; return ``[filename, content]`` or None."""
    try:
        image_content, extension = get_image(url)
    except:  # pylint: disable=bare-except
        # best-effort: any fetch/parse failure means "no cover"
        return None
    if not image_content:
        return None
    return [f"{uuid4()}.{extension}", image_content]
@login_required
@require_POST
@permission_required("bookwyrm.edit_book", raise_exception=True)
def add_description(request, book_id):
    """Set a book's description text and record who edited it."""
    book = get_object_or_404(models.Edition, id=book_id)
    book.description = request.POST.get("description")
    book.last_edited_by = request.user
    book.save(update_fields=["description", "last_edited_by"])
    return redirect("book", book.id)
@login_required
@require_POST
def resolve_book(request):
    """Look up (or import) a book by its remote_id and go to its local page."""
    remote_id = request.POST.get("remote_id")
    connector = connector_manager.get_or_create_connector(remote_id)
    book = connector.get_or_create_book(remote_id)
    return redirect("book", book.id)
@login_required
@require_POST
@permission_required("bookwyrm.edit_book", raise_exception=True)
# pylint: disable=unused-argument
def update_book_from_remote(request, book_id, connector_identifier):
    """Refresh this book's data from its remote source."""
    connector_model = get_object_or_404(
        models.Connector, identifier=connector_identifier
    )
    connector = connector_manager.load_connector(connector_model)
    book = get_object_or_404(models.Book.objects.select_subclasses(), id=book_id)
    try:
        connector.update_book_from_remote(book)
    except ConnectorException:
        # remote source unavailable, or it doesn't know this book
        return Book().get(request, book_id, update_error=True)
    return redirect("book", book.id)
|
process | plugins | """Site services for use with a Web Site Process Bus."""
import os
import re
import signal as _signal
import sys
import threading
import time
from cherrypy._cpcompat import (
SetDaemonProperty,
Timer,
basestring,
get_daemon,
get_thread_ident,
ntob,
set,
)
# _module__file__base is used by Autoreload to make
# absolute any filenames retrieved from sys.modules which are not
# already absolute paths. This is to work around Python's quirk
# of importing the startup script and using a relative filename
# for it in sys.modules.
#
# Autoreload examines sys.modules afresh every time it runs. If an application
# changes the current directory by executing os.chdir(), then the next time
# Autoreload runs, it will not be able to find any filenames which are
# not absolute paths, because the current directory is not the same as when the
# module was first imported. Autoreload will then wrongly conclude the file
# has "changed", and initiate the shutdown/re-exec sequence.
# See ticket #917.
# For this workaround to have a decent probability of success, this module
# needs to be imported as early as possible, before the app has much chance
# to change the working directory.
_module__file__base = os.getcwd()
class SimplePlugin(object):
    """Plugin base class which auto-subscribes methods for known channels."""

    bus = None
    """A :class:`Bus <cherrypy.process.wspbus.Bus>`, usually cherrypy.engine.
    """

    def __init__(self, bus):
        self.bus = bus

    def subscribe(self):
        """Register this object as a (multi-channel) listener on the bus."""
        # A method named after a bus channel (start, stop, exit, ...)
        # becomes the listener for that channel.
        for channel in self.bus.listeners:
            callback = getattr(self, channel, None)
            if callback is None:
                continue
            self.bus.subscribe(channel, callback)

    def unsubscribe(self):
        """Unregister this object as a listener on the bus."""
        for channel in self.bus.listeners:
            callback = getattr(self, channel, None)
            if callback is None:
                continue
            self.bus.unsubscribe(channel, callback)
class SignalHandler(object):
    """Register bus channels (and listeners) for system signals.

    You can modify what signals your application listens for, and what it does
    when it receives signals, by modifying :attr:`SignalHandler.handlers`,
    a dict of {signal name: callback} pairs. The default set is::

        handlers = {'SIGTERM': self.bus.exit,
                    'SIGHUP': self.handle_SIGHUP,
                    'SIGUSR1': self.bus.graceful,
                   }

    The :func:`SignalHandler.handle_SIGHUP`` method calls
    :func:`bus.restart()<cherrypy.process.wspbus.Bus.restart>`
    if the process is daemonized, but
    :func:`bus.exit()<cherrypy.process.wspbus.Bus.exit>`
    if the process is attached to a TTY. This is because Unix window
    managers tend to send SIGHUP to terminal windows when the user closes them.

    Feel free to add signals which are not available on every platform.
    The :class:`SignalHandler` will ignore errors raised from attempting
    to register handlers for unknown signals.
    """

    handlers = {}
    """A map from signal names (e.g. 'SIGTERM') to handlers (e.g. bus.exit)."""

    signals = {}
    """A map from signal numbers to names."""

    # Build the number -> name lookup once at class creation; skip the
    # SIG_* pseudo-constants (SIG_DFL, SIG_IGN), which are not signals.
    for k, v in vars(_signal).items():
        if k.startswith("SIG") and not k.startswith("SIG_"):
            signals[v] = k
    del k, v

    def __init__(self, bus):
        self.bus = bus
        # Set default handlers
        self.handlers = {
            "SIGTERM": self.bus.exit,
            "SIGHUP": self.handle_SIGHUP,
            "SIGUSR1": self.bus.graceful,
        }
        if sys.platform[:4] == "java":
            # Jython: SIGUSR1 is unavailable on the JVM, and SIGINT needs
            # a custom handler (see http://bugs.jython.org/issue1313).
            del self.handlers["SIGUSR1"]
            self.handlers["SIGUSR2"] = self.bus.graceful
            self.bus.log(
                "SIGUSR1 cannot be set on the JVM platform. " "Using SIGUSR2 instead."
            )
            self.handlers["SIGINT"] = self._jython_SIGINT_handler
        # Previously installed OS handlers, saved so unsubscribe can restore them.
        self._previous_handlers = {}

    def _jython_SIGINT_handler(self, signum=None, frame=None):
        # See http://bugs.jython.org/issue1313
        self.bus.log("Keyboard Interrupt: shutting down bus")
        self.bus.exit()

    def subscribe(self):
        """Subscribe self.handlers to signals."""
        for sig, func in self.handlers.items():
            try:
                self.set_handler(sig, func)
            except ValueError:
                # Signal not supported on this platform; ignored per class docs.
                pass

    def unsubscribe(self):
        """Unsubscribe self.handlers from signals."""
        for signum, handler in self._previous_handlers.items():
            signame = self.signals[signum]
            if handler is None:
                self.bus.log("Restoring %s handler to SIG_DFL." % signame)
                handler = _signal.SIG_DFL
            else:
                self.bus.log("Restoring %s handler %r." % (signame, handler))
            try:
                our_handler = _signal.signal(signum, handler)
                if our_handler is None:
                    # signal() returned None: nothing was registered, meaning
                    # something else replaced our handler in the meantime.
                    self.bus.log(
                        "Restored old %s handler %r, but our "
                        "handler was not registered." % (signame, handler),
                        level=30,
                    )
            except ValueError:
                self.bus.log(
                    "Unable to restore %s handler %r." % (signame, handler),
                    level=40,
                    traceback=True,
                )

    def set_handler(self, signal, listener=None):
        """Subscribe a handler for the given signal (number or name).

        If the optional 'listener' argument is provided, it will be
        subscribed as a listener for the given signal's channel.

        If the given signal name or number is not available on the current
        platform, ValueError is raised.
        """
        if isinstance(signal, basestring):
            signum = getattr(_signal, signal, None)
            if signum is None:
                raise ValueError("No such signal: %r" % signal)
            signame = signal
        else:
            try:
                signame = self.signals[signal]
            except KeyError:
                raise ValueError("No such signal: %r" % signal)
            signum = signal
        # Install our dispatcher and remember whatever handler was there before.
        prev = _signal.signal(signum, self._handle_signal)
        self._previous_handlers[signum] = prev
        if listener is not None:
            self.bus.log("Listening for %s." % signame)
            self.bus.subscribe(signame, listener)

    def _handle_signal(self, signum=None, frame=None):
        """Python signal handler (self.set_handler subscribes it for you)."""
        signame = self.signals[signum]
        self.bus.log("Caught signal %s." % signame)
        # Publish on the bus channel named after the signal (e.g. 'SIGTERM').
        self.bus.publish(signame)

    def handle_SIGHUP(self):
        """Restart if daemonized, else exit."""
        if os.isatty(sys.stdin.fileno()):
            # not daemonized (may be foreground or background)
            self.bus.log("SIGHUP caught but not daemonized. Exiting.")
            self.bus.exit()
        else:
            self.bus.log("SIGHUP caught while daemonized. Restarting.")
            self.bus.restart()
# pwd/grp are Unix-only; fall back to None so DropPrivileges can detect
# their absence at runtime (e.g. on Windows).
try:
    import grp
    import pwd
except ImportError:
    pwd, grp = None, None
class DropPrivileges(SimplePlugin):
    """Drop privileges. uid/gid arguments not available on Windows.

    Special thanks to `Gavin Baker <http://antonym.org/2005/12/dropping-privileges-in-python.html>`_
    """

    def __init__(self, bus, umask=None, uid=None, gid=None):
        SimplePlugin.__init__(self, bus)
        # Set to True after the first start(); subsequent starts only log.
        self.finalized = False
        self.uid = uid
        self.gid = gid
        self.umask = umask

    def _get_uid(self):
        return self._uid

    def _set_uid(self, val):
        # Accept a numeric uid or a user name (resolved via pwd, if present).
        if val is not None:
            if pwd is None:
                self.bus.log("pwd module not available; ignoring uid.", level=30)
                val = None
            elif isinstance(val, basestring):
                # Name -> numeric uid (pw_uid is field 2 of the pwd entry).
                val = pwd.getpwnam(val)[2]
        self._uid = val

    uid = property(
        _get_uid, _set_uid, doc="The uid under which to run. Availability: Unix."
    )

    def _get_gid(self):
        return self._gid

    def _set_gid(self, val):
        # Accept a numeric gid or a group name (resolved via grp, if present).
        if val is not None:
            if grp is None:
                self.bus.log("grp module not available; ignoring gid.", level=30)
                val = None
            elif isinstance(val, basestring):
                # Name -> numeric gid (gr_gid is field 2 of the grp entry).
                val = grp.getgrnam(val)[2]
        self._gid = val

    gid = property(
        _get_gid, _set_gid, doc="The gid under which to run. Availability: Unix."
    )

    def _get_umask(self):
        return self._umask

    def _set_umask(self, val):
        if val is not None:
            try:
                # Probe for os.umask; it is absent on some platforms.
                os.umask
            except AttributeError:
                self.bus.log("umask function not available; ignoring umask.", level=30)
                val = None
        self._umask = val

    umask = property(
        _get_umask,
        _set_umask,
        doc="""The default permission mode for newly created files and
        directories.

        Usually expressed in octal format, for example, ``0644``.
        Availability: Unix, Windows.
        """,
    )

    def start(self):
        """Drop to the configured uid/gid and apply the umask (first start only)."""
        # uid/gid
        def current_ids():
            """Return the current (uid, gid) if available."""
            name, group = None, None
            if pwd:
                name = pwd.getpwuid(os.getuid())[0]
            if grp:
                group = grp.getgrgid(os.getgid())[0]
            return name, group

        if self.finalized:
            if not (self.uid is None and self.gid is None):
                self.bus.log("Already running as uid: %r gid: %r" % current_ids())
        else:
            if self.uid is None and self.gid is None:
                if pwd or grp:
                    self.bus.log("uid/gid not set", level=30)
            else:
                self.bus.log("Started as uid: %r gid: %r" % current_ids())
                # Drop the gid before the uid: once the uid (and its
                # privileges) are gone, setgid/setgroups would fail.
                if self.gid is not None:
                    os.setgid(self.gid)
                    os.setgroups([])
                if self.uid is not None:
                    os.setuid(self.uid)
                self.bus.log("Running as uid: %r gid: %r" % current_ids())

        # umask
        if self.finalized:
            if self.umask is not None:
                self.bus.log("umask already set to: %03o" % self.umask)
        else:
            if self.umask is None:
                self.bus.log("umask not set", level=30)
            else:
                old_umask = os.umask(self.umask)
                self.bus.log("umask old: %03o, new: %03o" % (old_umask, self.umask))

        self.finalized = True

    # This is slightly higher than the priority for server.start
    # in order to facilitate the most common use: starting on a low
    # port (which requires root) and then dropping to another user.
    start.priority = 77
class Daemonizer(SimplePlugin):
    """Daemonize the running script.

    Use this with a Web Site Process Bus via::

        Daemonizer(bus).subscribe()

    When this component finishes, the process is completely decoupled from
    the parent environment. Please note that when this component is used,
    the return code from the parent process will still be 0 if a startup
    error occurs in the forked children. Errors in the initial daemonizing
    process still return proper exit codes. Therefore, if you use this
    plugin to daemonize, don't use the return code as an accurate indicator
    of whether the process fully started. In fact, that return code only
    indicates if the process succesfully finished the first fork.
    """

    def __init__(self, bus, stdin="/dev/null", stdout="/dev/null", stderr="/dev/null"):
        SimplePlugin.__init__(self, bus)
        # Paths that the daemon's standard streams are re-opened on.
        self.stdin = stdin
        self.stdout = stdout
        self.stderr = stderr
        self.finalized = False

    def start(self):
        """Perform the classic double-fork and detach from the controlling terminal."""
        if self.finalized:
            self.bus.log("Already deamonized.")
            # NOTE(review): no early return here, so a second start() would
            # fork again — presumably the bus only starts once; confirm
            # before relying on repeated starts.

        # forking has issues with threads:
        # http://www.opengroup.org/onlinepubs/000095399/functions/fork.html
        # "The general problem with making fork() work in a multi-threaded
        # world is what to do with all of the threads..."
        # So we check for active threads:
        if threading.activeCount() != 1:
            self.bus.log(
                "There are %r active threads. "
                "Daemonizing now may cause strange failures." % threading.enumerate(),
                level=30,
            )

        # See http://www.erlenstar.demon.co.uk/unix/faq_2.html#SEC16
        # (or http://www.faqs.org/faqs/unix-faq/programmer/faq/ section 1.7)
        # and http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/66012

        # Finish up with the current stdout/stderr
        sys.stdout.flush()
        sys.stderr.flush()

        # Do first fork.
        try:
            pid = os.fork()
            if pid == 0:
                # This is the child process. Continue.
                pass
            else:
                # This is the first parent. Exit, now that we've forked.
                self.bus.log("Forking once.")
                os._exit(0)
        except OSError:
            # Python raises OSError rather than returning negative numbers.
            exc = sys.exc_info()[1]
            sys.exit(
                "%s: fork #1 failed: (%d) %s\n" % (sys.argv[0], exc.errno, exc.strerror)
            )

        # Become the session leader, detaching from the controlling terminal.
        os.setsid()

        # Do second fork
        try:
            pid = os.fork()
            if pid > 0:
                self.bus.log("Forking twice.")
                os._exit(0)  # Exit second parent
        except OSError:
            exc = sys.exc_info()[1]
            sys.exit(
                "%s: fork #2 failed: (%d) %s\n" % (sys.argv[0], exc.errno, exc.strerror)
            )

        os.chdir("/")
        os.umask(0)

        si = open(self.stdin, "r")
        so = open(self.stdout, "a+")
        se = open(self.stderr, "a+")

        # os.dup2(fd, fd2) will close fd2 if necessary,
        # so we don't explicitly close stdin/out/err.
        # See http://docs.python.org/lib/os-fd-ops.html
        os.dup2(si.fileno(), sys.stdin.fileno())
        os.dup2(so.fileno(), sys.stdout.fileno())
        os.dup2(se.fileno(), sys.stderr.fileno())

        self.bus.log("Daemonized to PID: %s" % os.getpid())
        self.finalized = True
    start.priority = 65
class PIDFile(SimplePlugin):
    """Maintain a PID file via a WSPBus."""

    def __init__(self, bus, pidfile):
        SimplePlugin.__init__(self, bus)
        # Path of the PID file written on start and removed on exit.
        self.pidfile = pidfile
        # Guards against writing the file more than once; see start().
        self.finalized = False

    def start(self):
        """Write the current PID to self.pidfile (at most once)."""
        pid = os.getpid()
        if self.finalized:
            self.bus.log("PID %r already written to %r." % (pid, self.pidfile))
        else:
            # Context manager closes the handle promptly instead of
            # relying on garbage collection of the anonymous file object.
            with open(self.pidfile, "wb") as f:
                f.write(ntob("%s\n" % pid, "utf8"))
            self.bus.log("PID %r written to %r." % (pid, self.pidfile))
            self.finalized = True
    start.priority = 70

    def exit(self):
        """Remove the PID file (best effort)."""
        try:
            os.remove(self.pidfile)
            self.bus.log("PID file removed: %r." % self.pidfile)
        except (KeyboardInterrupt, SystemExit):
            raise
        except OSError:
            # Narrowed from a bare except: os.remove only raises OSError
            # subclasses; the file may already be gone or be unremovable.
            pass
class PerpetualTimer(Timer):
    """A responsive subclass of threading.Timer whose run() method repeats.

    Use this timer only when you really need a very interruptible timer;
    this checks its 'finished' condition up to 20 times a second, which can
    result in pretty high CPU usage.
    """

    def __init__(self, *args, **kwargs):
        """Override parent constructor to allow 'bus' to be provided."""
        self.bus = kwargs.pop("bus", None)
        super(PerpetualTimer, self).__init__(*args, **kwargs)

    def run(self):
        """Invoke self.function every self.interval until cancelled."""
        while True:
            self.finished.wait(self.interval)
            # is_set() is the supported spelling; isSet() is a deprecated
            # alias slated for removal from threading.Event.
            if self.finished.is_set():
                return
            try:
                self.function(*self.args, **self.kwargs)
            except Exception:
                if self.bus:
                    self.bus.log(
                        "Error in perpetual timer thread function %r." % self.function,
                        level=40,
                        traceback=True,
                    )
                # Quit on first error to avoid massive logs.
                raise
class BackgroundTask(SetDaemonProperty, threading.Thread):
    """A subclass of threading.Thread whose run() method repeats.

    Use this class for most repeating tasks. It uses time.sleep() to wait
    for each interval, which isn't very responsive; that is, even if you call
    self.cancel(), you'll have to wait until the sleep() call finishes before
    the thread stops. To compensate, it defaults to being daemonic, which means
    it won't delay stopping the whole process.
    """

    def __init__(self, interval, function, args=None, kwargs=None, bus=None):
        threading.Thread.__init__(self)
        self.interval = interval
        self.function = function
        # Avoid mutable default arguments: the previous `args=[], kwargs={}`
        # defaults were shared across every instance that omitted them.
        self.args = args if args is not None else []
        self.kwargs = kwargs if kwargs is not None else {}
        self.running = False
        self.bus = bus

        # default to daemonic
        self.daemon = True

    def cancel(self):
        self.running = False

    def run(self):
        self.running = True
        while self.running:
            time.sleep(self.interval)
            if not self.running:
                return
            try:
                self.function(*self.args, **self.kwargs)
            except Exception:
                if self.bus:
                    self.bus.log(
                        "Error in background task thread function %r." % self.function,
                        level=40,
                        traceback=True,
                    )
                # Quit on first error to avoid massive logs.
                raise
class Monitor(SimplePlugin):
    """WSPBus listener to periodically run a callback in its own thread."""

    callback = None
    """The function to call at intervals."""

    frequency = 60
    """The time in seconds between callback runs."""

    thread = None
    """A :class:`BackgroundTask<cherrypy.process.plugins.BackgroundTask>`
    thread.
    """

    def __init__(self, bus, callback, frequency=60, name=None):
        SimplePlugin.__init__(self, bus)
        self.callback = callback
        self.frequency = frequency
        self.thread = None
        self.name = name

    def start(self):
        """Start our callback in its own background thread."""
        if self.frequency > 0:
            threadname = self.name or self.__class__.__name__
            if self.thread is None:
                self.thread = BackgroundTask(
                    self.frequency, self.callback, bus=self.bus
                )
                # .name is the supported spelling; setName() is deprecated.
                self.thread.name = threadname
                self.thread.start()
                self.bus.log("Started monitor thread %r." % threadname)
            else:
                self.bus.log("Monitor thread %r already started." % threadname)
    start.priority = 70

    def stop(self):
        """Stop our callback's background task thread."""
        if self.thread is None:
            # Parenthesize the fallback: '%' binds tighter than 'or', so the
            # original "... % self.name or default" formatted None and the
            # class-name fallback was never used.
            self.bus.log(
                "No thread running for %s." % (self.name or self.__class__.__name__)
            )
        else:
            if self.thread is not threading.current_thread():
                name = self.thread.name
                self.thread.cancel()
                if not get_daemon(self.thread):
                    self.bus.log("Joining %r" % name)
                    self.thread.join()
                self.bus.log("Stopped thread %r." % name)
            self.thread = None

    def graceful(self):
        """Stop the callback's background task thread and restart it."""
        self.stop()
        self.start()
class Autoreloader(Monitor):
    """Monitor which re-executes the process when files change.

    This :ref:`plugin<plugins>` restarts the process (via :func:`os.execv`)
    if any of the files it monitors change (or is deleted). By default, the
    autoreloader monitors all imported modules; you can add to the
    set by adding to ``autoreload.files``::

        cherrypy.engine.autoreload.files.add(myFile)

    If there are imported files you do *not* wish to monitor, you can
    adjust the ``match`` attribute, a regular expression. For example,
    to stop monitoring cherrypy itself::

        cherrypy.engine.autoreload.match = r'^(?!cherrypy).+'

    Like all :class:`Monitor<cherrypy.process.plugins.Monitor>` plugins,
    the autoreload plugin takes a ``frequency`` argument. The default is
    1 second; that is, the autoreloader will examine files once each second.
    """

    files = None
    """The set of files to poll for modifications."""

    frequency = 1
    """The interval in seconds at which to poll for modified files."""

    match = ".*"
    """A regular expression by which to match filenames."""

    def __init__(self, bus, frequency=1, match=".*"):
        # Maps filename -> last-seen mtime; None marks a module with no
        # .py file so it is skipped on later polls (see run()).
        self.mtimes = {}
        self.files = set()
        self.match = match
        Monitor.__init__(self, bus, self.run, frequency)

    def start(self):
        """Start our own background task thread for self.run."""
        if self.thread is None:
            # Forget stale mtimes so a fresh start re-baselines every file
            # instead of immediately triggering a restart.
            self.mtimes = {}
        Monitor.start(self)
    start.priority = 70

    def sysfiles(self):
        """Return a Set of sys.modules filenames to monitor."""
        files = set()
        for k, m in list(sys.modules.items()):
            if re.match(self.match, k):
                # Zipimported modules expose the archive path instead of
                # a per-module __file__.
                if hasattr(m, "__loader__") and hasattr(m.__loader__, "archive"):
                    f = m.__loader__.archive
                else:
                    f = getattr(m, "__file__", None)
                    if f is not None and not os.path.isabs(f):
                        # ensure absolute paths so a os.chdir() in the app
                        # doesn't break me
                        f = os.path.normpath(os.path.join(_module__file__base, f))
                files.add(f)
        return files

    def run(self):
        """Reload the process if registered files have been modified."""
        for filename in self.sysfiles() | self.files:
            if filename:
                if filename.endswith(".pyc"):
                    # Watch the .py source, not the compiled file.
                    filename = filename[:-1]

                oldtime = self.mtimes.get(filename, 0)
                if oldtime is None:
                    # Module with no .py file. Skip it.
                    continue

                try:
                    mtime = os.stat(filename).st_mtime
                except OSError:
                    # Either a module with no .py file, or it's been deleted.
                    mtime = None

                if filename not in self.mtimes:
                    # If a module has no .py file, this will be None.
                    self.mtimes[filename] = mtime
                else:
                    if mtime is None or mtime > oldtime:
                        # The file has been deleted or modified.
                        self.bus.log("Restarting because %s changed." % filename)
                        self.thread.cancel()
                        self.bus.log("Stopped thread %r." % self.thread.getName())
                        self.bus.restart()
                        return
class ThreadManager(SimplePlugin):
    """Manager for HTTP request threads.

    If you have control over thread creation and destruction, publish to
    the 'acquire_thread' and 'release_thread' channels (for each thread).
    This will register/unregister the current thread and publish to
    'start_thread' and 'stop_thread' listeners in the bus as needed.

    If threads are created and destroyed by code you do not control
    (e.g., Apache), then, at the beginning of every HTTP request,
    publish to 'acquire_thread' only. You should not publish to
    'release_thread' in this case, since you do not know whether
    the thread will be re-used or not. The bus will call
    'stop_thread' listeners for you when it stops.
    """

    threads = None
    """A map of {thread ident: index number} pairs."""

    def __init__(self, bus):
        self.threads = {}
        SimplePlugin.__init__(self, bus)
        # Ensure all four channels exist even before anyone subscribes.
        for channel in ("acquire_thread", "start_thread",
                        "release_thread", "stop_thread"):
            self.bus.listeners.setdefault(channel, set())

    def acquire_thread(self):
        """Run 'start_thread' listeners for the current thread.

        If the current thread has already been seen, any 'start_thread'
        listeners will not be run again.
        """
        ident = get_thread_ident()
        if ident not in self.threads:
            # We can't just use get_ident as the thread ID
            # because some platforms reuse thread ID's.
            index = len(self.threads) + 1
            self.threads[ident] = index
            self.bus.publish("start_thread", index)

    def release_thread(self):
        """Release the current thread and run 'stop_thread' listeners."""
        index = self.threads.pop(get_thread_ident(), None)
        if index is not None:
            self.bus.publish("stop_thread", index)

    def stop(self):
        """Release all threads and run all 'stop_thread' listeners."""
        for _ident, index in self.threads.items():
            self.bus.publish("stop_thread", index)
        self.threads.clear()

    graceful = stop
|
base | hoster | # -*- coding: utf-8 -*-
import inspect
import re
import time
import urllib.parse
from pyload.core.network.exceptions import Abort, Fail, Reconnect, Retry, Skip
from pyload.core.utils import format, parse
from pyload.core.utils.old import fixurl
from ..helpers import parse_html_form, replace_patterns
from .captcha import BaseCaptcha
from .plugin import BasePlugin
# TODO: Recheck in 0.6.x
def get_info(urls):
    #: result = [ .. (name, size, status, url) .. ]
    # Module-level stub kept for API compatibility; always returns None.
    # Real link-checking is implemented on the hoster classes themselves.
    pass
# TODO: Remove in 0.6.x
def parse_file_info(klass, url="", html=""):
    """Return the (name, size, status, url) tuple from klass.get_info()."""
    info = klass.get_info(url, html)
    return tuple(info[key] for key in ("name", "size", "status", "url"))
class BaseHoster(BasePlugin):
    """Base class for downloader ("hoster") plugins.

    Subclasses must override :meth:`process`; the ``init*`` / ``setup*``
    methods are optional extension points called during construction and
    before each download (see :meth:`_setup`).
    """

    __name__ = "BaseHoster"
    __type__ = "base"
    __version__ = "0.41"
    __status__ = "stable"

    __pattern__ = r"^unmatchable$"
    __config__ = [
        ("enabled", "bool", "Activated", True),
        ("use_premium", "bool", "Use premium account if available", True),
    ]

    __description__ = """Base hoster plugin"""
    __license__ = "GPLv3"
    __authors__ = [("Walter Purcaro", "vuolter@gmail.com")]

    # (pattern, replacement) pairs applied to urls before processing.
    URL_REPLACEMENTS = []

    def get_info(self, url="", html=""):
        """Return a link-info dict (name/hash/pattern/size/status/url).

        Status is 7 when a url is given, 8 otherwise; "pattern" holds the
        named groups of ``__pattern__`` when the url matches it.
        """
        url = fixurl(url, unquote=True)
        info = {
            "name": parse.name(url),
            "hash": {},
            "pattern": {},
            "size": 0,
            "status": 7 if url else 8,
            "url": replace_patterns(url, self.URL_REPLACEMENTS),
        }

        try:
            info["pattern"] = re.match(self.__pattern__, url).groupdict()
        except Exception:
            # No match (re.match returned None) or no named groups:
            # leave "pattern" empty.
            pass

        return info

    def __init__(self, pyfile):
        self._init(pyfile.m.pyload)

        #: Engage want reconnection
        self.want_reconnect = False

        #: Enable simultaneous processing of multiple downloads
        self.multi_dl = True

        #: time.time() + wait in seconds
        self.waiting = False

        #: Account handler instance, see :py:class:`Account`
        self.account = None
        self.premium = None

        #: Associated pyfile instance, see `PyFile`
        self.pyfile = pyfile

        #: Holds thread in future
        self.thread = None

        #: Captcha stuff
        # TODO: Replace in 0.6.x:
        # _Captcha = self.pyload.plugin_manager.load_class("anticaptcha", self.classname) or BaseCaptcha
        # self.captcha = _Captcha(pyfile)
        self.captcha = BaseCaptcha(pyfile)

        #: Some plugins store html code here
        self.data = ""

        #: Dict of the amount of retries already made
        self.retries = {}

        self.init_base()
        self.init()

    def _log(self, level, plugintype, pluginname, args, kwargs):
        """Emit a log record at *level*, masking account credentials.

        The username is partially starred out and the password replaced
        entirely so neither appears in log output.
        """
        log = getattr(self.pyload.log, level)

        #: Hide any user/password
        try:
            user = self.account.user
            hidden_user = "{:*<{}}".format(self.account.user[:3], 7)
            args = tuple(arg.replace(user, hidden_user) for arg in args if arg)
        except (AttributeError, KeyError, TypeError):
            # No account / no user set: nothing to mask.
            pass

        try:
            pw = self.account.info["login"]["password"]
            hidden_pw = "*" * 10
            args = tuple(arg.replace(pw, hidden_pw) for arg in args if arg)
        except (AttributeError, KeyError, TypeError):
            pass

        log(
            "{plugintype} {pluginname}[{id}]: {msg}".format(
                plugintype=plugintype.upper(),
                pluginname=pluginname,
                id=self.pyfile.id,
                msg=" | ".join(["%s"] * len(args)),
            ),
            *args,
            **kwargs,
        )

    def init_base(self):
        # Hook for intermediate base classes; runs before init().
        pass

    def setup_base(self):
        # Hook for intermediate base classes; runs before setup().
        pass

    def setup(self):
        """
        Setup for enviroment and other things, called before downloading (possibly more
        than one time)
        """
        pass

    def _setup(self):
        """Reset per-download state and (re)build the request object."""
        # TODO: Remove in 0.6.x
        self.pyfile.error = ""

        self.data = ""
        self.last_html = ""
        self.last_header = {}

        if self.config.get("use_premium", True):
            self.load_account()  # TODO: Move to PluginThread in 0.6.x
        else:
            self.account = False

        try:
            self.req.close()
        except Exception:
            # Best effort: there may be no previous request object.
            pass

        if self.account:
            self.req = self.pyload.request_factory.get_request(
                self.classname, self.account.user
            )
            # NOTE: Avoid one unnecessary get_info call by `self.account.premium` here
            self.premium = self.account.info["data"]["premium"]
        else:
            self.req = self.pyload.request_factory.get_request(self.classname)
            self.premium = False

        self.setup_base()
        self.grab_info()
        self.setup()
        self.check_status()

    def load_account(self):
        """Attach and select the account plugin for this hoster, if any."""
        if self.account is None:
            self.account = self.pyload.account_manager.get_account_plugin(
                self.classname
            )
        if self.account:
            self.account.choose()

    def _update_name(self):
        # Prefer the parsed name unless it is just the url echoed back.
        name = self.info.get("name")

        if name and name != self.info.get("url"):
            self.pyfile.name = name
        else:
            name = self.pyfile.name

        self.log_info(self._("Link name: {}").format(name))

    def _update_size(self):
        size = self.info.get("size")

        if size > 0:
            # TODO: Fix int conversion in 0.6.x
            self.pyfile.size = int(self.info.get("size"))
        else:
            size = self.pyfile.size

        if size:
            self.log_info(
                self._("Link size: {} ({} bytes)").format(format.size(size), size)
            )
        else:
            self.log_info(self._("Link size: N/D"))

    def _update_status(self):
        # 14 is the fallback status when none was parsed.
        self.pyfile.status = self.info.get("status", 14)
        self.pyfile.sync()

        self.log_info(self._("Link status: ") + self.pyfile.get_status_name())

    def sync_info(self):
        """Push name/size/status from self.info onto the pyfile."""
        self._update_name()
        self._update_size()
        self._update_status()

    def grab_info(self):
        """Refresh self.info from the current url/html unless already online (status 2)."""
        if self.pyfile.status != 2:
            self.log_info(self._("Grabbing link info..."))

            old_info = dict(self.info)
            new_info = self.get_info(
                replace_patterns(self.pyfile.url, self.URL_REPLACEMENTS), self.data
            )

            self.info.update(new_info)

            self.log_debug(f"Link info: {self.info}")
            self.log_debug(f"Previous link info: {old_info}")

            self.sync_info()

    def check_status(self):
        """Raise the control-flow exception matching the pyfile status.

        1 -> offline, 4 -> skip, 6 -> temp offline, 8 -> fail,
        9 (or an abort request) -> abort.
        """
        status = self.pyfile.status

        if status == 1:
            self.offline()

        elif status == 4:
            self.skip(self.pyfile.statusname)

        elif status == 6:
            self.temp_offline()

        elif status == 8:
            self.fail()

        elif status == 9 or self.pyfile.abort:
            self.abort()

    def _initialize(self):
        """Log plugin version/status and abort broken plugins early."""
        self.log_debug("Plugin version: " + self.__version__)
        self.log_debug("Plugin status: " + self.__status__)

        if self.__status__ == "broken":
            self.abort(self._("Plugin is temporarily unavailable"))

        elif self.__status__ == "testing":
            self.log_warning(self._("Plugin may be unstable"))

    def _process(self, thread):
        """
        Handles important things to do before starting.
        """
        self.thread = thread

        self._initialize()
        self._setup()

        # TODO: Enable in 0.6.x
        # self.pyload.addon_manager.download_preparing(self.pyfile)
        # self.check_status()

        # TODO: Remove in 0.6.x
        if self.__type__ == "decrypter":
            self.pyload.addon_manager.download_preparing(self.pyfile)
            self.check_status()

        self.pyfile.set_status("starting")

        self.log_info(self._("Processing url: ") + self.pyfile.url)
        self.process(self.pyfile)
        self.check_status()

    #: Deprecated method, use `_process` instead (Remove in 0.6.x)
    def preprocessing(self, *args, **kwargs):
        # NOTE: Recheck info thread synchronization in 0.6.x
        return self._process(*args, **kwargs)

    def process(self, pyfile):
        """
        The "main" method of every downloader plugin, you **have to** overwrite it.
        """
        raise NotImplementedError

    def set_reconnect(self, reconnect):
        """Record whether a reconnection is wanted (if globally enabled)."""
        if self.pyload.config.get("reconnect", "enabled"):
            reconnect = reconnect and self.pyload.api.is_time_reconnect()

        self.log_debug(
            "RECONNECT{} required".format("" if reconnect else " not"),
            "Previous want_reconnect: {}".format(self.want_reconnect),
        )

        self.want_reconnect = bool(reconnect)

    def set_wait(self, seconds, strict=False):
        """
        Set a specific wait time later used with wait()

        :param seconds: wait time in seconds
        :param strict: strict mode
        """
        wait_time = float(seconds)

        if wait_time < 0:
            return False

        old_wait_until = self.pyfile.wait_until
        # Non-strict mode pads the deadline by one extra second.
        new_wait_until = time.time() + wait_time + float(not strict)

        self.log_debug(
            "WAIT set to timestamp {}".format(new_wait_until),
            "Previous wait_until: {}".format(old_wait_until),
        )

        self.pyfile.wait_until = new_wait_until

        return True

    def wait(self, seconds=None, reconnect=None):
        """
        Waits the time previously set.

        :param seconds: How many seconds to wait or if equals to None then use the value from set_wait()
        :param reconnect: True if reconnect would avoid wait time
        """
        if seconds is not None:
            self.set_wait(seconds)

        wait_time = self.pyfile.wait_until - time.time()

        if wait_time < 1:
            self.log_warning(self._("Invalid wait time interval"))
            return

        if reconnect is None:
            # Default: ask for a reconnect only for long waits.
            reconnect = wait_time > self.config.get("max_wait", 10) * 60
        self.set_reconnect(reconnect)

        self.waiting = True

        status = self.pyfile.status  # NOTE: Recheck in 0.6.x
        self.pyfile.set_status("waiting")

        self.log_info(self._("Waiting {}...").format(format.time(wait_time)))

        if self.want_reconnect:
            self.log_info(self._("Requiring reconnection..."))
            if self.account:
                self.log_warning(self._("Reconnection ignored due logged account"))

        if not self.want_reconnect or self.account:
            # Plain polling wait.
            while self.pyfile.wait_until > time.time():
                self.check_status()
                time.sleep(2)
        else:
            # Wait, but break out as soon as the reconnect begins.
            while self.pyfile.wait_until > time.time():
                self.check_status()
                self.thread.m.reconnecting.wait(1)

                if self.thread.m.reconnecting.is_set():
                    self.waiting = False
                    self.want_reconnect = False

                    self.req.clear_cookies()
                    raise Reconnect

                time.sleep(2)

        self.waiting = False
        self.pyfile.status = status  # NOTE: Recheck in 0.6.x

    def skip(self, msg=""):
        """
        Skip and give msg.
        """
        raise Skip(msg or self.pyfile.error or self.pyfile.pluginname)

    # TODO: Remove in 0.6.x
    def fail(self, msg=""):
        """
        Fail and give msg.
        """
        msg = msg.strip()

        if msg:
            self.pyfile.error = msg
        else:
            msg = (
                self.pyfile.error
                or self.info.get("error")
                or self.pyfile.get_status_name()
            )

        raise Fail(msg)

    def error(self, msg="", type="Parse"):
        """Fail with a formatted "<Type> error" message (plugin likely outdated)."""
        type = self._("{} error").format(
            type.strip().capitalize() if type else self._("Unknown")
        )
        msg = self._("{type}: {msg} | Plugin may be out of date").format(
            type=type, msg=msg or self.pyfile.error
        )

        self.fail(msg)

    def abort(self, msg=""):
        """
        Abort and give msg.
        """
        if msg:  # TODO: Remove in 0.6.x
            self.pyfile.error = msg

        raise Abort

    # TODO: Recheck in 0.6.x
    def offline(self, msg=""):
        """
        Fail and indicate file is offline.
        """
        self.fail("offline")

    # TODO: Recheck in 0.6.x
    def temp_offline(self, msg=""):
        """
        Fail and indicates file ist temporary offline, the core may take consequences.
        """
        self.fail("temp. offline")

    def restart(self, msg="", premium=True):
        """Retry from the start; with premium=False, fall back to free mode."""
        if not msg:
            msg = (
                self._("Restart plugin")
                if premium
                else self._("Fallback to free processing")
            )

        if not premium:
            if self.premium:
                self.restart_free = True
            else:
                self.fail(
                    "{} | {}".format(msg, self._("Url was already processed as free"))
                )

        raise Retry(msg)

    def retry(self, attempts=5, wait=1, msg="", msgfail="Max retries reached"):
        """
        Retries and begin again from the beginning.

        :param attempts: number of maximum retries
        :param wait: time to wait in seconds before retry
        :param msg: message to pass to retry if attempts value was not yet reached
        :param msgfail: message passed to fail if attempts value was reached
        """
        frame = inspect.currentframe()
        try:
            # Keyed by the caller's line number so every call site keeps
            # its own independent attempt counter.
            id = frame.f_back.f_lineno
        finally:
            del frame  #: Delete the frame or it won't be cleaned

        if id not in self.retries:
            self.retries[id] = 0

        if 0 < attempts <= self.retries[id]:
            self.fail(msgfail)

        self.retries[id] += 1

        self.wait(wait)

        raise Retry(msg)

    def retry_captcha(
        self, attempts=10, wait=1, msg="", msgfail="Max captcha retries reached"
    ):
        """Mark the captcha solution invalid and retry the download."""
        self.captcha.invalid(msg)
        self.retry(attempts, wait, msg=self._("Retry Captcha"), msgfail=msgfail)

    def fixurl(self, url, baseurl=None):
        """Resolve a possibly-relative *url* against *baseurl* (default: pyfile url)."""
        baseurl = baseurl or self.pyfile.url
        if not urllib.parse.urlparse(url).scheme:
            url_p = urllib.parse.urlparse(baseurl)
            baseurl = "{}://{}".format(url_p.scheme, url_p.netloc)
            url = urllib.parse.urljoin(baseurl, url)

        return url

    def load(self, *args, **kwargs):
        # Re-check for abort/skip/fail before every network access.
        self.check_status()
        return super().load(*args, **kwargs)

    def parse_html_form(self, attr_str="", input_names={}):
        """Parse a form out of self.data (the last loaded html)."""
        return parse_html_form(attr_str, self.data, input_names)

    def get_password(self):
        """
        Get the password the user provided in the package.
        """
        return self.pyfile.package().password or ""

    def clean(self):
        """
        Clean everything and remove references.
        """
        super().clean()
        for attr in ("account", "html", "pyfile", "thread"):
            if hasattr(self, attr):
                setattr(self, attr, None)
|
libraries | rhythmbox | # -*- coding: utf-8 -*-
# rhythmbox.py
# Copyright (C) 2008-2009 concentricpuddle
# This file is part of puddletag, a semi-good music tag editor.
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
import os
import urllib.error
import urllib.parse
import urllib.request
from collections import defaultdict
from os import path
from xml.sax import make_parser
from xml.sax.handler import ContentHandler
from PyQt5.QtCore import QDir, QSettings, QUrl
from PyQt5.QtWidgets import (
QFileDialog,
QHBoxLayout,
QLabel,
QLineEdit,
QPushButton,
QVBoxLayout,
QWidget,
)
from .. import audioinfo, musiclib
from ..util import translate
# Aliases for the tag keys used throughout this module.
FILENAME, PATH = audioinfo.FILENAME, audioinfo.PATH

# Plugin metadata shown by puddletag's music-library chooser.
name = "Rhythmbox"
description = "Rhythmbox Database"
author = "concentricpuddle"
def getFilename(filename):
    """Map a rhythmdb 'location' value to puddletag's path fields."""
    decoded = urllib.request.url2pathname(filename)
    if decoded.startswith("file://"):
        decoded = decoded[len("file://"):]
    dirpath = path.dirname(decoded)
    return {
        "__dirpath": dirpath,
        PATH: decoded,
        FILENAME: path.basename(decoded),
        "__ext": path.splitext(decoded)[1][1:],
        "__dirname": path.basename(dirpath),
    }
# PEP 8 (E731): named functions instead of lambda assignments, so
# tracebacks and reprs carry the function names.
def getTime(date):
    """Format a rhythmdb integer timestamp via audioinfo.strtime."""
    return audioinfo.strtime(int(date))


def getCreated(created):
    return {"__created": getTime(created)}


def getModified(modified):
    return {"__modified": getTime(modified)}


def getLength(length):
    return {"__length": audioinfo.strlength(int(length))}


def getBitRate(bitrate):
    return {"__bitrate": bitrate + " kb/s"}
# Maps rhythmdb XML element names to either a puddletag tag key (plain
# string) or a callable returning a dict of derived fields.
CONVERSION = {
    "title": "title",
    "genre": "genre",
    "artist": "artist",
    "album": "album",
    "track-number": "track",
    "duration": getLength,
    "file-size": "__size",
    "location": getFilename,
    "first-seen": getCreated,
    "mtime": getModified,
    "last-seen": "__last_seen",
    "bitrate": getBitRate,
    "disc-number": "discnumber",
}
# PEP 8 (E731): named functions instead of lambda assignments.
def setLength(length):
    return {"duration": str(audioinfo.lnglength(length))}


def setCreated(created):
    return {"first-seen": str(audioinfo.lngtime(created))}


def setBitrate(bitrate):
    # Floor division keeps the py2-era integer result; true division
    # would serialize e.g. '128.0' instead of '128'.
    return {"bitrate": str(audioinfo.lngfrequency(bitrate) // 1000)}


def setModified(modified):
    return {"last-seen": str(audioinfo.lngtime(modified))}


def setFilename(filename):
    # The previous str(...).encode("utf8") yielded bytes, and
    # '"file://" + bytes' raises TypeError on Python 3. Convert the
    # percent-encoded QByteArray to str instead.
    encoded = bytes(QUrl.toPercentEncoding(filename, "/()\"'")).decode("utf8")
    return {"location": "file://" + encoded}
# Inverse of CONVERSION: maps puddletag tag keys back to rhythmdb XML
# element names, or to a callable producing {element: value}.
RECONVERSION = {
    "title": "title",
    "artist": "artist",
    "album": "album",
    "track": "track-number",
    "discnumber": "disc-number",
    "genre": "genre",
    "__length": setLength,
    "__created": setCreated,
    "__bitrate": setBitrate,
    "__modified": setModified,
    "__filename": setFilename,
    "__size": "file-size",
}

# Tags that tracksByTag-style queries may filter on.
SUPPORTEDTAGS = ["artist", "genre", "title", "track", "__size", "album"]
class DBParser(ContentHandler):
    """SAX handler that parses a Rhythmbox XML database into albums/tracks."""

    indent = " " * 4

    def __init__(self):
        # Information is stored as follows:
        # self.albums is a dictionary with each key being an artist.
        # self.albums[key] is also a dictionary with album names as keys
        # and an integer specifying the index of the album in self.tracks

        # self.tracks being a list of lists, each of which contains the
        # track metadata for an album as dictionaries.
        self.tagval = ""
        self.name = ""
        self.stargetting = False
        self.values = {}
        self.current = "#nothing"
        self.tracks = []
        self.albums = defaultdict(lambda: {})
        self.extravalues = []
        self.extras = False
        self.extratype = ""

    def characters(self, ch):
        # Accumulate character data for the element currently open.
        try:
            self.values[self.current] += ch
        except KeyError:
            self.values[self.current] = ch

    def endElement(self, name):
        """Fold a finished <entry> into tracks/albums or extravalues."""
        if name == "entry" and self.stargetting:
            self.stargetting = False
            if not self.extras:
                tag = {}
                for field, value in self.values.items():
                    try:
                        tag.update(CONVERSION[field](value.strip()))
                    except TypeError:
                        # CONVERSION maps this field to a plain key.
                        tag[CONVERSION[field]] = value.strip()
                    except KeyError:
                        # Unknown field: keep it, prefixed with '#'.
                        tag["#" + field] = value.strip()
                # Drop empty values.
                f = ((k, v.strip()) for k, v in tag.items())
                tag = dict((k, v) for k, v in f if v)
                album = tag.get("album", "")
                artist = tag.get("artist", "")
                albums = self.albums[artist]
                if album not in albums:
                    albums[album] = len(self.tracks)
                    self.tracks.append([tag])
                else:
                    self.tracks[albums[album]].append(tag)
            else:
                x = ((k, v.strip()) for k, v in self.values.items())
                x = dict((k, v) for k, v in x if v)
                x["name"] = self.extratype
                self.extratype = ""
                self.extravalues.append(x)
                self.extras = False
            self.values = {}

    def _escapedText(self, txt):
        # XML-escape text for writing back to the database. The previous
        # replacements were identity no-ops (the entities had been
        # HTML-unescaped at some point); restore the real entities.
        result = txt
        result = result.replace("&", "&amp;")
        result = result.replace("<", "&lt;")
        result = result.replace(">", "&gt;")
        return result

    def parse_file(self, filename):
        """Parse *filename* and return (albums, tracks).

        Raises musiclib.MusicLibError on a missing or invalid database.
        """
        parser = make_parser()
        parser.setContentHandler(self)
        try:
            parser.parse(filename)
        except ValueError:
            if not os.path.exists(filename):
                msg = "%s does not exist." % filename
            else:
                msg = "%s is not a valid Rhythmbox XML database." % filename
            raise musiclib.MusicLibError(0, msg)
        except (IOError, OSError) as detail:
            if not os.path.exists(filename):
                msg = "%s does not exist." % filename
            else:
                # strerror is an attribute (a str), not a method; calling
                # it raised TypeError and masked the real error message.
                msg = detail.strerror
            raise musiclib.MusicLibError(0, msg)
        self.filename = filename
        return self.albums, self.tracks

    def startElement(self, name, attrs):
        # First call handles the <rhythmdb> root, then rebinds itself to
        # the inner closure for all subsequent elements.
        def startelement(name, attrs):
            if name == "entry":
                if attrs.get("type") == "song":
                    self.stargetting = True
                else:
                    # Non-song entries (iradio, podcast, ...) are
                    # collected into self.extravalues.
                    self.extratype = attrs.get("type")
                    self.extras = True
                    self.stargetting = True
            if self.stargetting and name != "entry":
                self.current = name
                self.values[name] = ""

        if name == "rhythmdb":
            version = attrs.get("version")
            self.head = (
                '<?xml version="1.0" standalone="yes"?>\n'
                ' <rhythmdb version="%s">' % str(version)
            )
        self.startElement = startelement
class RhythmDB(ContentHandler):
indent = " " * 4
def __init__(self, filename):
    """Parse the Rhythmbox XML database at *filename*.

    Information is stored as follows:
    self.albums is a dictionary with each key being an artist.
    self.albums[key] is also a dictionary with album names as keys
    and an integer specifying the index of the album in self.tracks

    self.tracks being a list of lists, each of which contains the
    track metadata for an album as dictionaries.

    Raises musiclib.MusicLibError on a missing or invalid database.
    """
    self.tagval = ""
    self.name = ""
    self.stargetting = False
    self.values = {}
    self.current = "nothing"
    self.tracks = []
    self.albums = {}
    self.extravalues = []
    self.extras = False
    self.extratype = ""
    parser = make_parser()
    parser.setContentHandler(self)
    try:
        parser.parse(filename)
    except ValueError:
        if not os.path.exists(filename):
            msg = "%s does not exist." % filename
        else:
            msg = "%s is not a valid Rhythmbox XML database." % filename
        raise musiclib.MusicLibError(0, msg)
    except (IOError, OSError) as detail:
        if not os.path.exists(filename):
            msg = "%s does not exist." % filename
        else:
            # strerror is an attribute (a str), not a method; calling it
            # raised TypeError and masked the real error message.
            msg = detail.strerror
        raise musiclib.MusicLibError(0, msg)
    self.filename = filename
def startElement(self, name, attrs):
def startelement(name, attrs):
if name == "entry":
if attrs.get("type") == "song":
self.stargetting = True
else:
self.extratype = attrs.get("type")
self.extras = True
self.stargetting = True
if self.stargetting and name != "entry":
self.current = name
self.values[name] = ""
if name == "rhythmdb":
version = attrs.get("version")
self.head = (
'<?xml version="1.0" standalone="yes"?>\n'
' <rhythmdb version="%s">' % str(version)
)
self.startElement = startelement
def characters(self, ch):
try:
self.values[self.current] += ch
except KeyError:
self.values[self.current] = ch
    def endElement(self, name):
        """SAX end-of-element hook: when an <entry> closes, file the
        collected tag values into the library structures.

        CONVERSION appears to map rhythmdb tag names either to callables
        returning {key: value} dicts or to plain destination key names --
        it is defined elsewhere in this module; verify before relying on
        this description.
        """
        if name == "entry" and self.stargetting:
            self.stargetting = False
            if not self.extras:
                audio = {}
                for tag, value in self.values.items():
                    try:
                        audio.update(CONVERSION[tag](value.strip()))
                    except TypeError:
                        # CONVERSION[tag] was not callable: treat it as the
                        # destination key name.
                        audio[CONVERSION[tag]] = value.strip()
                    except KeyError:
                        # Unknown tag: keep it, namespaced with "___".
                        audio["___" + tag] = value.strip()
                audio["__library"] = "rhythmbox"
                if audio["artist"] not in self.albums:
                    self.albums[audio["artist"]] = {}
                albums = self.albums[audio["artist"]]
                if audio["album"] not in albums:
                    # First track of this album: start a new track list and
                    # index it under the artist.
                    albums[audio["album"]] = len(self.tracks)
                    self.tracks.append([audio])
                else:
                    index = albums[audio["album"]]
                    self.tracks[index].append(audio)
            else:
                # Non-song entry (podcast, radio station, ...): keep raw.
                x = dict([(z, v.strip()) for z, v in self.values.items()])
                x["name"] = self.extratype
                self.extratype = ""
                self.extravalues.append(x)
                self.extras = False
            self.values = {}
def tracksByTag(self, parent, parentvalue, child=None, childval=None):
if parent not in SUPPORTEDTAGS:
return
if parent == "artist" and child == "album":
return self.getTracks(parentvalue, childval)
if (childval is None) or (child is None):
files = []
for album in self.tracks:
for f in album:
if f[parent] == parentvalue:
files.append(f)
elif childval and child:
files = []
for album in self.tracks:
for f in album:
if f[parent] == parentvalue and f[child] == childval:
files.append(f)
return [musiclib.Tag(self, z) for z in files]
def children(self, parent, parentvalue, child):
if parent == "artist" and child == "album":
return self.getAlbums(parentvalue)
else:
values = set()
for album in self.tracks:
[values.add(z[child]) for z in album if z[parent] == parentvalue]
return list(values)
def distinctValues(self, tag):
if tag not in SUPPORTEDTAGS:
return
if tag == "artist":
return list(self.albums.keys())
else:
values = set()
for album in self.tracks:
[values.add(z[tag]) for z in album]
return list(values)
def getArtists(self):
return list(self.albums.keys())
def getAlbums(self, artist):
try:
return list(self.albums[artist].keys())
except KeyError:
return None
def getTracks(self, artist, albums=None):
ret = []
if albums is None:
albums = list(self.albums[artist].keys())
if artist in self.albums:
stored = self.albums[artist]
for album in albums:
if album in stored:
ret.extend(self.tracks[stored[album]])
return [musiclib.Tag(self, z) for z in ret]
def _escapedText(self, txt):
result = txt
result = result.replace("&", "&")
result = result.replace("<", "<")
result = result.replace(">", ">")
return result
def delTracks(self, tracks):
prevartist = None
prevalbum = None
for track in tracks:
track = audioinfo.stringtags(track)
artist = track["artist"]
album = track["album"]
if artist != prevartist or album != prevalbum:
dbtracks = self.tracks[self.albums[artist][album]]
filenames = [z[FILENAME] for z in dbtracks]
del dbtracks[filenames.index(track[FILENAME])]
filenames.remove(track[FILENAME])
if not dbtracks:
del self.albums[artist][album]
if not self.albums[artist]:
del self.albums[artist]
def saveTracks(self, tracks):
for old, new in tracks:
old, new = audioinfo.stringtags(old), audioinfo.stringtags(new)
artist = new["artist"]
album = new["album"]
if old["artist"] != artist:
if artist in self.albums:
if album in self.albums[artist]:
index = self.albums[artist][album]
self.tracks[index].append(new)
else:
self.albums[artist][album] = len(self.tracks)
self.tracks.append([new])
else:
self.albums[artist] = {album: len(self.tracks)}
self.tracks.append([new])
elif album != old["album"]:
if album in self.albums[artist]:
self.albums[artist][album].append(new)
else:
self.albums[artist][album] = len(self.tracks)
self.tracks.append([new])
else:
self.tracks[self.albums[artist][album]].append(new)
self.delTracks([old])
    def save(self):
        """Write the in-memory library back out as rhythmbox.xml, keeping a
        one-time backup of the original database file.

        RECONVERSION appears to be the inverse of CONVERSION (internal key
        -> rhythmdb tag name or converter callable); it is defined elsewhere
        in this module -- verify before relying on this description.
        """
        filename = path.join(path.dirname(self.filename), "rhythmbox.xml")
        f = open(filename, "w")
        entry = [self.head + "\n"]
        for album in self.tracks:
            for track in album:
                entry.append(' <entry type="song">\n')
                for key, tagvalue in track.items():
                    try:
                        if key.startswith("___"):
                            # Unknown tag preserved verbatim by endElement.
                            tagname = key[len("___") :]
                        else:
                            temp = RECONVERSION[key](tagvalue)
                            tagname = list(temp.keys())[0]
                            tagvalue = temp[tagname]
                    except TypeError:
                        # RECONVERSION[key] is a plain name, not a callable.
                        tagname = RECONVERSION[key]
                    except KeyError:
                        # Internal keys (e.g. __library) are not written out.
                        continue
                    entry.append(
                        " <%s>%s</%s>\n"
                        % (
                            self._escapedText(tagname),
                            self._escapedText(tagvalue),
                            self._escapedText(tagname),
                        )
                    )
                entry.append(" </entry>\n")
            # Flush after each album so the buffer stays small.
            f.write(("".join(entry)))
            entry = []
        entry = []
        for value in self.extravalues:
            # Non-song entries are written back with their raw tag values.
            entry.append(' <entry type ="%s">\n' % value["name"])
            [
                entry.append(
                    " <%s>%s</%s>\n"
                    % (
                        self._escapedText(val),
                        self._escapedText(value[val]),
                        self._escapedText(val),
                    )
                )
                for val in value
            ]
            entry.append(" </entry>\n")
            f.write(("".join(entry)))
            entry = []
        f.write("</rhythmdb>")
        f.close()
        # Keep the very first original database around as a backup.
        backup = path.join(path.dirname(self.filename), "oldrhythmdb.xml")
        if not path.exists(backup):
            os.rename(self.filename, backup)
        os.rename(filename, self.filename)
    def close(self):
        """No-op: the database lives entirely in memory, so there is no
        handle to release."""
        pass
    def search(self, term):
        """Case-insensitive substring search over artists, then albums,
        then every tag value of every remaining track."""
        term = term.upper()
        ret = []
        # Artists whose name matches contribute all of their tracks.
        artists = set([z for z in self.albums if term in z.upper()])
        [ret.extend(self.getTracks(z)) for z in artists]
        # For the remaining artists, match album names first and fall back
        # to scanning individual tag values.
        others = set(self.albums).difference(artists)
        for artist in others:
            albums = self.albums[artist]
            for album in albums:
                if term in album.upper():
                    ret.extend(self.getTracks(artist, [album]))
                else:
                    index = self.albums[artist][album]
                    tracks = self.tracks[index]
                    for track in tracks:
                        for value in track.values():
                            if term in value.upper():
                                ret.append(track)
                                # One matching value is enough per track.
                                break
        return [musiclib.Tag(self, z) for z in ret]
def updateSearch(self, term, tracks):
tags = [
"artist",
"title",
FILENAME,
"__path",
"album",
"genre",
"comment",
"year",
]
term = term.lower()
tracks = []
for audio in files:
temp = audioinfo.stringtags(audio)
for tag in tags:
if term in temp[tag].lower():
tracks.append(audio)
break
return tracks
class InitWidget(QWidget):
    """Qt widget that asks for the path to a Rhythmbox database file and
    builds a RhythmDB library from it."""

    def __init__(self, parent=None):
        QWidget.__init__(self, parent)
        # Default to the classic GNOME2-era Rhythmbox database location.
        self.dbpath = QLineEdit(
            path.join(str(QDir.homePath()), ".gnome2/rhythmbox/rhythmdb.xml")
        )
        vbox = QVBoxLayout()
        label = QLabel("&Database Path")
        label.setBuddy(self.dbpath)
        [vbox.addWidget(z) for z in [label, self.dbpath]]
        hbox = QHBoxLayout()
        openfile = QPushButton("&Browse...")
        hbox.addStretch()
        hbox.addWidget(openfile)
        vbox.addLayout(hbox)
        openfile.clicked.connect(self.getFile)
        vbox.addStretch()
        self.setLayout(vbox)
        self.dbpath.selectAll()
        self.dbpath.setFocus()

    def getFile(self):
        """Browse for a database file and put the choice in the line edit."""
        selectedFile = QFileDialog.getOpenFileName(
            self, "Select RhythmBox database file.", self.dbpath.text()
        )
        # getOpenFileName returns (filename, selected_filter).
        filename = selectedFile[0]
        if filename:
            self.dbpath.setText(filename)

    def library(self):
        """Build and return a RhythmDB for the chosen path.

        MusicLibError is passed through unchanged; any other failure is
        wrapped in a MusicLibError with a translated message.
        """
        dbpath = str(self.dbpath.text())
        try:
            return RhythmDB(dbpath)
        except musiclib.MusicLibError as e:
            raise e
        except Exception as e:
            raise musiclib.MusicLibError(
                0,
                translate(
                    "Rhythmbox", "{} is an invalid Rhythmbox music library file."
                ).format(dbpath),
            ) from e
if __name__ == "__main__":
    # Ad-hoc smoke test: parse a local database dump and print a few of the
    # parsed structures.  DBParser is defined earlier in this module.
    k = DBParser()
    x, y = k.parse_file("rdb.xml")
    print([i for i, z in enumerate(y) if len(z) > 1])
    print(list(x.keys())[0], x[list(x.keys())[0]], y[34])
|
objc | _lazyimport | """
Helper module that will enable lazy imports of Cocoa wrapper items.
This should improve startup times and memory usage, at the cost
of not being able to use 'from Cocoa import *'
"""
__all__ = ("ObjCLazyModule",)
import re
import struct
import sys
import objc
from objc import getClassList, loadBundle, lookUpClass, nosuchclass_error
# Concrete module type (same object as types.ModuleType); ObjCLazyModule
# instances replace plain framework modules in sys.modules.
ModuleType = type(sys)
def _loadBundle(frameworkName, frameworkIdentifier, frameworkPath):
    """Load a framework bundle, preferring lookup by bundle identifier and
    falling back to the filesystem path."""
    if frameworkIdentifier is None:
        # No identifier available: load straight from the path.
        return loadBundle(
            frameworkName, {}, bundle_path=frameworkPath, scan_classes=False
        )
    try:
        return loadBundle(
            frameworkName,
            {},
            bundle_identifier=frameworkIdentifier,
            scan_classes=False,
        )
    except ImportError:
        # Identifier lookup failed; retry using the path.
        return loadBundle(
            frameworkName, {}, bundle_path=frameworkPath, scan_classes=False
        )
class GetAttrMap(object):
    """Adapts attribute access on *container* into a read-only mapping.

    Used as the locals mapping when evaluating metadata "expressions"
    (see ObjCLazyModule.__get_constant), so names resolve through the lazy
    module's __getattr__.
    """
    __slots__ = ("_container",)

    def __init__(self, container):
        self._container = container

    def __getitem__(self, key):
        try:
            return getattr(self._container, key)
        except AttributeError:
            # Mapping protocol expects KeyError for missing keys.
            raise KeyError(key)
class ObjCLazyModule(ModuleType):
    """Module subclass that resolves framework constants, enums, functions,
    expressions, aliases and Objective-C classes on first attribute access
    (see __getattr__), to reduce import time and memory use."""

    # Define slots for all attributes, that way they don't end up in __dict__.
    __slots__ = (
        "_ObjCLazyModule__bundle",
        "_ObjCLazyModule__enummap",
        "_ObjCLazyModule__funcmap",
        "_ObjCLazyModule__parents",
        "_ObjCLazyModule__varmap",
        "_ObjCLazyModule__inlinelist",
        "_ObjCLazyModule__aliases",
    )
def __init__(
self,
name,
frameworkIdentifier,
frameworkPath,
metadict,
inline_list=None,
initialdict={},
parents=(),
):
super(ObjCLazyModule, self).__init__(name)
if frameworkIdentifier is not None or frameworkPath is not None:
self.__bundle = self.__dict__["__bundle__"] = _loadBundle(
name, frameworkIdentifier, frameworkPath
)
pfx = name + "."
for nm in sys.modules:
if nm.startswith(pfx):
rest = nm[len(pfx) :]
if "." in rest:
continue
if sys.modules[nm] is not None:
self.__dict__[rest] = sys.modules[nm]
self.__dict__.update(initialdict)
self.__dict__.update(metadict.get("misc", {}))
self.__parents = parents
self.__varmap = metadict.get("constants")
self.__varmap_dct = metadict.get("constants_dict", {})
self.__enummap = metadict.get("enums")
self.__funcmap = metadict.get("functions")
self.__aliases = metadict.get("aliases")
self.__inlinelist = inline_list
self.__expressions = metadict.get("expressions")
self.__expressions_mapping = GetAttrMap(self)
self.__load_cftypes(metadict.get("cftypes"))
if metadict.get("protocols") is not None:
self.__dict__["protocols"] = ModuleType("%s.protocols" % (name,))
self.__dict__["protocols"].__dict__.update(metadict["protocols"])
for p in objc.protocolsForProcess():
setattr(self.__dict__["protocols"], p.__name__, p)
    def __dir__(self):
        # Accessing __all__ loads every lazy attribute as a side effect
        # (see __getattr__), so dir() reports the complete name set.
        return self.__all__
    def __getattr__(self, name):
        """Resolve *name* lazily and cache the result in __dict__, so this
        hook only runs once per attribute."""
        if name == "__all__":
            # Load everything immediately
            value = self.__calc_all()
            self.__dict__[name] = value
            return value

        # First try parent module, as we had done
        # 'from parents import *'
        for p in self.__parents:
            try:
                value = getattr(p, name)
            except AttributeError:
                pass
            else:
                self.__dict__[name] = value
                return value

        # Check if the name is a constant from
        # the metadata files
        try:
            value = self.__get_constant(name)
        except AttributeError:
            pass
        else:
            self.__dict__[name] = value
            return value

        # Then check if the name is class
        try:
            value = lookUpClass(name)
        except nosuchclass_error:
            pass
        else:
            self.__dict__[name] = value
            return value

        # Finally give up and raise AttributeError
        raise AttributeError(name)
def __calc_all(self):
all = set()
# Ensure that all dynamic entries get loaded
if self.__varmap_dct:
for nm in self.__varmap_dct:
try:
getattr(self, nm)
except AttributeError:
pass
if self.__varmap:
for nm in re.findall(r"\$([A-Z0-9a-z_]*)(?:@[^$]*)?(?=\$)", self.__varmap):
try:
getattr(self, nm)
except AttributeError:
pass
if self.__enummap:
for nm in re.findall(r"\$([A-Z0-9a-z_]*)@[^$]*(?=\$)", self.__enummap):
try:
getattr(self, nm)
except AttributeError:
pass
if self.__funcmap:
for nm in self.__funcmap:
try:
getattr(self, nm)
except AttributeError:
pass
if self.__expressions:
for nm in self.__expressions:
try:
getattr(self, nm)
except AttributeError:
pass
if self.__aliases:
for nm in self.__aliases:
try:
getattr(self, nm)
except AttributeError:
pass
# Add all names that are already in our __dict__
all.update(self.__dict__)
# Merge __all__of parents ('from parent import *')
for p in self.__parents:
all.update(getattr(p, "__all__", ()))
# Add all class names
all.update(cls.__name__ for cls in getClassList())
return [v for v in all if not v.startswith("_")]
return list(all)
    def __get_constant(self, name):
        """Resolve *name* through the metadata tables, trying (in order):
        typed constant dict, packed constant string, enum string, function
        map (framework bundle, then inline list), expressions (evaluated
        lazily), and aliases.  Raises AttributeError when unknown.
        """
        # FIXME: Loading variables and functions requires too much
        # code at the moment, the objc API can be adjusted for
        # this later on.
        if self.__varmap_dct:
            if name in self.__varmap_dct:
                tp = self.__varmap_dct[name]
                return objc._loadConstant(name, tp, False)

        if self.__varmap:
            # Packed format: "$name@typecode$..."; a missing typecode means
            # an object pointer ("@").
            m = re.search(r"\$%s(@[^$]*)?\$" % (name,), self.__varmap)
            if m is not None:
                tp = m.group(1)
                if tp is None:
                    tp = "@"
                else:
                    tp = tp[1:]

                d = {}  # NOTE(review): unused in this branch; leftover code.
                if tp.startswith("="):
                    # "=" prefix marks a "magic" constant.
                    tp = tp[1:]
                    magic = True
                else:
                    magic = False

                return objc._loadConstant(name, tp, magic)

        if self.__enummap:
            m = re.search(r"\$%s@([^$]*)\$" % (name,), self.__enummap)
            if m is not None:
                val = m.group(1)

                if val.startswith("'"):
                    # Four-character-code literal: unpack as big-endian int32.
                    if isinstance(val, bytes):
                        # Python 2.x
                        (val,) = struct.unpack(">l", val[1:-1])
                    else:
                        # Python 3.x
                        (val,) = struct.unpack(">l", val[1:-1].encode("latin1"))
                elif "." in val:
                    val = float(val)
                else:
                    val = int(val)

                return val

        if self.__funcmap:
            if name in self.__funcmap:
                info = self.__funcmap[name]

                func_list = [(name,) + info]

                d = {}
                objc.loadBundleFunctions(self.__bundle, d, func_list)
                if name in d:
                    return d[name]

                if self.__inlinelist is not None:
                    # Fall back to the inline function list (static inlines
                    # that are not exported from the bundle).
                    try:
                        objc.loadFunctionList(
                            self.__inlinelist, d, func_list, skip_undefined=False
                        )
                    except objc.error:
                        pass
                    else:
                        if name in d:
                            return d[name]

        if self.__expressions:
            if name in self.__expressions:
                info = self.__expressions[name]
                try:
                    # Names inside the expression resolve through this
                    # module via GetAttrMap.
                    return eval(info, {}, self.__expressions_mapping)
                except NameError:
                    pass

        if self.__aliases:
            if name in self.__aliases:
                alias = self.__aliases[name]
                # A few C limit macros are synthesized from sys.maxsize.
                if alias == "ULONG_MAX":
                    return (sys.maxsize * 2) + 1
                elif alias == "LONG_MAX":
                    return sys.maxsize
                elif alias == "LONG_MIN":
                    return -sys.maxsize - 1

                return getattr(self, alias)

        raise AttributeError(name)
    def __load_cftypes(self, cftypes):
        """Register CoreFoundation types from the metadata.

        Each entry is (name, encoded type, GetTypeID function name,
        comma-separated toll-free-bridged class names).
        """
        if not cftypes:
            return

        for name, type, gettypeid_func, tollfree in cftypes:
            if tollfree:
                # Pick the first bridged Objective-C class that actually
                # exists in this process.
                for nm in tollfree.split(","):
                    try:
                        objc.lookUpClass(nm)
                    except objc.error:
                        pass
                    else:
                        tollfree = nm
                        break
                try:
                    v = objc.registerCFSignature(name, type, None, tollfree)
                    if v is not None:
                        self.__dict__[name] = v
                        continue
                except objc.nosuchclass_error:
                    pass

            try:
                func = getattr(self, gettypeid_func)
            except AttributeError:
                # GetTypeID function not found, this is either
                # a CFType that isn't present on the current
                # platform, or a CFType without a public GetTypeID
                # function. Proxy using the generic CFType
                if tollfree is None:
                    v = objc.registerCFSignature(name, type, None, "NSCFType")
                    if v is not None:
                        self.__dict__[name] = v
                continue

            v = objc.registerCFSignature(name, type, func())
            if v is not None:
                self.__dict__[name] = v
|
comictaggerlib | options | """CLI options class for ComicTagger app"""
# Copyright 2012-2014 Anthony Beville
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import getopt
import os
import platform
import sys
import traceback
try:
import argparse
except ImportError:
pass
import ctversion
import utils
from comicarchive import MetaDataStyle
from genericmetadata import GenericMetadata
from versionchecker import VersionChecker
class Options:
    """Parses ComicTagger's command line (see parseCmdLineArgs) and stores
    the results as attributes."""

    # NOTE(review): help_text is printed verbatim by display_msg_and_quit;
    # its exact spacing is user-visible output, so it must not be reflowed.
    help_text = """Usage: {0} [option] ... [file [files ...]]
A utility for reading and writing metadata to comic archives.
If no options are given, {0} will run in windowed mode.
-p, --print Print out tag info from file. Specify type
(via -t) to get only info of that tag type.
--raw With -p, will print out the raw tag block(s)
from the file.
-d, --delete Deletes the tag block of specified type (via
-t).
-c, --copy=SOURCE Copy the specified source tag block to
destination style specified via -t
(potentially lossy operation).
-s, --save Save out tags as specified type (via -t).
Must specify also at least -o, -p, or -m.
--nooverwrite Don't modify tag block if it already exists
(relevant for -s or -c).
-1, --assume-issue-one Assume issue number is 1 if not found
(relevant for -s).
-n, --dryrun Don't actually modify file (only relevant for
-d, -s, or -r).
-t, --type=TYPE Specify TYPE as either "CR", "CBL", or
"COMET" (as either ComicRack, ComicBookLover,
or CoMet style tags, respectively).
-f, --parsefilename Parse the filename to get some info,
specifically series name, issue number,
volume, and publication year.
-i, --interactive Interactively query the user when there are
multiple matches for an online search.
--nosummary Suppress the default summary after a save
operation.
-o, --online Search online and attempt to identify file
using existing metadata and images in archive.
May be used in conjunction with -f and -m.
--id=ID Use the issue ID when searching online.
Overrides all other metadata.
-m, --metadata=LIST Explicitly define, as a list, some tags to be
used. e.g.:
"series=Plastic Man, publisher=Quality Comics"
"series=Kickers^, Inc., issue=1, year=1986"
Name-Value pairs are comma separated. Use a
"^" to escape an "=" or a ",", as shown in
the example above. Some names that can be
used: series, issue, issueCount, year,
publisher, title
-r, --rename Rename the file based on specified tag style.
--noabort Don't abort save operation when online match
is of low confidence.
-e, --export-to-zip Export RAR archive to Zip format.
--delete-rar Delete original RAR archive after successful
export to Zip.
--abort-on-conflict Don't export to zip if intended new filename
exists (otherwise, creates a new unique
filename).
-S, --script=FILE Run an "add-on" python script that uses the
ComicTagger library for custom processing.
Script arguments can follow the script name.
-R, --recursive Recursively include files in sub-folders.
--cv-api-key=KEY Use the given Comic Vine API Key (persisted
in settings).
--only-set-cv-key Only set the Comic Vine API key and quit.
-w, --wait-on-cv-rate-limit When encountering a Comic Vine rate limit
error, wait and retry query.
-v, --verbose Be noisy when doing what it does.
--terse Don't say much (for print mode).
--version Display version.
-h, --help Display this message.
For more help visit the wiki at: http://code.google.com/p/comictagger/
"""
    def __init__(self):
        """Initialise every option to its default (GUI mode, no action)."""
        # Tag style selected with -t, or None when not given.
        self.data_style = None
        # Any action option switches this to True (see parseCmdLineArgs).
        self.no_gui = False
        self.filename = None
        self.verbose = False
        self.terse = False
        # GenericMetadata parsed from -m, or None.
        self.metadata = None
        # Action flags (mutually exclusive; enforced in parseCmdLineArgs).
        self.print_tags = False
        self.copy_tags = False
        self.delete_tags = False
        self.export_to_zip = False
        self.abort_export_on_conflict = False
        self.delete_rar_after_export = False
        self.search_online = False
        self.dryrun = False
        self.abortOnLowConfidence = True
        self.save_tags = False
        self.parse_filename = False
        self.show_save_summary = True
        self.raw = False
        self.cv_api_key = None
        self.only_set_key = False
        self.rename_file = False
        self.no_overwrite = False
        self.interactive = False
        self.issue_id = None
        self.recursive = False
        self.run_script = False
        self.script = None
        self.wait_and_retry_on_rate_limit = False
        self.assume_issue_is_one_if_not_set = False
        # Positional file arguments (globbed on Windows).
        self.file_list = []
def display_msg_and_quit(self, msg, code, show_help=False):
appname = os.path.basename(sys.argv[0])
if msg is not None:
print(msg)
if show_help:
print(self.help_text.format(appname))
else:
print("For more help, run with '--help'")
sys.exit(code)
def parseMetadataFromString(self, mdstr):
"""The metadata string is a comma separated list of name-value pairs
The names match the attributes of the internal metadata struct (for now)
The caret is the special "escape character", since it's not common in
natural language text
example = "series=Kickers^, Inc. ,issue=1, year=1986"
"""
escaped_comma = "^,"
escaped_equals = "^="
replacement_token = "<_~_>"
md = GenericMetadata()
# First, replace escaped commas with with a unique token (to be changed
# back later)
mdstr = mdstr.replace(escaped_comma, replacement_token)
tmp_list = mdstr.split(",")
md_list = []
for item in tmp_list:
item = item.replace(replacement_token, ",")
md_list.append(item)
# Now build a nice dict from the list
md_dict = dict()
for item in md_list:
# Make sure to fix any escaped equal signs
i = item.replace(escaped_equals, replacement_token)
key, value = i.split("=")
value = value.replace(replacement_token, "=").strip()
key = key.strip()
if key.lower() == "credit":
cred_attribs = value.split(":")
role = cred_attribs[0]
person = (cred_attribs[1] if len(cred_attribs) > 1 else "")
primary = (cred_attribs[2] if len(cred_attribs) > 2 else None)
md.addCredit(
person.strip(),
role.strip(),
True if primary is not None else False)
else:
md_dict[key] = value
# Map the dict to the metadata object
for key in md_dict:
if not hasattr(md, key):
print("Warning: '{0}' is not a valid tag name".format(key))
else:
md.isEmpty = False
setattr(md, key, md_dict[key])
# print(md)
return md
def launch_script(self, scriptfile):
# we were given a script. special case for the args:
# 1. ignore everything before the -S,
# 2. pass all the ones that follow (including script name) to the
# script
script_args = list()
for idx, arg in enumerate(sys.argv):
if arg in ['-S', '--script']:
# found script!
script_args = sys.argv[idx + 1:]
break
sys.argv = script_args
if not os.path.exists(scriptfile):
print("Can't find {0}".format(scriptfile))
else:
# I *think* this makes sense:
# assume the base name of the file is the module name
# add the folder of the given file to the python path
# import module
dirname = os.path.dirname(scriptfile)
module_name = os.path.splitext(os.path.basename(scriptfile))[0]
sys.path = [dirname] + sys.path
try:
script = __import__(module_name)
# Determine if the entry point exists before trying to run it
if "main" in dir(script):
script.main()
else:
print(
"Can't find entry point \"main()\" in module \"{0}\"".format(module_name))
except Exception as e:
print "Script raised an unhandled exception: ", e
print(traceback.format_exc())
sys.exit(0)
def parseCmdLineArgs(self):
if platform.system() == "Darwin" and hasattr(
sys, "frozen") and sys.frozen == 1:
# remove the PSN ("process serial number") argument from OS/X
input_args = [a for a in sys.argv[1:] if "-psn_0_" not in a]
else:
input_args = sys.argv[1:]
# first check if we're launching a script:
for n in range(len(input_args)):
if (input_args[n] in ["-S", "--script"] and
n + 1 < len(input_args)):
# insert a "--" which will cause getopt to ignore the remaining args
# so they will be passed to the script
input_args.insert(n + 2, "--")
break
# parse command line options
try:
opts, args = getopt.getopt(input_args,
"hpdt:fm:vownsrc:ieRS:1",
["help", "print", "delete", "type=", "copy=", "parsefilename",
"metadata=", "verbose", "online", "dryrun", "save", "rename",
"raw", "noabort", "terse", "nooverwrite", "interactive",
"nosummary", "version", "id=", "recursive", "script=",
"export-to-zip", "delete-rar", "abort-on-conflict",
"assume-issue-one", "cv-api-key=", "only-set-cv-key",
"wait-on-cv-rate-limit"])
except getopt.GetoptError as err:
self.display_msg_and_quit(str(err), 2)
# process options
for o, a in opts:
if o in ("-h", "--help"):
self.display_msg_and_quit(None, 0, show_help=True)
if o in ("-v", "--verbose"):
self.verbose = True
if o in ("-S", "--script"):
self.run_script = True
self.script = a
if o in ("-R", "--recursive"):
self.recursive = True
if o in ("-p", "--print"):
self.print_tags = True
if o in ("-d", "--delete"):
self.delete_tags = True
if o in ("-i", "--interactive"):
self.interactive = True
if o in ("-c", "--copy"):
self.copy_tags = True
if a.lower() == "cr":
self.copy_source = MetaDataStyle.CIX
elif a.lower() == "cbl":
self.copy_source = MetaDataStyle.CBI
elif a.lower() == "comet":
self.copy_source = MetaDataStyle.COMET
else:
self.display_msg_and_quit(
"Invalid copy tag source type", 1)
if o in ("-o", "--online"):
self.search_online = True
if o in ("-n", "--dryrun"):
self.dryrun = True
if o in ("-m", "--metadata"):
self.metadata = self.parseMetadataFromString(a)
if o in ("-s", "--save"):
self.save_tags = True
if o in ("-r", "--rename"):
self.rename_file = True
if o in ("-e", "--export_to_zip"):
self.export_to_zip = True
if o == "--delete-rar":
self.delete_rar_after_export = True
if o == "--abort-on-conflict":
self.abort_export_on_conflict = True
if o in ("-f", "--parsefilename"):
self.parse_filename = True
if o in ("-w", "--wait-on-cv-rate-limit"):
self.wait_and_retry_on_rate_limit = True
if o == "--id":
self.issue_id = a
if o == "--raw":
self.raw = True
if o == "--noabort":
self.abortOnLowConfidence = False
if o == "--terse":
self.terse = True
if o == "--nosummary":
self.show_save_summary = False
if o in ("-1", "--assume-issue-one"):
self.assume_issue_is_one_if_not_set = True
if o == "--nooverwrite":
self.no_overwrite = True
if o == "--cv-api-key":
self.cv_api_key = a
if o == "--only-set-cv-key":
self.only_set_key = True
if o == "--version":
print(
"ComicTagger {0} [{1} / {2}]".format(ctversion.version, ctversion.fork, ctversion.fork_tag))
print(
"Modified version of ComicTagger (Copyright (c) 2012-2014 Anthony Beville)")
print(
"Distributed under Apache License 2.0 (http://www.apache.org/licenses/LICENSE-2.0)")
sys.exit(0)
if o in ("-t", "--type"):
if a.lower() == "cr":
self.data_style = MetaDataStyle.CIX
elif a.lower() == "cbl":
self.data_style = MetaDataStyle.CBI
elif a.lower() == "comet":
self.data_style = MetaDataStyle.COMET
else:
self.display_msg_and_quit("Invalid tag type", 1)
if self.print_tags or self.delete_tags or self.save_tags or self.copy_tags or self.rename_file or self.export_to_zip or self.only_set_key:
self.no_gui = True
count = 0
if self.run_script:
count += 1
if self.print_tags:
count += 1
if self.delete_tags:
count += 1
if self.save_tags:
count += 1
if self.copy_tags:
count += 1
if self.rename_file:
count += 1
if self.export_to_zip:
count += 1
if self.only_set_key:
count += 1
if count > 1:
self.display_msg_and_quit(
"Must choose only one action of print, delete, save, copy, rename, export, set key, or run script",
1)
if self.script is not None:
self.launch_script(self.script)
if len(args) > 0:
if platform.system() == "Windows":
# no globbing on windows shell, so do it for them
import glob
self.file_list = []
for item in args:
self.file_list.extend(glob.glob(item))
if len(self.file_list) > 0:
self.filename = self.file_list[0]
else:
self.filename = args[0]
self.file_list = args
if self.only_set_key and self.cv_api_key is None:
self.display_msg_and_quit("Key not given!", 1)
if (self.only_set_key == False) and self.no_gui and (
self.filename is None):
self.display_msg_and_quit(
"Command requires at least one filename!", 1)
if self.delete_tags and self.data_style is None:
self.display_msg_and_quit(
"Please specify the type to delete with -t", 1)
if self.save_tags and self.data_style is None:
self.display_msg_and_quit(
"Please specify the type to save with -t", 1)
if self.copy_tags and self.data_style is None:
self.display_msg_and_quit(
"Please specify the type to copy to with -t", 1)
# if self.rename_file and self.data_style is None:
# self.display_msg_and_quit("Please specify the type to use for renaming with -t", 1)
if self.recursive:
self.file_list = utils.get_recursive_filelist(self.file_list)
|
lib | static | import mimetypes
import os
import re
import stat
try:
from io import UnsupportedOperation
except ImportError:
UnsupportedOperation = object()
import cherrypy
from cherrypy._cpcompat import ntob, unquote
from cherrypy.lib import cptools, file_generator_limited, httputil
mimetypes.init()
# Extra extension -> type mappings missing from (older) stdlib tables.
mimetypes.types_map[".dwg"] = "image/x-dwg"
mimetypes.types_map[".ico"] = "image/x-icon"
mimetypes.types_map[".bz2"] = "application/x-bzip2"
mimetypes.types_map[".gz"] = "application/x-gzip"
def serve_file(path, content_type=None, disposition=None, name=None, debug=False):
    """Set status, headers, and body in order to serve the given path.

    The Content-Type header will be set to the content_type arg, if provided.
    If not provided, the Content-Type will be guessed by the file extension
    of the 'path' argument.

    If disposition is not None, the Content-Disposition header will be set
    to "<disposition>; filename=<name>". If name is None, it will be set
    to the basename of path. If disposition is None, no Content-Disposition
    header will be written.

    Raises ValueError for a relative path and cherrypy.NotFound when the
    path does not exist or is a directory.
    """
    response = cherrypy.serving.response

    # If path is relative, users should fix it by making path absolute.
    # That is, CherryPy should not guess where the application root is.
    # It certainly should *not* use cwd (since CP may be invoked from a
    # variety of paths). If using tools.staticdir, you can make your relative
    # paths become absolute by supplying a value for "tools.staticdir.root".
    if not os.path.isabs(path):
        msg = "'%s' is not an absolute path." % path
        if debug:
            cherrypy.log(msg, "TOOLS.STATICFILE")
        raise ValueError(msg)

    try:
        st = os.stat(path)
    except OSError:
        if debug:
            cherrypy.log("os.stat(%r) failed" % path, "TOOLS.STATIC")
        raise cherrypy.NotFound()

    # Check if path is a directory.
    if stat.S_ISDIR(st.st_mode):
        # Let the caller deal with it as they like.
        if debug:
            cherrypy.log("%r is a directory" % path, "TOOLS.STATIC")
        raise cherrypy.NotFound()

    # Set the Last-Modified response header, so that
    # modified-since validation code can work.
    response.headers["Last-Modified"] = httputil.HTTPDate(st.st_mtime)
    # NOTE(review): validate_since presumably short-circuits with a 304
    # response when the client copy is current -- confirm in cptools.
    cptools.validate_since()

    if content_type is None:
        # Set content-type based on filename extension
        ext = ""
        i = path.rfind(".")
        if i != -1:
            ext = path[i:].lower()
        content_type = mimetypes.types_map.get(ext, None)
    if content_type is not None:
        response.headers["Content-Type"] = content_type
    if debug:
        cherrypy.log("Content-Type: %r" % content_type, "TOOLS.STATIC")

    cd = None
    if disposition is not None:
        if name is None:
            name = os.path.basename(path)
        cd = '%s; filename="%s"' % (disposition, name)
        response.headers["Content-Disposition"] = cd
    if debug:
        cherrypy.log("Content-Disposition: %r" % cd, "TOOLS.STATIC")

    # Set Content-Length and use an iterable (file object)
    # this way CP won't load the whole file in memory
    content_length = st.st_size
    # The file object is streamed by _serve_fileobj, so it is deliberately
    # not closed here.
    fileobj = open(path, "rb")
    return _serve_fileobj(fileobj, content_type, content_length, debug=debug)
def serve_fileobj(fileobj, content_type=None, disposition=None, name=None, debug=False):
    """Set status, headers, and body in order to serve the given file object.

    The Content-Type header will be set to the content_type arg, if provided.

    If disposition is not None, the Content-Disposition header will be set
    to "<disposition>; filename=<name>". If name is None, 'filename' will
    not be set. If disposition is None, no Content-Disposition header will
    be written.

    CAUTION: If the request contains a 'Range' header, one or more seek()s will
    be performed on the file object.  This may cause undesired behavior if
    the file object is not seekable.  It could also produce undesired results
    if the caller set the read position of the file object prior to calling
    serve_fileobj(), expecting that the data would be served starting from that
    position.
    """
    response = cherrypy.serving.response

    try:
        st = os.fstat(fileobj.fileno())
    except AttributeError:
        # Not a real OS-level file (e.g. StringIO/BytesIO): size unknown,
        # so no Content-Length and no Last-Modified.
        if debug:
            cherrypy.log("os has no fstat attribute", "TOOLS.STATIC")
        content_length = None
    except UnsupportedOperation:
        # io objects without a file descriptor raise this from fileno().
        content_length = None
    else:
        # Set the Last-Modified response header, so that
        # modified-since validation code can work.
        response.headers["Last-Modified"] = httputil.HTTPDate(st.st_mtime)
        cptools.validate_since()
        content_length = st.st_size

    if content_type is not None:
        response.headers["Content-Type"] = content_type
    if debug:
        cherrypy.log("Content-Type: %r" % content_type, "TOOLS.STATIC")

    cd = None
    if disposition is not None:
        if name is None:
            # Unlike serve_file, there is no path to derive a name from.
            cd = disposition
        else:
            cd = '%s; filename="%s"' % (disposition, name)
        response.headers["Content-Disposition"] = cd
    if debug:
        cherrypy.log("Content-Disposition: %r" % cd, "TOOLS.STATIC")

    return _serve_fileobj(fileobj, content_type, content_length, debug=debug)
def _serve_fileobj(fileobj, content_type, content_length, debug=False):
    """Internal. Set response.body to the given file object, perhaps ranged."""
    response = cherrypy.serving.response

    # HTTP/1.0 didn't have Range/Accept-Ranges headers, or the 206 code
    request = cherrypy.serving.request
    if request.protocol >= (1, 1):
        response.headers["Accept-Ranges"] = "bytes"
        r = httputil.get_ranges(request.headers.get("Range"), content_length)
        if r == []:
            # An empty list (as opposed to None) means a Range header was
            # present but unsatisfiable: respond 416 with a Content-Range
            # advertising the full size, per the HTTP range spec.
            response.headers["Content-Range"] = "bytes */%s" % content_length
            message = "Invalid Range (first-byte-pos greater than " "Content-Length)"
            if debug:
                cherrypy.log(message, "TOOLS.STATIC")
            raise cherrypy.HTTPError(416, message)
        if r:
            if len(r) == 1:
                # Return a single-part response.
                start, stop = r[0]
                if stop > content_length:
                    stop = content_length
                r_len = stop - start
                if debug:
                    cherrypy.log(
                        "Single part; start: %r, stop: %r" % (start, stop),
                        "TOOLS.STATIC",
                    )
                response.status = "206 Partial Content"
                # Content-Range uses an inclusive last-byte-pos, hence stop - 1
                response.headers["Content-Range"] = "bytes %s-%s/%s" % (
                    start,
                    stop - 1,
                    content_length,
                )
                response.headers["Content-Length"] = r_len
                fileobj.seek(start)
                response.body = file_generator_limited(fileobj, r_len)
            else:
                # Return a multipart/byteranges response.
                response.status = "206 Partial Content"
                try:
                    # Python 3
                    from email.generator import _make_boundary as make_boundary
                except ImportError:
                    # Python 2
                    from mimetools import choose_boundary as make_boundary
                boundary = make_boundary()
                ct = "multipart/byteranges; boundary=%s" % boundary
                response.headers["Content-Type"] = ct
                if "Content-Length" in response.headers:
                    # Delete Content-Length header so finalize() recalcs it.
                    del response.headers["Content-Length"]

                def file_ranges():
                    # Generator yielding each range as its own MIME part,
                    # separated by the chosen boundary.
                    # Apache compatibility:
                    yield ntob("\r\n")
                    for start, stop in r:
                        if debug:
                            cherrypy.log(
                                "Multipart; start: %r, stop: %r" % (start, stop),
                                "TOOLS.STATIC",
                            )
                        yield ntob("--" + boundary, "ascii")
                        yield ntob("\r\nContent-type: %s" % content_type, "ascii")
                        yield ntob(
                            "\r\nContent-range: bytes %s-%s/%s\r\n\r\n"
                            % (start, stop - 1, content_length),
                            "ascii",
                        )
                        fileobj.seek(start)
                        gen = file_generator_limited(fileobj, stop - start)
                        for chunk in gen:
                            yield chunk
                        yield ntob("\r\n")
                    # Final boundary
                    yield ntob("--" + boundary + "--", "ascii")
                    # Apache compatibility:
                    yield ntob("\r\n")

                response.body = file_ranges()
            return response.body
        else:
            # r is None: no Range header at all; fall through to whole-file.
            if debug:
                cherrypy.log("No byteranges requested", "TOOLS.STATIC")

    # Set Content-Length and use an iterable (file object)
    # this way CP won't load the whole file in memory
    response.headers["Content-Length"] = content_length
    response.body = fileobj
    return response.body
def serve_download(path, name=None):
    """Serve 'path' as an application/x-download attachment.

    Convenience wrapper: forces the browser to download rather than
    display the file.
    """
    return serve_file(
        path,
        content_type="application/x-download",
        disposition="attachment",
        name=name,
    )
def _attempt(filename, content_types, debug=False):
    """Try to serve *filename* statically; return True on success, False if missing."""
    if debug:
        cherrypy.log(
            "Attempting %r (content_types %r)" % (filename, content_types),
            "TOOLS.STATICDIR",
        )
    try:
        # you can set the content types for a
        # complete directory per extension
        content_type = None
        if content_types:
            extension = os.path.splitext(filename)[1]
            content_type = content_types.get(extension[1:], None)
        serve_file(filename, content_type=content_type, debug=debug)
        return True
    except cherrypy.NotFound:
        # If we didn't find the static file, continue handling the
        # request. We might find a dynamic handler instead.
        if debug:
            cherrypy.log("NotFound", "TOOLS.STATICFILE")
        return False
def staticdir(
    section, dir, root="", match="", content_types=None, index="", debug=False
):
    """Serve a static resource from the given (root +) dir.

    match
        If given, request.path_info will be searched for the given
        regular expression before attempting to serve static content.

    content_types
        If given, it should be a Python dictionary of
        {file-extension: content-type} pairs, where 'file-extension' is
        a string (e.g. "gif") and 'content-type' is the value to write
        out in the Content-Type response header (e.g. "image/gif").

    index
        If provided, it should be the (relative) name of a file to
        serve for directory requests. For example, if the dir argument is
        '/home/me', the Request-URI is 'myapp', and the index arg is
        'index.html', the file '/home/me/myapp/index.html' will be sought.
    """
    request = cherrypy.serving.request
    if request.method not in ("GET", "HEAD"):
        if debug:
            cherrypy.log("request.method not GET or HEAD", "TOOLS.STATICDIR")
        return False
    if match and not re.search(match, request.path_info):
        if debug:
            cherrypy.log(
                "request.path_info %r does not match pattern %r"
                % (request.path_info, match),
                "TOOLS.STATICDIR",
            )
        return False

    # Allow the use of '~' to refer to a user's home directory.
    dir = os.path.expanduser(dir)

    # If dir is relative, make absolute using "root".
    if not os.path.isabs(dir):
        if not root:
            msg = "Static dir requires an absolute dir (or root)."
            if debug:
                cherrypy.log(msg, "TOOLS.STATICDIR")
            raise ValueError(msg)
        dir = os.path.join(root, dir)

    # Determine where we are in the object tree relative to 'section'
    # (where the static tool was defined).
    if section == "global":
        section = "/"
    section = section.rstrip(r"\/")
    branch = request.path_info[len(section) + 1 :]
    branch = unquote(branch.lstrip(r"\/"))

    # If branch is "", filename will end in a slash
    filename = os.path.join(dir, branch)
    if debug:
        cherrypy.log(
            "Checking file %r to fulfill %r" % (filename, request.path_info),
            "TOOLS.STATICDIR",
        )

    # There's a chance that the branch pulled from the URL might
    # have ".." or similar uplevel attacks in it. Check that the final
    # filename is a child of dir.
    # FIX: compare against dir + separator, not a bare prefix, so that a
    # sibling such as "/var/www-secret" cannot satisfy a check for
    # dir "/var/www".
    normdir = os.path.normpath(dir)
    normfile = os.path.normpath(filename)
    if normfile != normdir and not normfile.startswith(
        normdir.rstrip(os.path.sep) + os.path.sep
    ):
        raise cherrypy.HTTPError(403)  # Forbidden

    # FIX: propagate the debug flag so _attempt() logs when requested.
    handled = _attempt(filename, content_types, debug=debug)
    if not handled:
        # Check for an index file if a folder was requested.
        if index:
            handled = _attempt(
                os.path.join(filename, index), content_types, debug=debug
            )
            if handled:
                request.is_index = filename[-1] in (r"\/")
    return handled
def staticfile(filename, root=None, match="", content_types=None, debug=False):
    """Serve a static resource from the given (root +) filename.

    match
        If given, request.path_info will be searched for the given
        regular expression before attempting to serve static content.

    content_types
        If given, it should be a Python dictionary of
        {file-extension: content-type} pairs, where 'file-extension' is
        a string (e.g. "gif") and 'content-type' is the value to write
        out in the Content-Type response header (e.g. "image/gif").
    """
    request = cherrypy.serving.request

    # Static content is only served for safe, read-only methods.
    if request.method not in ("GET", "HEAD"):
        if debug:
            cherrypy.log("request.method not GET or HEAD", "TOOLS.STATICFILE")
        return False

    if match and re.search(match, request.path_info) is None:
        if debug:
            cherrypy.log(
                "request.path_info %r does not match pattern %r"
                % (request.path_info, match),
                "TOOLS.STATICFILE",
            )
        return False

    # If filename is relative, make absolute using "root".
    if not os.path.isabs(filename):
        if not root:
            msg = "Static tool requires an absolute filename (got '%s')." % (filename,)
            if debug:
                cherrypy.log(msg, "TOOLS.STATICFILE")
            raise ValueError(msg)
        filename = os.path.join(root, filename)

    return _attempt(filename, content_types, debug=debug)
|
downloaders | GoogledriveCom | # -*- coding: utf-8 -*
#
# Test links:
# https://drive.google.com/file/d/0B6RNTe4ygItBQm15RnJiTmMyckU/view?pli=1
import json
import re
import urllib.parse
from pyload.core.network.http.exceptions import BadHeader
from pyload.core.utils import parse
from ..base.downloader import BaseDownloader
class GoogledriveCom(BaseDownloader):
    """Downloader plugin for Google Drive / Google Docs file links.

    Resolves file metadata through the Drive v3 API, then scrapes the web
    UI for the actual download link (the direct API download path exists
    below but is disabled for performance — see process()).
    """

    __name__ = "GoogledriveCom"
    __type__ = "downloader"
    __version__ = "0.35"
    __status__ = "testing"

    __pattern__ = r"https?://(?:www\.)?(?:drive|docs)\.google\.com/(?:file/d/|uc\?.*id=)(?P<ID>[-\w]+)"
    __config__ = [
        ("enabled", "bool", "Activated", True),
        ("use_premium", "bool", "Use premium account if available", True),
        ("fallback", "bool", "Fallback to free download if premium fails", True),
        ("chk_filesize", "bool", "Check file size", True),
        ("max_wait", "int", "Reconnect if waiting time is greater than minutes", 10),
    ]

    __description__ = """Drive.google.com downloader plugin"""
    __license__ = "GPLv3"
    __authors__ = [
        ("zapp-brannigan", "fuerst.reinje@web.de"),
        ("GammaC0de", "nitzo2001[AT]yahoo[DOT]com"),
    ]

    # Extracts file name (N), size (S) and unit (U) from the
    # "Virus scan warning" interstitial page.
    INFO_PATTERN = r'<span class="uc-name-size"><a href="[^"]+">(?P<N>.+?)</a> \((?P<S>[\d.,]+)(?P<U>[\w^_]+)\)</span>'

    API_URL = "https://www.googleapis.com/drive/v3/"
    API_KEY = "AIzaSyB68u-qFPP9oBJpo1DWAPFE_VD2Sfy9hpk"

    def setup(self):
        """Configure connection behavior for this downloader."""
        self.multi_dl = True
        self.resume_download = True
        self.chunk_limit = 1

    def api_request(self, cmd, **kwargs):
        """Call Drive API endpoint *cmd*; return parsed JSON, or None on HTTP error."""
        kwargs["key"] = self.API_KEY
        try:
            json_data = json.loads(
                self.load("{}{}".format(self.API_URL, cmd), get=kwargs)
            )
            self.log_debug(f"API response: {json_data}")
            return json_data
        except BadHeader as exc:
            # The API usually reports errors as a JSON body; fall back to
            # logging the raw exception if that body is not parseable.
            try:
                json_data = json.loads(exc.content)
                self.log_error(
                    "API Error: {}".format(cmd),
                    json_data["error"]["message"],
                    "ID: {}".format(self.info["pattern"]["ID"]),
                    "Error code: {}".format(exc.code),
                )
            except ValueError:
                self.log_error(
                    "API Error: {}".format(cmd),
                    exc,
                    "ID: {}".format(self.info["pattern"]["ID"]),
                    "Error code: {}".format(exc.code),
                )
            return None

    def api_download(self, disposition):
        """Download the file directly through the Drive API (currently unused)."""
        try:
            self.download(
                "{}{}/{}".format(self.API_URL, "files", self.info["pattern"]["ID"]),
                get={
                    "alt": "media",
                    "acknowledgeAbuse": "true",
                    "supportsAllDrives": "true",
                    "key": self.API_KEY,
                },
                disposition=disposition,
            )
        except BadHeader as exc:
            if exc.code == 404:
                self.offline()
            elif exc.code == 403:
                self.temp_offline()
            else:
                raise

    def process(self, pyfile):
        """Main entry point: resolve metadata, then scrape and follow the UI link."""
        disposition = False
        json_data = self.api_request(
            "files/" + self.info["pattern"]["ID"],
            fields="md5Checksum,name,size",
            supportsAllDrives="true",
        )
        if json_data is None:
            self.fail("API error")

        self.data = self.load(pyfile.url, ref=False)
        if "error" in json_data:
            if json_data["error"]["code"] == 404:
                if "Virus scan warning" not in self.data:
                    self.offline()
                else:
                    # The file exists but sits behind the virus-scan
                    # interstitial; scrape name/size from that page instead.
                    m = re.search(self.INFO_PATTERN, self.data)
                    if m is not None:
                        pyfile.name = m.group("N")
                        pyfile.size = parse.bytesize(m.group("S"), m.group("U"))
                    else:
                        disposition = True
            else:
                self.fail(json_data["error"]["message"])
        else:
            pyfile.size = int(json_data["size"])
            pyfile.name = json_data["name"]
            self.info["md5"] = json_data["md5Checksum"]

        # Somehow, API downloads are significantly slow compared to "normal" download :(
        # self.api_download(disposition)

        # Up to two passes: the first finds the 'uc?' link; a second may be
        # needed to click through the virus-scan confirmation page.
        for _i in range(2):
            m = re.search(r'"([^"]+uc\?.*?)"', self.data)
            if m is None:
                if "Quota exceeded" in self.data:
                    self.temp_offline()
                else:
                    self.fail(self._("link pattern not found"))

            link = re.sub(
                r"\\[uU]([\da-fA-F]{4})", lambda x: chr(int(x.group(1), 16)), m.group(1)
            )  #: unescape unicode-escape
            link = urllib.parse.urljoin(pyfile.url, link)

            #: "Only files smaller than 100 MB can be scanned for viruses"
            #: https://support.google.com/a/answer/172541?hl=en
            if pyfile.size > 104857600 or "Virus scan warning" in self.data:
                if re.search(r"/uc\?.*&confirm=", link):
                    self.download(link, disposition=disposition)
                    break
                else:
                    # Follow the confirmation page and retry the link search.
                    self.data = self.load(link)
            else:
                self.download(link, disposition=disposition)
                break
|
cd | cdprefs | # Copyright (C) 2009-2010 Aren Olson
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2, or (at your option)
# any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
#
# The developers of the Exaile media player hereby grant permission
# for non-GPL compatible GStreamer and Exaile plugins to be used and
# distributed together with GStreamer and Exaile. This permission is
# above and beyond the permissions granted by the GPL license by which
# Exaile is covered. If you modify this code, you may extend this
# exception to your version of the code, but you are not obligated to
# do so. If you do not wish to do so, delete this exception statement
# from your version.
import os
from xl import settings, transcoder
from xl.nls import gettext as _
from xlgui.preferences import widgets
# Display name of this preferences pane
name = _("CD")

# Directory containing this module; used to locate the pane's UI file
basedir = os.path.dirname(os.path.realpath(__file__))
ui = os.path.join(basedir, "cdprefs_pane.ui")

# NOTE(review): not assigned anywhere in this module — presumably set or
# read by other code; verify before removing.
FORMAT_WIDGET = None
class ImportMetadataPreference(widgets.CheckPreference):
    """Checkbox: fetch track metadata from the internet when importing a CD."""

    name = "cd_import/fetch_metadata_from_internet"
    default = True
class OutputFormatPreference(widgets.ComboPreference):
    """Combo box selecting the transcoder output format for CD imports."""

    name = "cd_import/format"
class OutputQualityPreference(widgets.ComboPreference, widgets.Conditional):
    """Quality combo whose available choices depend on the selected format."""

    name = "cd_import/quality"
    condition_preference_name = "cd_import/format"

    def __init__(self, preferences, widget):
        widgets.ComboPreference.__init__(self, preferences, widget)
        widgets.Conditional.__init__(self)
        # Cached current format and its default quality (raw value); used in
        # on_check_condition() to detect when the format selection changes.
        self.format = settings.get_option("cd_import/format", None)
        self.default = settings.get_option("cd_import/quality", None)

    def on_check_condition(self):
        """
        Specifies the condition to meet

        :returns: Whether the condition is met or not
        :rtype: bool
        """
        model = self.widget.get_model()
        if not model:  # happens if preferences window is shut down on close
            return False
        curiter = self.condition_widget.get_active_iter()
        tc_format = self.condition_widget.get_model().get_value(curiter, 0)
        formatinfo = transcoder.FORMATS[tc_format]
        if self.format != tc_format:
            # Format changed: rebuild the quality model for the new format.
            self.format = tc_format
            default = formatinfo["default"]
            if self.default != default:
                self.default = default  # raw value
            default_title = formatinfo["kbs_steps"][
                formatinfo["raw_steps"].index(self.default)
            ]
            active_iter = self.widget.get_active_iter()
            if active_iter is not None:
                active_title = float(model.get_value(active_iter, 1))
            else:
                active_title = default_title
            # Detach the model while repopulating to avoid signal churn.
            self.widget.set_model(None)
            model.clear()
            steps = zip(formatinfo["raw_steps"], formatinfo["kbs_steps"])
            for item, title in steps:
                model.append([item, str(title)])
            self.widget.set_model(model)
            # Keep the previous selection if the new format offers it,
            # otherwise fall back to the format's default quality.
            if active_title not in formatinfo["kbs_steps"]:
                active_title = default_title
            index = formatinfo["kbs_steps"].index(active_title)
            self.widget.set_active(index)
        return True
class OutputPathPreference(widgets.ComboEntryPreference):
    """Combo-with-entry preference for the CD import output path template."""

    name = "cd_import/outpath"

    # Tag substitutions offered by the entry's autocompletion
    completion_items = {
        "$tracknumber": _("Track number"),
        "$title": _("Title"),
        "$artist": _("Artist"),
        "$composer": _("Composer"),
        "$album": _("Album"),
        "$__length": _("Length"),
        "$discnumber": _("Disc number"),
        "$__rating": _("Rating"),
        "$date": _("Date"),
        "$genre": _("Genre"),
        "$bitrate": _("Bitrate"),
        "$__loc": _("Location"),
        "$filename": _("Filename"),
        "$__playcount": _("Play count"),
        "$__last_played": _("Last played"),
        "$bpm": _("BPM"),
    }

    # FIX: os.getenv("HOME") returns None when HOME is unset (e.g. on
    # Windows), yielding a bogus "None/..." path; expanduser("~") resolves
    # the home directory portably and identically on POSIX.
    _default_template = "%s/$artist/$album/$tracknumber - $title" % os.path.expanduser(
        "~"
    )
    preset_items = [_default_template]
    default = _default_template
# vim: et sts=4 sw=4
|
notes | note | from sglib import constants
from sglib.lib import util
from sglib.lib.translate import _
from sglib.lib.util import *
from sglib.math import clip_min, clip_value, color_interpolate, linear_interpolate
from sglib.models import stargate as sg_project
from sglib.models import theme
from sglib.models.daw import *
from sgui import shared as glbl_shared
from sgui.daw import shared
from sgui.daw.shared import *
from sgui.sgqt import *
from sgui.util import get_font
from . import _shared
# MIDI note numbers currently being auditioned while notes are dragged in
# the piano roll; used to cap the number of simultaneous preview voices.
PREVIEW_NOTES = set()
class NotePreviewer:
    """Auditions notes over engine IPC while they are dragged in the piano roll.

    Sends note-on/note-off messages for the track and MIDI channel the item
    editor currently targets.  Preview is suppressed while playing/recording
    or when too many notes are already being previewed.
    """

    def __init__(self):
        # Per-user setting: note preview enabled (default on)
        self.active = get_file_setting("preview-note", int, 1)
        # Last MIDI note number sent, or None if nothing is sounding yet
        self.last_note = None
        self.channel = shared.ITEM_EDITOR.get_midi_channel()
        self.rack = shared.CURRENT_ITEM_TRACK

    def _note_off(self):
        """Send note-off for the last previewed note, if any."""
        # FIX: the original sent note_off(..., None, ...) when no note had
        # been previewed yet (in update() and __del__); guard against that.
        if self.last_note is not None:
            constants.DAW_IPC.note_off(
                self.rack,
                self.last_note,
                self.channel,
            )

    def update(self, note):
        """Preview *note* (0..120), releasing the previously previewed note."""
        if len(PREVIEW_NOTES) > 6 or glbl_shared.IS_PLAYING or glbl_shared.IS_RECORDING:
            self._note_off()
            return
        if not self.active or note == self.last_note:
            return
        assert note >= 0 and note <= 120, note
        self._note_off()
        self.last_note = note
        constants.DAW_IPC.note_on(self.rack, note, self.channel)

    def __del__(self):
        # Release any still-sounding preview note when the previewer dies.
        self._note_off()
class PianoRollNoteItem(QGraphicsRectItem):
    """An individual note in the PianoRollEditor"""

    def __init__(
        self,
        a_length,
        a_note_height,
        a_note,
        a_note_item,
        a_enabled=True,
    ):
        # NOTE(review): a_note is not used in this constructor — presumably
        # kept for call-site compatibility; verify against callers.
        QGraphicsRectItem.__init__(self, 0, 0, a_length, a_note_height)
        if a_enabled:
            self.setFlag(QGraphicsItem.GraphicsItemFlag.ItemIsMovable)
            self.setFlag(QGraphicsItem.GraphicsItemFlag.ItemIsSelectable)
            self.setFlag(
                QGraphicsItem.GraphicsItemFlag.ItemSendsScenePositionChanges,
            )
            self.setZValue(1002.0)
        else:
            # Disabled notes are drawn dimmed, behind the editable notes
            self.setZValue(1001.0)
            self.setEnabled(False)
            self.setOpacity(0.3)
        self.note_height = a_note_height
        self.current_note_text = None
        self.note_item = a_note_item
        self.setAcceptHoverEvents(True)
        self.resize_start_pos = self.note_item.start
        # Interaction-mode flags toggled by the mouse handlers below
        self.is_copying = False
        self.is_velocity_dragging = False
        self.is_velocity_curving = False
        if (
            _shared.SELECTED_PIANO_NOTE is not None
            and a_note_item == _shared.SELECTED_PIANO_NOTE
        ):
            # This is the note currently being drawn: start in resize mode
            self.is_resizing = True
            shared.PIANO_ROLL_EDITOR.click_enabled = True
        else:
            self.is_resizing = False
        self.showing_resize_cursor = False
        self.resize_rect = self.rect()
        self.mouse_y_pos = QCursor.pos().y()
        self.note_text = get_font().QGraphicsSimpleTextItem(self)
        self.note_text.setPen(QPen(QtCore.Qt.GlobalColor.black))
        self.update_note_text()
        self.vel_line = QGraphicsLineItem(self)
        self.set_vel_line()
        self.set_brush()
        self.setToolTip(
            "A MIDI note to be sent to an instrument in the plugin rack. "
            "Select and move with the mouse, click near the end and drag "
            f"to change note length. {util.KEY_ALT}+click to multi-select"
        )
        self.previewer = None
        self.selection_toggle = False

    def set_vel_line(self):
        """Position the horizontal line visualizing velocity (or the active param)."""
        if _shared.PARAMETER == 0:
            f_vel = self.note_item.velocity
            f_rect = self.rect()
            # Map velocity 0..127 onto the note height (0.007874016 ~= 1/127)
            f_y = (1.0 - (f_vel * 0.007874016)) * f_rect.height()
            f_width = f_rect.width()
            self.vel_line.setLine(0.0, f_y, f_width, f_y)
        elif _shared.PARAMETER >= 1:
            value = self.note_item.get_pmn_param(_shared.PARAMETER)
            f_rect = self.rect()
            # Per-note parameters are in -1.0..1.0; map onto the note height
            f_y = (1.0 - ((value + 1.0) * 0.5)) * f_rect.height()
            f_width = f_rect.width()
            self.vel_line.setLine(0.0, f_y, f_width, f_y)

    def set_brush(self):
        """Color the note by interpolating between the min/max velocity colors."""
        if _shared.PARAMETER == 0:  # velocity
            pos = 1.0 - (self.note_item.velocity / 127.0)
        elif _shared.PARAMETER >= 1:
            value = self.note_item.get_pmn_param(_shared.PARAMETER)
            pos = 1.0 - ((value + 1.0) * 0.5)
        pos = clip_value(pos, 0.0, 1.0)
        color = color_interpolate(
            theme.SYSTEM_COLORS.daw.note_vel_min_color,
            theme.SYSTEM_COLORS.daw.note_vel_max_color,
            pos,
        )
        brush = QColor(color)
        self.setBrush(brush)

    def update_note_text(self, a_note_num=None):
        """Display the note name (e.g. 'C3') for *a_note_num* or this item's note."""
        f_note_num = a_note_num if a_note_num is not None else self.note_item.note_num
        f_octave = (f_note_num // 12) - 2
        f_note = _shared.PIANO_ROLL_NOTE_LABELS[f_note_num % 12]
        f_text = "{}{}".format(f_note, f_octave)
        if f_text != self.current_note_text:
            self.current_note_text = f_text
            self.note_text.setText(f_text)

    def mouse_is_at_end(self, a_pos):
        """Return True if *a_pos* is in the resize zone near the note's right edge."""
        f_width = self.rect().width()
        if f_width >= 30.0:
            return a_pos.x() > (f_width - 15.0)
        else:
            return a_pos.x() > (f_width * 0.72)

    def delete_later(self):
        """Queue this note for deletion (erase mode) and hide it immediately."""
        if self.isEnabled() and self not in _shared.PIANO_ROLL_DELETED_NOTES:
            _shared.PIANO_ROLL_DELETED_NOTES.append(self)
            self.hide()

    def delete(self):
        """Remove the underlying note from the current item."""
        shared.CURRENT_ITEM.remove_note(self.note_item)

    def show_resize_cursor(self, a_event):
        """Toggle the horizontal-resize cursor when hovering the note's end."""
        f_is_at_end = self.mouse_is_at_end(qt_event_pos(a_event))
        if f_is_at_end and not self.showing_resize_cursor:
            QApplication.setOverrideCursor(
                QtCore.Qt.CursorShape.SizeHorCursor,
            )
            self.showing_resize_cursor = True
        elif not f_is_at_end and self.showing_resize_cursor:
            QApplication.restoreOverrideCursor()
            self.showing_resize_cursor = False

    def get_selected_string(self):
        """Return the note's serialized selection string."""
        return self.note_item.selection_str()

    def hoverEnterEvent(self, a_event):
        shared.set_move_cursor()
        shared.PIANO_ROLL_EDITOR.click_enabled = False
        super().hoverEnterEvent(a_event)

    def hoverMoveEvent(self, a_event):
        # QGraphicsRectItem.hoverMoveEvent(self, a_event)
        if not self.is_resizing and shared._is_move_cursor():
            shared.PIANO_ROLL_EDITOR.click_enabled = False
            self.show_resize_cursor(a_event)

    def hoverLeaveEvent(self, a_event):
        shared.restore_move_cursor()
        shared.PIANO_ROLL_EDITOR.click_enabled = True
        if self.showing_resize_cursor:
            QApplication.restoreOverrideCursor()
            self.showing_resize_cursor = False
        super().hoverLeaveEvent(a_event)

    def mouseDoubleClickEvent(self, a_event):
        QGraphicsRectItem.mouseDoubleClickEvent(self, a_event)
        QApplication.restoreOverrideCursor()

    def _mp_vel_drag(self, event):
        """Mouse-press helper: begin dragging velocity/parameter values."""
        if not self.isSelected():
            shared.PIANO_ROLL_EDITOR.scene.clearSelection()
            self.setSelected(True)
        self.is_velocity_dragging = True
        self._mp_vel_finish(event)

    def _mp_vel_curve(self, event):
        """Mouse-press helper: begin curving velocities across the selection."""
        if not self.isSelected():
            shared.PIANO_ROLL_EDITOR.scene.clearSelection()
            self.setSelected(True)
        f_list = [
            x.note_item.start for x in shared.PIANO_ROLL_EDITOR.get_selected_items()
        ]
        if len(f_list) > 1:
            # Curve from the earliest selected note, through this note,
            # to the latest selected note.
            f_list.sort()
            self.is_velocity_curving = True
            self.vc_start = f_list[0]
            self.vc_mid = self.note_item.start
            self.vc_end = f_list[-1]
        elif len(f_list) <= 1:
            # A single note degenerates to a plain velocity drag
            self.is_velocity_dragging = True
        self._mp_vel_finish(event)

    def _mp_vel_finish(self, event):
        """Mouse-press helper: capture origin values for a velocity drag/curve."""
        event.setAccepted(True)
        QGraphicsRectItem.mousePressEvent(self, event)
        self.orig_y = qt_event_pos(event).y()
        QApplication.setOverrideCursor(QtCore.Qt.CursorShape.BlankCursor)
        for f_item in shared.PIANO_ROLL_EDITOR.get_selected_items():
            if _shared.PARAMETER == 0:
                f_item.orig_value = f_item.note_item.velocity
            elif _shared.PARAMETER >= 1:
                f_item.orig_value = f_item.note_item.get_pmn_param(
                    _shared.PARAMETER,
                )
            f_item.set_brush()
        for f_item in shared.PIANO_ROLL_EDITOR.note_items:
            # Show numeric values on every note while dragging
            if _shared.PARAMETER == 0:
                f_item.note_text.setText(str(f_item.note_item.velocity))
            if _shared.PARAMETER >= 1:
                f_item.note_text.setText(
                    str(f_item.note_item.get_pmn_param(_shared.PARAMETER)),
                )

    def _mp_resize(self, event):
        """Mouse-press helper: begin resizing the selected notes."""
        self.is_resizing = True
        self.mouse_y_pos = QCursor.pos().y()
        self.resize_last_mouse_pos = qt_event_pos(event).x()
        for f_item in shared.PIANO_ROLL_EDITOR.get_selected_items():
            f_item.resize_start_pos = f_item.note_item.start
            f_item.resize_pos = f_item.pos()
            f_item.resize_rect = f_item.rect()

    def _mp_copy(self, event):
        """Mouse-press helper: begin copy-dragging the selected notes."""
        self.is_copying = True
        for f_item in shared.PIANO_ROLL_EDITOR.get_selected_items():
            # Draw a duplicate in place; the originals follow the drag
            shared.PIANO_ROLL_EDITOR.draw_note(f_item.note_item)

    def mousePressEvent(self, a_event):
        """Dispatch a press to toggle-select, erase, velocity edit, resize,
        copy, or plain move, depending on mode and modifier keys."""
        if a_event.button() == QtCore.Qt.MouseButton.RightButton:
            return
        if a_event.modifiers() == (QtCore.Qt.KeyboardModifier.AltModifier):
            self.selection_toggle = True
            self.setSelected(not self.isSelected())
            return
        if shared.EDITOR_MODE == shared.EDITOR_MODE_ERASE:
            _shared.piano_roll_set_delete_mode(True)
            self.delete_later()
            return
        elif a_event.modifiers() == (
            QtCore.Qt.KeyboardModifier.ControlModifier
            | QtCore.Qt.KeyboardModifier.AltModifier
        ):
            self._mp_vel_drag(a_event)
        elif a_event.modifiers() == (
            QtCore.Qt.KeyboardModifier.ControlModifier
            | QtCore.Qt.KeyboardModifier.ShiftModifier
        ):
            self._mp_vel_curve(a_event)
        else:
            a_event.setAccepted(True)
            QGraphicsRectItem.mousePressEvent(self, a_event)
            s_brush = QColor(
                theme.SYSTEM_COLORS.daw.note_selected_color,
            )
            self.setBrush(s_brush)
            self.o_pos = self.pos()
            if self.mouse_is_at_end(qt_event_pos(a_event)):
                self._mp_resize(a_event)
            elif a_event.modifiers() == (QtCore.Qt.KeyboardModifier.ControlModifier):
                self._mp_copy(a_event)
        shared.PIANO_ROLL_EDITOR.click_enabled = True

    def _mm_vel_drag(self, f_item, f_val):
        """Mouse-move helper: offset *f_item*'s velocity/parameter by *f_val*."""
        if _shared.PARAMETER == 0:
            f_new_vel = clip_value(
                f_val + f_item.orig_value,
                1,
                127,
            )
            f_new_vel = int(f_new_vel)
            f_item.note_item.velocity = f_new_vel
            f_item.note_text.setText(str(f_new_vel))
        elif _shared.PARAMETER >= 1:
            # Parameter range is -1.0..1.0; scale pixel delta down by 100
            new_value = clip_value(
                (f_val * 0.01) + f_item.orig_value,
                -1.0,
                1.0,
            )
            new_value = round(new_value, 2)
            f_item.note_item.set_pmn_param(
                _shared.PARAMETER,
                new_value,
            )
            f_item.note_text.setText(str(new_value))
        f_item.set_brush()
        f_item.set_vel_line()

    def _mm_vel_curve(self, f_item, f_val):
        """Mouse-move helper: apply a curved velocity/parameter offset that is
        strongest at the clicked note and tapers toward the selection's ends."""
        if _shared.PARAMETER == 0:
            f_start = f_item.note_item.start
            if f_start == self.vc_mid:
                f_new_vel = f_val + f_item.orig_value
            else:
                if f_start > self.vc_mid:
                    f_frac = (f_start - self.vc_mid) / (self.vc_end - self.vc_mid)
                    f_new_vel = linear_interpolate(f_val, 0.3 * f_val, f_frac)
                else:
                    f_frac = (f_start - self.vc_start) / (self.vc_mid - self.vc_start)
                    f_new_vel = linear_interpolate(0.3 * f_val, f_val, f_frac)
                f_new_vel += f_item.orig_value
            f_new_vel = clip_value(f_new_vel, 1, 127)
            f_new_vel = int(f_new_vel)
            f_item.note_item.velocity = f_new_vel
            f_item.note_text.setText(str(f_new_vel))
            f_item.set_brush()
            f_item.set_vel_line()
        elif _shared.PARAMETER >= 1:
            f_start = f_item.note_item.start
            if f_start == self.vc_mid:
                new_value = (f_val * 0.01) + f_item.orig_value
            else:
                if f_start > self.vc_mid:
                    f_frac = (f_start - self.vc_mid) / (self.vc_end - self.vc_mid)
                    new_value = linear_interpolate(
                        f_val * 0.01,
                        0.003 * f_val,
                        f_frac,
                    )
                else:
                    f_frac = (f_start - self.vc_start) / (self.vc_mid - self.vc_start)
                    new_value = linear_interpolate(
                        0.003 * f_val,
                        f_val * 0.01,
                        f_frac,
                    )
                new_value += f_item.orig_value
            new_value = clip_value(new_value, -1.0, 1.0)
            new_value = round(new_value, 2)
            f_item.note_item.set_pmn_param(
                _shared.PARAMETER,
                new_value,
            )
            f_item.note_text.setText(str(new_value))
            f_item.set_brush()
            f_item.set_vel_line()

    def _mm_resize(self, f_item, f_pos_x):
        """Mouse-move helper: resize *f_item*'s rect, snapping when enabled."""
        if shared.PIANO_ROLL_SNAP:
            f_adjusted_width = (
                round(f_pos_x / shared.PIANO_ROLL_SNAP_VALUE)
                * shared.PIANO_ROLL_SNAP_VALUE
            )
            if f_adjusted_width == 0.0:
                f_adjusted_width = shared.PIANO_ROLL_SNAP_VALUE
        else:
            f_adjusted_width = clip_min(
                f_pos_x,
                shared.PIANO_ROLL_MIN_NOTE_LENGTH,
            )
        f_item.resize_rect.setWidth(int(f_adjusted_width))
        f_item.setRect(f_item.resize_rect)
        f_item.setPos(f_item.resize_pos.x(), f_item.resize_pos.y())
        # Does not work on Wayland
        # QCursor.setPos(QCursor.pos().x(), self.mouse_y_pos)

    def _mm_move(self, f_item, preview):
        """Mouse-move helper: clamp/snap *f_item*'s position to the grid and,
        when *preview* is True, audition the note it is hovering over."""
        f_pos_x = f_item.pos().x()
        f_pos_y = f_item.pos().y()
        if f_pos_x < shared.PIANO_KEYS_WIDTH:
            f_pos_x = shared.PIANO_KEYS_WIDTH
        elif f_pos_x > shared.PIANO_ROLL_GRID_MAX_START_TIME:
            f_pos_x = shared.PIANO_ROLL_GRID_MAX_START_TIME
        if f_pos_y < _shared.PIANO_ROLL_HEADER_HEIGHT:
            f_pos_y = _shared.PIANO_ROLL_HEADER_HEIGHT
        elif f_pos_y > shared.PIANO_ROLL_TOTAL_HEIGHT:
            f_pos_y = shared.PIANO_ROLL_TOTAL_HEIGHT
        # Quantize vertically to whole note rows
        f_pos_y = (
            int((f_pos_y - _shared.PIANO_ROLL_HEADER_HEIGHT) / self.note_height)
            * self.note_height
        ) + _shared.PIANO_ROLL_HEADER_HEIGHT
        if shared.PIANO_ROLL_SNAP:
            f_pos_x = (
                int((f_pos_x - shared.PIANO_KEYS_WIDTH) / shared.PIANO_ROLL_SNAP_VALUE)
                * shared.PIANO_ROLL_SNAP_VALUE
            ) + shared.PIANO_KEYS_WIDTH
        f_item.setPos(f_pos_x, f_pos_y)
        f_new_note = self.y_pos_to_note(f_pos_y)
        f_item.update_note_text(f_new_note)
        if preview:
            orig_note = f_item.note_item.note_num
            if orig_note not in PREVIEW_NOTES and not f_item.previewer:
                PREVIEW_NOTES.add(orig_note)
                f_item.previewer = NotePreviewer()
            if f_item.previewer:
                f_item.previewer.update(f_new_note)

    def mouseMoveEvent(self, a_event):
        """Route a drag to erase, velocity edit, resize, or move for every
        selected note."""
        if a_event.button() == QtCore.Qt.MouseButton.RightButton:
            return
        if self.selection_toggle:
            return
        if shared.EDITOR_MODE == shared.EDITOR_MODE_ERASE:
            self.delete_later()
            return
        if self.is_velocity_dragging or self.is_velocity_curving:
            f_pos = qt_event_pos(a_event)
            f_y = f_pos.y()
            f_diff_y = self.orig_y - f_y
            f_val = f_diff_y * 0.5
        else:
            QGraphicsRectItem.mouseMoveEvent(self, a_event)
        if self.is_resizing:
            f_pos_x = qt_event_pos(a_event).x()
            self.resize_last_mouse_pos = qt_event_pos(a_event).x()
        selected_items = list(shared.PIANO_ROLL_EDITOR.get_selected_items())
        unique_notes = {x.note_item.note_num for x in selected_items}
        for f_item in selected_items:
            if self.is_resizing:
                self._mm_resize(f_item, f_pos_x)
            elif self.is_velocity_dragging:
                self._mm_vel_drag(f_item, f_val)
            elif self.is_velocity_curving:
                self._mm_vel_curve(f_item, f_val)
            else:
                # Only preview when few distinct pitches are being moved
                self._mm_move(f_item, len(unique_notes) <= 6)

    def y_pos_to_note(self, a_y):
        """Convert a scene y coordinate to a MIDI note number."""
        return int(
            shared.PIANO_ROLL_NOTE_COUNT
            - ((a_y - _shared.PIANO_ROLL_HEADER_HEIGHT) / shared.PIANO_ROLL_NOTE_HEIGHT)
        )

    def _mr_resize(self, f_item, f_pos_x, f_recip):
        """Mouse-release helper: commit *f_item*'s new length to the model."""
        f_new_note_length = (
            (f_pos_x + f_item.rect().width() - shared.PIANO_KEYS_WIDTH)
            * f_recip
            * shared.CURRENT_ITEM_LEN
        ) - f_item.resize_start_pos
        if shared.PIANO_ROLL_SNAP and f_new_note_length < shared.PIANO_ROLL_SNAP_BEATS:
            f_new_note_length = shared.PIANO_ROLL_SNAP_BEATS
        elif f_new_note_length < min_note_length:
            # NOTE(review): min_note_length is not defined in this module;
            # presumably provided by a wildcard import — verify.
            f_new_note_length = min_note_length
        f_item.note_item.set_length(f_new_note_length)

    def _mr_copy(self, f_item, f_pos_x, f_recip, f_new_selection):
        """Mouse-release helper: materialize the copied note at its new spot."""
        f_pos_y = f_item.pos().y()
        f_new_note_start = (
            (f_pos_x - shared.PIANO_KEYS_WIDTH) * shared.CURRENT_ITEM_LEN * f_recip
        )
        f_new_note_num = self.y_pos_to_note(f_pos_y)
        f_new_note = sg_project.MIDINote(
            f_new_note_start,
            f_item.note_item.length,
            f_new_note_num,
            f_item.note_item.velocity,
            f_item.note_item.pan,
            f_item.note_item.attack,
            f_item.note_item.decay,
            f_item.note_item.sustain,
            f_item.note_item.release,
            f_item.note_item.channel,
        )
        shared.CURRENT_ITEM.add_note(f_new_note, False)
        # pass a ref instead of a str in case
        # fix_overlaps() modifies it.
        f_item.note_item = f_new_note
        f_new_selection.append(f_item)

    def _mr_move(self, f_item, f_pos_x, f_recip):
        """Mouse-release helper: commit *f_item*'s new start/pitch to the model."""
        f_pos_y = f_item.pos().y()
        f_new_note_start = (
            (f_pos_x - shared.PIANO_KEYS_WIDTH) * shared.CURRENT_ITEM_LEN * f_recip
        )
        f_new_note_num = self.y_pos_to_note(f_pos_y)
        # Remove/re-append to keep the note list sorted after mutation
        shared.CURRENT_ITEM.notes.remove(f_item.note_item)
        f_item.note_item.set_start(f_new_note_start)
        f_item.note_item.note_num = f_new_note_num
        shared.CURRENT_ITEM.notes.append(f_item.note_item)
        shared.CURRENT_ITEM.notes.sort()

    def mouseReleaseEvent(self, a_event):
        """Commit the in-progress gesture (resize/copy/move/velocity edit),
        refresh the selection strings, reset all flags and save the item."""
        if a_event.button() == QtCore.Qt.MouseButton.RightButton:
            return
        if self.selection_toggle:
            shared.PIANO_ROLL_EDITOR.selected_note_strings.clear()
            for item in shared.PIANO_ROLL_EDITOR.get_selected_items():
                shared.PIANO_ROLL_EDITOR.selected_note_strings.append(
                    item.get_selected_string(),
                )
            self.selection_toggle = False
            return
        PREVIEW_NOTES.clear()
        if _shared.PIANO_ROLL_DELETE_MODE:
            _shared.piano_roll_set_delete_mode(False)
            return
        a_event.setAccepted(True)
        f_recip = 1.0 / shared.PIANO_ROLL_GRID_WIDTH
        QGraphicsRectItem.mouseReleaseEvent(self, a_event)
        if self.is_copying:
            f_new_selection = []
        for f_item in shared.PIANO_ROLL_EDITOR.get_selected_items():
            f_item.previewer = None
            f_pos_x = f_item.pos().x()
            if self.is_resizing:
                self._mr_resize(f_item, f_pos_x, f_recip)
            elif self.is_velocity_dragging or self.is_velocity_curving:
                pass
            elif self.is_copying:
                self._mr_copy(
                    f_item,
                    f_pos_x,
                    f_recip,
                    f_new_selection,
                )
            else:
                self._mr_move(f_item, f_pos_x, f_recip)
        if self.is_resizing:
            shared.LAST_NOTE_RESIZE = self.note_item.length
        shared.CURRENT_ITEM.fix_overlaps()
        _shared.SELECTED_PIANO_NOTE = None
        shared.PIANO_ROLL_EDITOR.selected_note_strings.clear()
        if self.is_copying:
            for f_new_item in f_new_selection:
                shared.PIANO_ROLL_EDITOR.selected_note_strings.append(
                    f_new_item.get_selected_string(),
                )
        else:
            for f_item in shared.PIANO_ROLL_EDITOR.get_selected_items():
                shared.PIANO_ROLL_EDITOR.selected_note_strings.append(
                    f_item.get_selected_string(),
                )
        for f_item in shared.PIANO_ROLL_EDITOR.note_items:
            f_item.is_resizing = False
            f_item.is_copying = False
            f_item.is_velocity_dragging = False
            f_item.is_velocity_curving = False
            f_item.selection_toggle = False
        global_save_and_reload_items()
        self.showing_resize_cursor = False
        QApplication.restoreOverrideCursor()
        shared.PIANO_ROLL_EDITOR.click_enabled = True
|
command | set_config | import ast
def set_config(engine, cmdline):
    """
    Set one or more Plover config options upon executing a stroke pattern.

    Syntax:
    {PLOVER:SET_CONFIG:option:value}
    {PLOVER:SET_CONFIG:option1:value1,option2:value2,...}

    Example usage:
    "O*EP": "{PLOVER:SET_CONFIG:'translation_frame_opacity':100}",
    "STA*RT": "{PLOVER:SET_CONFIG:'start_attached':True,'start_capitalized':True}",

    Be careful with nested quotes. Plover's JSON dictionaries use double quotes
    by default, so use single quotes for config option names and other strings.
    """
    # NOTE: this docstring is user-visible — _cmdline_to_dict() embeds it in
    # the ValueError message shown for malformed commands.
    # Each config setting can be processed as a key:value pair in a dict.
    # The engine.config property setter will update all settings at once.
    engine.config = _cmdline_to_dict(cmdline)
def _cmdline_to_dict(cmdline):
"""Add braces and parse the entire command line as a Python dict literal."""
try:
opt_dict = ast.literal_eval("{" + cmdline + "}")
assert isinstance(opt_dict, dict)
return opt_dict
except (AssertionError, SyntaxError, ValueError) as e:
raise ValueError(
'Bad command string "%s" for PLOVER:SET_CONFIG.\n' % cmdline
+ "See for reference:\n\n"
+ set_config.__doc__
) from e
|
PyObjCTest | test_cgimagedestination | from CoreFoundation import CFArrayRef
from Foundation import NSMutableData
from PyObjCTools.TestSupport import *
from Quartz import *
# Python 2/3 compatibility: on Python 3 the `unicode` and `long` builtins
# no longer exist, so alias them to their Python 3 equivalents (`str` and
# `int`) for the isinstance() assertions in the tests below.
try:
    unicode
except NameError:
    unicode = str
try:
    long
except NameError:
    long = int
import os
import sys
if sys.version_info[0] != 2:
    def buffer(value):
        """Python 3 stand-in for the py2 ``buffer`` builtin: return *value*
        as bytes, encoding text via latin-1."""
        return value if isinstance(value, bytes) else value.encode("latin1")
class TestCGImageDestination(TestCase):
    """Exercise the CGImageDestination bindings: CF type registration,
    string constants, and the create/add/finalize workflow."""
    def testTypes(self):
        # CGImageDestinationRef must be wrapped as a CoreFoundation type.
        self.assertIsCFType(CGImageDestinationRef)
    def testConstants(self):
        # Destination property keys are CFString constants (bridged to str).
        self.assertIsInstance(kCGImageDestinationLossyCompressionQuality, unicode)
        self.assertIsInstance(kCGImageDestinationBackgroundColor, unicode)
    def testFunctions(self):
        self.assertIsInstance(CGImageDestinationGetTypeID(), (int, long))
        # Copy* functions follow the CF "Create Rule": caller owns the result.
        self.assertResultIsCFRetained(CGImageDestinationCopyTypeIdentifiers)
        v = CGImageDestinationCopyTypeIdentifiers()
        self.assertIsInstance(v, CFArrayRef)
        if v:
            self.assertIsInstance(v[0], unicode)
        data = NSMutableData.data()
        self.assertResultIsCFRetained(CGImageDestinationCreateWithData)
        # Create a destination writing into an in-memory NSMutableData.
        dest = CGImageDestinationCreateWithData(data, v[0], 1, None)
        self.assertIsInstance(dest, CGImageDestinationRef)
        url = CFURLCreateWithFileSystemPath(
            None, "/tmp/pyobjc.test.pdf", kCFURLPOSIXPathStyle, False
        )
        self.assertResultIsCFRetained(CGImageDestinationCreateWithURL)
        # File-backed destination expecting 2 images of type public.tiff.
        dest = CGImageDestinationCreateWithURL(url, "public.tiff", 2, None)
        self.assertIsInstance(dest, CGImageDestinationRef)
        CGImageDestinationSetProperties(
            dest, {b"key".decode("latin1"): b"value".decode("latin1")}
        )
        # Build a synthetic 100x80 RGBA image (4 bytes/pixel, 400 bytes/row).
        provider = CGDataProviderCreateWithCFData(buffer("1" * 4 * 100 * 80))
        img = CGImageCreate(
            100,
            80,
            8,
            32,
            400,
            CGColorSpaceCreateDeviceRGB(),
            kCGImageAlphaPremultipliedLast,
            provider,
            None,
            False,
            kCGRenderingIntentDefault,
        )
        self.assertIsInstance(img, CGImageRef)
        CGImageDestinationAddImage(dest, img, None)
        # Use whichever Display Calibrator resource exists on this OS release.
        image_path = "/System/Library//ColorSync/Calibrators/Display Calibrator.app/Contents/Resources/bullet.tif"
        if not os.path.exists(image_path):
            image_path = "/System/Library//ColorSync/Calibrators/Display Calibrator.app/Contents/Resources/brightness.png"
        if not os.path.exists(image_path):
            image_path = "/System/Library//ColorSync/Calibrators/Display Calibrator.app/Contents/Resources/brightness.tiff"
        url = CFURLCreateWithFileSystemPath(
            None, image_path, kCFURLPOSIXPathStyle, False
        )
        isrc = CGImageSourceCreateWithURL(url, None)
        CGImageDestinationAddImageFromSource(dest, isrc, 0, None)
        self.assertResultHasType(CGImageDestinationFinalize, objc._C_BOOL)
        # Finalize writes the destination; must succeed with both images added.
        v = CGImageDestinationFinalize(dest)
        self.assertIsInstance(v, bool)
        self.assertIs(v, True)
        dta = NSMutableData.alloc().init()
        cons = CGDataConsumerCreateWithCFData(dta)
        self.assertResultIsCFRetained(CGImageDestinationCreateWithDataConsumer)
        c = CGImageDestinationCreateWithDataConsumer(cons, "public.tiff", 1, None)
        self.assertIsInstance(c, CGImageDestinationRef)
    @min_os_level("10.8")
    def testConstants10_8(self):
        # Metadata-related keys introduced in OS X 10.8.
        self.assertIsInstance(kCGImageDestinationMetadata, unicode)
        self.assertIsInstance(kCGImageDestinationMergeMetadata, unicode)
        self.assertIsInstance(kCGImageMetadataShouldExcludeXMP, unicode)
        self.assertIsInstance(kCGImageDestinationDateTime, unicode)
        self.assertIsInstance(kCGImageDestinationOrientation, unicode)
    @min_os_level("10.8")
    def testFunctions10_8(self):
        # Bare name reference: raises NameError if the binding is missing.
        CGImageDestinationAddImageAndMetadata
        self.assertResultHasType(CGImageDestinationCopyImageSource, objc._C_BOOL)
        self.assertArgIsOut(CGImageDestinationCopyImageSource, 3)
# Allow running this test module directly; `main` comes from
# PyObjCTools.TestSupport (star-imported above).
if __name__ == "__main__":
    main()
|
mystran | writer | # ***************************************************************************
# * Copyright (c) 2021 Bernd Hahnebach <bernd@bimstatik.org> *
# * *
# * This file is part of the FreeCAD CAx development system. *
# * *
# * This program is free software; you can redistribute it and/or modify *
# * it under the terms of the GNU Lesser General Public License (LGPL) *
# * as published by the Free Software Foundation; either version 2 of *
# * the License, or (at your option) any later version. *
# * for detail see the LICENCE text file. *
# * *
# * This program is distributed in the hope that it will be useful, *
# * but WITHOUT ANY WARRANTY; without even the implied warranty of *
# * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *
# * GNU Library General Public License for more details. *
# * *
# * You should have received a copy of the GNU Library General Public *
# * License along with this program; if not, write to the Free Software *
# * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 *
# * USA *
# * *
# ***************************************************************************
__title__ = "Mystran Writer"
__author__ = "Bernd Hahnebach"
__url__ = "http://www.freecad.org"
## \addtogroup FEM
# @{
import time
from os.path import join
import FreeCAD
# we need to import FreeCAD before the non FreeCAD library because of the print
# pyNastran is an optional dependency; report (rather than raise) so the
# FEM workbench still loads when it is missing — writing will fail later.
try:
    from pyNastran.bdf.bdf import BDF
except Exception:
    # Fix: original message read "will not be work" (grammar error in
    # user-facing console output).
    FreeCAD.Console.PrintError(
        "Module pyNastran not found. Writing Mystran solver input will not work.\n"
    )
from .. import writerbase
from . import (
add_con_fixed,
add_con_force,
add_femelement_geometry,
add_femelement_material,
add_mesh,
add_solver_control,
)
class FemInputWriterMystran(writerbase.FemInputWriter):
    """Input file writer for the Mystran solver.

    Produces two files in the working directory:
    * ``<basename>.bdf`` -- the Nastran-format solver input (via pyNastran)
    * ``<basename>.py``  -- a standalone pyNastran script that rebuilds the
      same model, useful for debugging the generated input.
    """

    def __init__(
        self,
        analysis_obj,
        solver_obj,
        mesh_obj,
        member,
        dir_name=None,
        mat_geo_sets=None,
    ):
        writerbase.FemInputWriter.__init__(
            self, analysis_obj, solver_obj, mesh_obj, member, dir_name, mat_geo_sets
        )
        # basename (only for implementation purpose later delete this code
        # the mesh should never be None for Calculix solver
        # working dir and input file
        if self.mesh_object is not None:
            self.basename = self.mesh_object.Name
        else:
            self.basename = "Mesh"
        self.solverinput_file = join(self.dir_name, self.basename + ".bdf")
        self.pynasinput_file = join(self.dir_name, self.basename + ".py")
        FreeCAD.Console.PrintLog(
            "FemInputWriterMystran --> self.dir_name --> {}\n".format(self.dir_name)
        )
        # Fix: log messages previously misspelled the class name
        # ("FemInputWriterMystra") and referenced a non-existent attribute
        # name ("self.pynasf_name").
        FreeCAD.Console.PrintMessage(
            "FemInputWriterMystran --> self.solverinput_file --> {}\n".format(
                self.solverinput_file
            )
        )
        FreeCAD.Console.PrintMessage(
            "FemInputWriterMystran --> self.pynasinput_file --> {}\n".format(
                self.pynasinput_file
            )
        )

    def write_solver_input(self):
        """Write the .bdf solver input and the companion pyNastran script.

        Returns the path of the written .bdf file.
        """
        timestart = time.process_time()
        model = BDF()
        # Context manager ensures the script file is closed even if one of
        # the add_* helpers raises (the original left the handle open).
        with open(self.pynasinput_file, "w") as pynasf:
            # comment and model init
            pynasf.write("# written by FreeCAD\n\n")
            pynasf.write("from pyNastran.bdf.bdf import BDF\n")
            pynasf.write("model = BDF()\n\n")
            # Each helper appends both to the live model and to the script.
            model = add_mesh.add_mesh(pynasf, model, self)
            model = add_femelement_material.add_femelement_material(pynasf, model, self)
            model = add_femelement_geometry.add_femelement_geometry(pynasf, model, self)
            model = add_con_force.add_con_force(pynasf, model, self)
            model = add_con_fixed.add_con_fixed(pynasf, model, self)
            model = add_solver_control.add_solver_control(pynasf, model, self)
            pynasf.write(
                "\n\nmodel.write_bdf('{}', enddata=True)\n".format(
                    join(self.dir_name, self.basename + "_pyNas.bdf")
                )
            )
        # print(model.get_bdf_stats())
        model.write_bdf(self.solverinput_file, enddata=True)
        writing_time_string = "Writing time input file: {} seconds".format(
            round((time.process_time() - timestart), 2)
        )
        FreeCAD.Console.PrintMessage(writing_time_string + " \n\n")
        return self.solverinput_file
## @}
|
components | cas_auth | from os import environ
from django.core.exceptions import ImproperlyConfigured
# We default to a live demo CAS server to facilitate QA and regression
# testing. The following credentials can be used to authenticate:
# Username: admin
# Password: django-cas-ng
# URL of the CAS server; falls back to the public demo server (see the
# credentials note above) when AUTH_CAS_SERVER_URL is unset.
CAS_DEMO_SERVER_URL = "https://django-cas-ng-demo-server.herokuapp.com/cas/"
CAS_SERVER_URL = environ.get("AUTH_CAS_SERVER_URL", CAS_DEMO_SERVER_URL)
# CAS protocol versions accepted by django-cas-ng.
ALLOWED_CAS_VERSION_VALUES = ("1", "2", "3", "CAS_2_SAML_1_0")
CAS_VERSION = environ.get("AUTH_CAS_PROTOCOL_VERSION", "3")
if CAS_VERSION not in ALLOWED_CAS_VERSION_VALUES:
    # Fail fast at import time rather than producing confusing auth errors.
    raise ImproperlyConfigured(
        (
            "Unexpected value for AUTH_CAS_PROTOCOL_VERSION: {}. "
            "Supported values: '1', '2', '3', or 'CAS_2_SAML_1_0'."
        ).format(CAS_VERSION)
    )
# NOTE(review): environ.get returns a *string* whenever the variable is set,
# so e.g. AUTH_CAS_CHECK_ADMIN_ATTRIBUTES="False" or "0" evaluates truthy
# here — confirm whether these flags are intended to be parsed as booleans.
CAS_CHECK_ADMIN_ATTRIBUTES = environ.get("AUTH_CAS_CHECK_ADMIN_ATTRIBUTES", False)
CAS_ADMIN_ATTRIBUTE = environ.get("AUTH_CAS_ADMIN_ATTRIBUTE", None)
CAS_ADMIN_ATTRIBUTE_VALUE = environ.get("AUTH_CAS_ADMIN_ATTRIBUTE_VALUE", None)
# Same string-truthiness caveat applies to this flag.
CAS_AUTOCONFIGURE_EMAIL = environ.get("AUTH_CAS_AUTOCONFIGURE_EMAIL", False)
CAS_EMAIL_DOMAIN = environ.get("AUTH_CAS_EMAIL_DOMAIN", None)
CAS_LOGIN_MSG = None
CAS_LOGIN_URL_NAME = "login"
CAS_LOGOUT_URL_NAME = "logout"
|
tasks | notify_ical_schedule_shift | import datetime
import json
import typing
from typing import TYPE_CHECKING
from apps.schedules.ical_utils import calculate_shift_diff, parse_event_uid
from apps.slack.client import SlackClient
from apps.slack.errors import (
SlackAPIChannelArchivedError,
SlackAPIChannelNotFoundError,
SlackAPIInvalidAuthError,
SlackAPITokenError,
)
from apps.slack.scenarios import scenario_step
from common.custom_celery_tasks import shared_dedicated_queue_retry_task
from django.utils import timezone
from .task_logger import task_logger
if TYPE_CHECKING:
from apps.schedules.models import OnCallSchedule
def convert_prev_shifts_to_new_format(
    prev_shifts: dict, schedule: "OnCallSchedule"
) -> list:
    """Convert shifts stored in the legacy dict format (keyed by ical event
    uid, users referenced by primary key) into the current event-list
    format, resolving user pks to display-info dicts in a single query
    against the schedule's organization."""
    new_prev_shifts = []
    user_ids = []
    users_info: typing.Dict[int, typing.Dict[str, str]] = {}
    # Collect every referenced user pk first so they can be fetched at once.
    for shift in prev_shifts.values():
        user_ids.extend(shift.get("users", []))
    prev_users = schedule.organization.users.filter(id__in=user_ids)
    for user in prev_users:
        users_info.setdefault(
            user.id,
            {
                "display_name": user.username,
                "email": user.email,
                "pk": user.public_primary_key,
                "avatar_full": user.avatar_full_url,
            },
        )
    # Rebuild each shift in the new event format; the shift pk is encoded
    # in the ical event uid.
    for uid, shift in prev_shifts.items():
        shift_pk, _ = parse_event_uid(uid)
        new_prev_shifts.append(
            {
                "users": [users_info[user_pk] for user_pk in shift["users"]],
                "start": shift["start"],
                "end": shift["end"],
                "all_day": shift["all_day"],
                "priority_level": shift["priority"],
                "shift": {"pk": shift_pk},
            }
        )
    return new_prev_shifts
@shared_dedicated_queue_retry_task()
def notify_ical_schedule_shift(schedule_pk):
    """Notify the schedule's Slack channel about on-call shift changes.

    Compares the currently active shifts against those persisted on the
    schedule from the previous run and, if they differ, posts a report
    (including a preview of the next upcoming shifts) to the configured
    Slack channel. Exits early when the schedule no longer exists, has no
    Slack workspace, or belongs to a deleted organization.
    """
    task_logger.info(f"Start notify ical schedule shift {schedule_pk}")
    from apps.schedules.models import OnCallSchedule
    try:
        # Only schedules with a cached ical and a Slack channel qualify.
        schedule = OnCallSchedule.objects.get(
            pk=schedule_pk,
            cached_ical_file_primary__isnull=False,
            channel__isnull=False,
        )
    except OnCallSchedule.DoesNotExist:
        task_logger.info(
            f"Trying to notify ical schedule shift for non-existing schedule {schedule_pk}"
        )
        return
    if schedule.organization.slack_team_identity is None:
        task_logger.info(
            f"Trying to notify ical schedule shift with no slack team identity {schedule_pk}, "
            f"organization {schedule.organization_id}"
        )
        return
    elif schedule.organization.deleted_at:
        task_logger.info(
            f"Trying to notify ical schedule shift from deleted organization {schedule_pk}, "
            f"organization {schedule.organization_id}"
        )
        return
    task_logger.info(
        f"Notify ical schedule shift {schedule_pk}, organization {schedule.organization_id}"
    )
    MIN_DAYS_TO_LOOKUP_FOR_THE_END_OF_EVENT = 3
    now = datetime.datetime.now(timezone.utc)
    # Events active exactly now (zero-width window).
    current_shifts = schedule.final_events(
        now, now, with_empty=False, with_gap=False, ignore_untaken_swaps=True
    )
    prev_shifts = (
        json.loads(schedule.current_shifts) if not schedule.empty_oncall else []
    )
    prev_shifts_updated = False
    # convert prev_shifts to new events format for compatibility with the previous version of this task
    if prev_shifts and isinstance(prev_shifts, dict):
        prev_shifts = convert_prev_shifts_to_new_format(prev_shifts, schedule)
        prev_shifts_updated = True
    # convert datetimes which was dumped to str back to datetime to calculate shift diff correct
    str_format = "%Y-%m-%d %X%z"
    for prev_shift in prev_shifts:
        prev_shift["start"] = datetime.datetime.strptime(
            prev_shift["start"], str_format
        )
        prev_shift["end"] = datetime.datetime.strptime(prev_shift["end"], str_format)
    shift_changed, diff_shifts = calculate_shift_diff(current_shifts, prev_shifts)
    # Do not notify if there is no difference between current and previous shifts
    if not shift_changed:
        task_logger.info(
            f"No shift diff found for schedule {schedule_pk}, organization {schedule.organization_id}"
        )
        # If prev shifts were converted to a new format, update related field in db
        if prev_shifts_updated:
            schedule.current_shifts = json.dumps(current_shifts, default=str)
            schedule.save(update_fields=["current_shifts"])
        return
    new_shifts = sorted(diff_shifts, key=lambda shift: shift["start"])
    # get days_to_lookup for next shifts
    if len(new_shifts) != 0:
        # Look ahead far enough to cover the end of the longest new shift.
        max_end_date = max([shift["end"].date() for shift in new_shifts])
        days_to_lookup = (max_end_date - now.date()).days + 1
        days_to_lookup = max([days_to_lookup, MIN_DAYS_TO_LOOKUP_FOR_THE_END_OF_EVENT])
    else:
        days_to_lookup = MIN_DAYS_TO_LOOKUP_FOR_THE_END_OF_EVENT
    datetime_end = now + datetime.timedelta(days=days_to_lookup)
    next_shifts_unfiltered = schedule.final_events(
        now, datetime_end, with_empty=False, with_gap=False, ignore_untaken_swaps=True
    )
    # drop events that already started
    next_shifts = []
    for next_shift in next_shifts_unfiltered:
        if now < next_shift["start"]:
            next_shifts.append(next_shift)
    upcoming_shifts = []
    # Add the earliest next_shift
    if len(next_shifts) > 0:
        earliest_shift = next_shifts[0]
        upcoming_shifts.append(earliest_shift)
        # Check if there are next shifts with the same start as the earliest
        for shift in next_shifts[1:]:
            if shift["start"] == earliest_shift["start"]:
                upcoming_shifts.append(shift)
    schedule.empty_oncall = len(current_shifts) == 0
    if not schedule.empty_oncall:
        schedule.current_shifts = json.dumps(current_shifts, default=str)
    schedule.save(update_fields=["current_shifts", "empty_oncall"])
    if len(new_shifts) > 0 or schedule.empty_oncall:
        task_logger.info(f"new_shifts: {new_shifts}")
        if (
            schedule.notify_oncall_shift_freq
            != OnCallSchedule.NotifyOnCallShiftFreq.NEVER
        ):
            slack_client = SlackClient(schedule.organization.slack_team_identity)
            step = scenario_step.ScenarioStep.get_step(
                "schedules", "EditScheduleShiftNotifyStep"
            )
            report_blocks = step.get_report_blocks_ical(
                new_shifts, upcoming_shifts, schedule, schedule.empty_oncall
            )
            try:
                slack_client.chat_postMessage(
                    channel=schedule.channel,
                    blocks=report_blocks,
                    text=f"On-call shift for schedule {schedule.name} has changed",
                )
            except (
                SlackAPITokenError,
                SlackAPIChannelNotFoundError,
                SlackAPIChannelArchivedError,
                SlackAPIInvalidAuthError,
            ):
                # Best-effort notification: missing/archived channels and
                # invalid tokens are expected and deliberately ignored.
                pass
|
cherrypy | _cpserver | """Manage HTTP servers with CherryPy."""
import warnings
import cherrypy
from cherrypy._cpcompat import basestring, py3k
from cherrypy.lib import attributes
# We import * because we want to export check_port
# et al as attributes of this module.
from cherrypy.process.servers import *
class Server(ServerAdapter):
    """An adapter for an HTTP server.
    You can set attributes (like socket_host and socket_port)
    on *this* object (which is probably cherrypy.server), and call
    quickstart. For example::
        cherrypy.server.socket_port = 80
        cherrypy.quickstart()
    """
    socket_port = 8080
    """The TCP port on which to listen for connections."""
    _socket_host = "127.0.0.1"
    def _get_socket_host(self):
        return self._socket_host
    def _set_socket_host(self, value):
        # Reject '' explicitly: some socket layers treat it as INADDR_ANY,
        # which would silently expose the server on all interfaces.
        if value == "":
            raise ValueError(
                "The empty string ('') is not an allowed value. "
                "Use '0.0.0.0' instead to listen on all active "
                "interfaces (INADDR_ANY)."
            )
        self._socket_host = value
    socket_host = property(
        _get_socket_host,
        _set_socket_host,
        doc="""The hostname or IP address on which to listen for connections.
        Host values may be any IPv4 or IPv6 address, or any valid hostname.
        The string 'localhost' is a synonym for '127.0.0.1' (or '::1', if
        your hosts file prefers IPv6). The string '0.0.0.0' is a special
        IPv4 entry meaning "any active interface" (INADDR_ANY), and '::'
        is the similar IN6ADDR_ANY for IPv6. The empty string or None are
        not allowed.""",
    )
    socket_file = None
    """If given, the name of the UNIX socket to use instead of TCP/IP.
    When this option is not None, the `socket_host` and `socket_port` options
    are ignored."""
    socket_queue_size = 5
    """The 'backlog' argument to socket.listen(); specifies the maximum number
    of queued connections (default 5)."""
    socket_timeout = 10
    """The timeout in seconds for accepted connections (default 10)."""
    accepted_queue_size = -1
    """The maximum number of requests which will be queued up before
    the server refuses to accept it (default -1, meaning no limit)."""
    accepted_queue_timeout = 10
    """The timeout in seconds for attempting to add a request to the
    queue when the queue is full (default 10)."""
    shutdown_timeout = 5
    """The time to wait for HTTP worker threads to clean up."""
    protocol_version = "HTTP/1.1"
    """The version string to write in the Status-Line of all HTTP responses,
    for example, "HTTP/1.1" (the default). Depending on the HTTP server used,
    this should also limit the supported features used in the response."""
    thread_pool = 10
    """The number of worker threads to start up in the pool."""
    thread_pool_max = -1
    """The maximum size of the worker-thread pool. Use -1 to indicate no limit.
    """
    max_request_header_size = 500 * 1024
    """The maximum number of bytes allowable in the request headers.
    If exceeded, the HTTP server should return "413 Request Entity Too Large".
    """
    max_request_body_size = 100 * 1024 * 1024
    """The maximum number of bytes allowable in the request body. If exceeded,
    the HTTP server should return "413 Request Entity Too Large"."""
    instance = None
    """If not None, this should be an HTTP server instance (such as
    CPWSGIServer) which cherrypy.server will control. Use this when you need
    more control over object instantiation than is available in the various
    configuration options."""
    ssl_context = None
    """When using PyOpenSSL, an instance of SSL.Context."""
    ssl_certificate = None
    """The filename of the SSL certificate to use."""
    ssl_certificate_chain = None
    """When using PyOpenSSL, the certificate chain to pass to
    Context.load_verify_locations."""
    ssl_private_key = None
    """The filename of the private key to use with SSL."""
    # Default SSL adapter differs by Python major version: py3 has a usable
    # builtin ssl module; py2 historically relied on PyOpenSSL.
    if py3k:
        ssl_module = "builtin"
        """The name of a registered SSL adaptation module to use with
        the builtin WSGI server. Builtin options are: 'builtin' (to
        use the SSL library built into recent versions of Python).
        You may also register your own classes in the
        wsgiserver.ssl_adapters dict."""
    else:
        ssl_module = "pyopenssl"
        """The name of a registered SSL adaptation module to use with the
        builtin WSGI server. Builtin options are 'builtin' (to use the SSL
        library built into recent versions of Python) and 'pyopenssl' (to
        use the PyOpenSSL project, which you must install separately). You
        may also register your own classes in the wsgiserver.ssl_adapters
        dict."""
    statistics = False
    """Turns statistics-gathering on or off for aware HTTP servers."""
    nodelay = True
    """If True (the default since 3.1), sets the TCP_NODELAY socket option."""
    wsgi_version = (1, 0)
    """The WSGI version tuple to use with the builtin WSGI server.
    The provided options are (1, 0) [which includes support for PEP 3333,
    which declares it covers WSGI version 1.0.1 but still mandates the
    wsgi.version (1, 0)] and ('u', 0), an experimental unicode version.
    You may create and register your own experimental versions of the WSGI
    protocol by adding custom classes to the wsgiserver.wsgi_gateways dict."""
    def __init__(self):
        # The engine bus drives start/stop of this adapter.
        self.bus = cherrypy.engine
        self.httpserver = None
        self.interrupt = None
        self.running = False
    def httpserver_from_self(self, httpserver=None):
        """Return a (httpserver, bind_addr) pair based on self attributes."""
        if httpserver is None:
            httpserver = self.instance
        if httpserver is None:
            # No explicit instance configured: build the default WSGI server.
            from cherrypy import _cpwsgi_server
            httpserver = _cpwsgi_server.CPWSGIServer(self)
        if isinstance(httpserver, basestring):
            # Is anyone using this? Can I add an arg?
            httpserver = attributes(httpserver)(self)
        return httpserver, self.bind_addr
    def start(self):
        """Start the HTTP server."""
        if not self.httpserver:
            self.httpserver, self.bind_addr = self.httpserver_from_self()
        ServerAdapter.start(self)
    # Bus-listener priority: start the HTTP server after earlier plugins.
    start.priority = 75
    def _get_bind_addr(self):
        # A UNIX socket path takes precedence over host/port.
        if self.socket_file:
            return self.socket_file
        if self.socket_host is None and self.socket_port is None:
            return None
        return (self.socket_host, self.socket_port)
    def _set_bind_addr(self, value):
        if value is None:
            self.socket_file = None
            self.socket_host = None
            self.socket_port = None
        elif isinstance(value, basestring):
            self.socket_file = value
            self.socket_host = None
            self.socket_port = None
        else:
            try:
                self.socket_host, self.socket_port = value
                self.socket_file = None
            except ValueError:
                raise ValueError(
                    "bind_addr must be a (host, port) tuple "
                    "(for TCP sockets) or a string (for Unix "
                    "domain sockets), not %r" % value
                )
    bind_addr = property(
        _get_bind_addr,
        _set_bind_addr,
        doc="A (host, port) tuple for TCP sockets or " "a str for Unix domain sockets.",
    )
    def base(self):
        """Return the base (scheme://host[:port] or sock file) for this server."""
        if self.socket_file:
            return self.socket_file
        host = self.socket_host
        if host in ("0.0.0.0", "::"):
            # 0.0.0.0 is INADDR_ANY and :: is IN6ADDR_ANY.
            # Look up the host name, which should be the
            # safest thing to spit out in a URL.
            import socket
            host = socket.gethostname()
        port = self.socket_port
        # Omit the port when it matches the scheme's default (443/80).
        if self.ssl_certificate:
            scheme = "https"
            if port != 443:
                host += ":%s" % port
        else:
            scheme = "http"
            if port != 80:
                host += ":%s" % port
        return "%s://%s" % (scheme, host)
|
Settings | SimpleModeSettingsManager | # Copyright (c) 2017 Ultimaker B.V.
# Cura is released under the terms of the LGPLv3 or higher.
from PyQt6.QtCore import QObject, pyqtProperty, pyqtSignal
from UM.Application import Application
class SimpleModeSettingsManager(QObject):
    """Tracks whether the active quality profile carries user-made setting
    changes, exposing the result to QML as ``isProfileCustomized``."""

    def __init__(self, parent=None):
        super().__init__(parent)
        self._machine_manager = Application.getInstance().getMachineManager()
        self._is_profile_customized = False  # True when default profile has user changes
        self._is_profile_user_created = False  # True when profile was custom created by user
        self._machine_manager.activeStackValueChanged.connect(
            self._updateIsProfileCustomized
        )
        # update on create as the activeQualityChanged signal is emitted before this manager is created when Cura starts
        self._updateIsProfileCustomized()

    isProfileCustomizedChanged = pyqtSignal()

    @pyqtProperty(bool, notify=isProfileCustomizedChanged)
    def isProfileCustomized(self):
        return self._is_profile_customized

    def _updateIsProfileCustomized(self):
        # Recompute the "has user changes" flag from the global stack and
        # all extruder stacks; emit the change signal only on transitions.
        if not self._machine_manager.activeMachine:
            return False
        global_stack = self._machine_manager.activeMachine
        changed_keys = set(global_stack.userChanges.getAllKeys())
        if global_stack.extruderList:
            for extruder in global_stack.extruderList:
                changed_keys.update(extruder.userChanges.getAllKeys())
        has_user_changes = bool(changed_keys)
        if has_user_changes != self._is_profile_customized:
            self._is_profile_customized = has_user_changes
            self.isProfileCustomizedChanged.emit()
|
extractor | mlb | from __future__ import unicode_literals
import re
from ..utils import determine_ext, int_or_none, parse_duration, parse_iso8601, try_get
from .common import InfoExtractor
class MLBBaseIE(InfoExtractor):
    """Shared extraction logic for MLB.com videos.
    Subclasses supply _download_video_data, _get_feed,
    _extract_mlb_subtitles and _TIMESTAMP_KEY."""
    def _real_extract(self, url):
        display_id = self._match_id(url)
        video = self._download_video_data(display_id)
        video_id = video["id"]
        title = video["title"]
        feed = self._get_feed(video)
        formats = []
        for playback in feed.get("playbacks") or []:
            playback_url = playback.get("url")
            if not playback_url:
                continue
            name = playback.get("name")
            ext = determine_ext(playback_url)
            if ext == "m3u8":
                # HLS master playlist: expand into per-variant formats.
                formats.extend(
                    self._extract_m3u8_formats(
                        playback_url,
                        video_id,
                        "mp4",
                        "m3u8_native",
                        m3u8_id=name,
                        fatal=False,
                    )
                )
            else:
                f = {
                    "format_id": name,
                    "url": playback_url,
                }
                # Try to recover bitrate/resolution from the playback name,
                # e.g. "..._1200K_640X360".
                mobj = re.search(r"_(\d+)K_(\d+)X(\d+)", name)
                if mobj:
                    f.update(
                        {
                            "height": int(mobj.group(3)),
                            "tbr": int(mobj.group(1)),
                            "width": int(mobj.group(2)),
                        }
                    )
                # Fall back to the URL pattern "..._WxH_FPS_TBRK.mp4".
                mobj = re.search(r"_(\d+)x(\d+)_(\d+)_(\d+)K\.mp4", playback_url)
                if mobj:
                    f.update(
                        {
                            "fps": int(mobj.group(3)),
                            "height": int(mobj.group(2)),
                            "tbr": int(mobj.group(4)),
                            "width": int(mobj.group(1)),
                        }
                    )
                formats.append(f)
        self._sort_formats(formats)
        thumbnails = []
        for cut in try_get(feed, lambda x: x["image"]["cuts"], list) or []:
            src = cut.get("src")
            if not src:
                continue
            thumbnails.append(
                {
                    "height": int_or_none(cut.get("height")),
                    "url": src,
                    "width": int_or_none(cut.get("width")),
                }
            )
        # Subtitle entries are keyed by the video's language (default "en").
        language = (video.get("language") or "EN").lower()
        return {
            "id": video_id,
            "title": title,
            "formats": formats,
            "description": video.get("description"),
            "duration": parse_duration(feed.get("duration")),
            "thumbnails": thumbnails,
            "timestamp": parse_iso8601(video.get(self._TIMESTAMP_KEY)),
            "subtitles": self._extract_mlb_subtitles(feed, language),
        }
class MLBIE(MLBBaseIE):
    """Extractor for legacy MLB.com URLs carrying a numeric content_id
    (c-<id> video pages and shared/embed player URLs)."""
    _VALID_URL = r"""(?x)
        https?://
        (?:[\da-z_-]+\.)*mlb\.com/
        (?:
            (?:
                (?:[^/]+/)*video/[^/]+/c-|
                (?:
                    shared/video/embed/(?:embed|m-internal-embed)\.html|
                    (?:[^/]+/)+(?:play|index)\.jsp|
                )\?.*?\bcontent_id=
            )
            (?P<id>\d+)
        )
        """
    _TESTS = [
        {
            "url": "https://www.mlb.com/mariners/video/ackleys-spectacular-catch/c-34698933",
            "md5": "632358dacfceec06bad823b83d21df2d",
            "info_dict": {
                "id": "34698933",
                "ext": "mp4",
                "title": "Ackley's spectacular catch",
                "description": "md5:7f5a981eb4f3cbc8daf2aeffa2215bf0",
                "duration": 66,
                "timestamp": 1405995000,
                "upload_date": "20140722",
                "thumbnail": r"re:^https?://.*\.jpg$",
            },
        },
        {
            "url": "https://www.mlb.com/video/stanton-prepares-for-derby/c-34496663",
            "md5": "bf2619bf9cacc0a564fc35e6aeb9219f",
            "info_dict": {
                "id": "34496663",
                "ext": "mp4",
                "title": "Stanton prepares for Derby",
                "description": "md5:d00ce1e5fd9c9069e9c13ab4faedfa57",
                "duration": 46,
                "timestamp": 1405120200,
                "upload_date": "20140711",
                "thumbnail": r"re:^https?://.*\.jpg$",
            },
        },
        {
            "url": "https://www.mlb.com/video/cespedes-repeats-as-derby-champ/c-34578115",
            "md5": "99bb9176531adc600b90880fb8be9328",
            "info_dict": {
                "id": "34578115",
                "ext": "mp4",
                "title": "Cespedes repeats as Derby champ",
                "description": "md5:08df253ce265d4cf6fb09f581fafad07",
                "duration": 488,
                "timestamp": 1405414336,
                "upload_date": "20140715",
                "thumbnail": r"re:^https?://.*\.jpg$",
            },
        },
        {
            "url": "https://www.mlb.com/video/bautista-on-home-run-derby/c-34577915",
            "md5": "da8b57a12b060e7663ee1eebd6f330ec",
            "info_dict": {
                "id": "34577915",
                "ext": "mp4",
                "title": "Bautista on Home Run Derby",
                "description": "md5:b80b34031143d0986dddc64a8839f0fb",
                "duration": 52,
                "timestamp": 1405405122,
                "upload_date": "20140715",
                "thumbnail": r"re:^https?://.*\.jpg$",
            },
        },
        {
            "url": "https://www.mlb.com/video/hargrove-homers-off-caldwell/c-1352023483?tid=67793694",
            "only_matching": True,
        },
        {
            "url": "http://m.mlb.com/shared/video/embed/embed.html?content_id=35692085&topic_id=6479266&width=400&height=224&property=mlb",
            "only_matching": True,
        },
        {
            "url": "http://mlb.mlb.com/shared/video/embed/embed.html?content_id=36599553",
            "only_matching": True,
        },
        {
            "url": "http://mlb.mlb.com/es/video/play.jsp?content_id=36599553",
            "only_matching": True,
        },
        {
            "url": "https://www.mlb.com/cardinals/video/piscottys-great-sliding-catch/c-51175783",
            "only_matching": True,
        },
        {
            # From http://m.mlb.com/news/article/118550098/blue-jays-kevin-pillar-goes-spidey-up-the-wall-to-rob-tim-beckham-of-a-homer
            "url": "http://mlb.mlb.com/shared/video/embed/m-internal-embed.html?content_id=75609783&property=mlb&autoplay=true&hashmode=false&siteSection=mlb/multimedia/article_118550098/article_embed&club=mlb",
            "only_matching": True,
        },
    ]
    # Legacy API exposes the upload time under "date".
    _TIMESTAMP_KEY = "date"
    @staticmethod
    def _get_feed(video):
        # Legacy details JSON keeps feed fields at the top level.
        return video
    @staticmethod
    def _extract_mlb_subtitles(feed, language):
        # Closed-caption URLs are stored as specially-typed keyword entries.
        subtitles = {}
        for keyword in feed.get("keywordsAll") or []:
            keyword_type = keyword.get("type")
            if keyword_type and keyword_type.startswith("closed_captions_location_"):
                cc_location = keyword.get("value")
                if cc_location:
                    subtitles.setdefault(language, []).append(
                        {
                            "url": cc_location,
                        }
                    )
        return subtitles
    def _download_video_data(self, display_id):
        # Fetch the legacy per-content-id details JSON.
        return self._download_json(
            "http://content.mlb.com/mlb/item/id/v1/%s/details/web-v1.json" % display_id,
            display_id,
        )
class MLBVideoIE(MLBBaseIE):
    """Extractor for current slug-based mlb.com video URLs, backed by the
    fastball GraphQL gateway."""
    _VALID_URL = r"https?://(?:www\.)?mlb\.com/(?:[^/]+/)*video/(?P<id>[^/?&#]+)"
    _TEST = {
        "url": "https://www.mlb.com/mariners/video/ackley-s-spectacular-catch-c34698933",
        "md5": "632358dacfceec06bad823b83d21df2d",
        "info_dict": {
            "id": "c04a8863-f569-42e6-9f87-992393657614",
            "ext": "mp4",
            "title": "Ackley's spectacular catch",
            "description": "md5:7f5a981eb4f3cbc8daf2aeffa2215bf0",
            "duration": 66,
            "timestamp": 1405995000,
            "upload_date": "20140722",
            "thumbnail": r"re:^https?://.+",
        },
    }
    # GraphQL API exposes the upload time under "timestamp".
    _TIMESTAMP_KEY = "timestamp"
    @classmethod
    def suitable(cls, url):
        # Defer to MLBIE for legacy content_id-style URLs.
        return False if MLBIE.suitable(url) else super(MLBVideoIE, cls).suitable(url)
    @staticmethod
    def _get_feed(video):
        # GraphQL responses nest playback info under a feeds list.
        return video["feeds"][0]
    @staticmethod
    def _extract_mlb_subtitles(feed, language):
        subtitles = {}
        for cc_location in feed.get("closedCaptions") or []:
            subtitles.setdefault(language, []).append(
                {
                    "url": cc_location,
                }
            )
        # Bug fix: the original fell off the end and implicitly returned
        # None; _real_extract expects a subtitles dict (cf. MLBIE's version).
        return subtitles
    def _download_video_data(self, display_id):
        # https://www.mlb.com/data-service/en/videos/[SLUG]
        return self._download_json(
            "https://fastball-gateway.mlb.com/graphql",
            display_id,
            query={
                "query": """{
  mediaPlayback(ids: "%s") {
    description
    feeds(types: CMS) {
      closedCaptions
      duration
      image {
        cuts {
          width
          height
          src
        }
      }
      playbacks {
        name
        url
      }
    }
    id
    timestamp
    title
  }
}"""
                % display_id,
            },
        )["data"]["mediaPlayback"][0]
|
versions | 001_103676e0a497_create_existing_tables | # encoding: utf-8
"""Create existing tables
Revision ID: 103676e0a497
Revises:
Create Date: 2018-09-04 16:57:42.622504
"""
import sqlalchemy as sa
from alembic import op
from ckan.migration import skip_based_on_legacy_engine_version
# revision identifiers, used by Alembic.
revision = "103676e0a497"
down_revision = None
branch_labels = None
depends_on = None
def upgrade():
    """Create the initial CKAN tables (snapshot of the pre-alembic schema).

    Tables are created parents-first so foreign keys resolve: state and
    revision, then entity tables, then their *_revision history tables.
    """
    # Databases migrated with the legacy (SQLAlchemy-Migrate) tooling
    # already contain these tables; skip re-creating them.
    if skip_based_on_legacy_engine_version(op, __name__):
        return
    op.create_table(
        "state",
        sa.Column("id", sa.Integer, primary_key=True, nullable=False),
        sa.Column("name", sa.Unicode(100)),
    )
    op.create_table(
        "revision",
        sa.Column("id", sa.Integer, primary_key=True, nullable=False),
        sa.Column("timestamp", sa.DateTime(timezone=False)),
        sa.Column("author", sa.Unicode(200)),
        sa.Column("message", sa.UnicodeText()),
        sa.Column("state_id", sa.Integer),
    )
    op.create_table(
        "apikey",
        sa.Column("id", sa.Integer, primary_key=True, nullable=False),
        sa.Column("name", sa.UnicodeText()),
        sa.Column("key", sa.UnicodeText()),
    )
    op.create_table(
        "license",
        sa.Column("id", sa.Integer, primary_key=True, nullable=False),
        sa.Column("name", sa.Unicode(100)),
        sa.Column("state_id", sa.Integer),
    )
    op.create_table(
        "package",
        sa.Column("id", sa.Integer, primary_key=True, nullable=False),
        sa.Column("name", sa.Unicode(100), nullable=False, unique=True),
        sa.Column("title", sa.UnicodeText()),
        sa.Column("version", sa.Unicode(100)),
        sa.Column("url", sa.UnicodeText()),
        sa.Column("download_url", sa.UnicodeText()),
        sa.Column("notes", sa.UnicodeText()),
        sa.Column("license_id", sa.Integer, sa.ForeignKey("license.id")),
        sa.Column("state_id", sa.Integer, sa.ForeignKey("state.id")),
        sa.Column("revision_id", sa.Integer, sa.ForeignKey("revision.id")),
    )
    # History table: composite key of row id + revision id, pointing back
    # at the live row via continuity_id.
    op.create_table(
        "package_revision",
        sa.Column("id", sa.Integer, primary_key=True, nullable=False),
        sa.Column("name", sa.Unicode(100), nullable=False),
        sa.Column("title", sa.UnicodeText()),
        sa.Column("version", sa.Unicode(100)),
        sa.Column("url", sa.UnicodeText()),
        sa.Column("download_url", sa.UnicodeText()),
        sa.Column("notes", sa.UnicodeText()),
        sa.Column("license_id", sa.Integer, sa.ForeignKey("license.id")),
        sa.Column("state_id", sa.Integer, sa.ForeignKey("state.id")),
        sa.Column(
            "revision_id", sa.Integer, sa.ForeignKey("revision.id"), primary_key=True
        ),
        sa.Column("continuity_id", sa.Integer, sa.ForeignKey("package.id")),
    )
    op.create_table(
        "tag",
        sa.Column("id", sa.Integer, primary_key=True, nullable=False),
        sa.Column("name", sa.Unicode(100), nullable=False, unique=True),
    )
    op.create_table(
        "package_tag",
        sa.Column("id", sa.Integer, primary_key=True, nullable=False),
        sa.Column("package_id", sa.Integer, sa.ForeignKey("package.id")),
        sa.Column("tag_id", sa.Integer, sa.ForeignKey("tag.id")),
        sa.Column("state_id", sa.Integer, sa.ForeignKey("state.id")),
        sa.Column("revision_id", sa.Integer, sa.ForeignKey("revision.id")),
    )
    op.create_table(
        "package_tag_revision",
        sa.Column("id", sa.Integer, primary_key=True, nullable=False),
        sa.Column("package_id", sa.Integer, sa.ForeignKey("package.id")),
        sa.Column("tag_id", sa.Integer, sa.ForeignKey("tag.id")),
        sa.Column("state_id", sa.Integer, sa.ForeignKey("state.id")),
        sa.Column(
            "revision_id", sa.Integer, sa.ForeignKey("revision.id"), primary_key=True
        ),
        sa.Column("continuity_id", sa.Integer, sa.ForeignKey("package_tag.id")),
    )
    op.create_table(
        "package_extra",
        sa.Column("id", sa.Integer, primary_key=True, nullable=False),
        sa.Column("package_id", sa.Integer, sa.ForeignKey("package.id")),
        sa.Column("key", sa.UnicodeText()),
        sa.Column("value", sa.UnicodeText()),
        sa.Column("state_id", sa.Integer, sa.ForeignKey("state.id")),
        sa.Column("revision_id", sa.Integer, sa.ForeignKey("revision.id")),
    )
    op.create_table(
        "package_extra_revision",
        sa.Column("id", sa.Integer, primary_key=True, nullable=False),
        sa.Column("package_id", sa.Integer, sa.ForeignKey("package.id")),
        sa.Column("key", sa.UnicodeText()),
        sa.Column("value", sa.UnicodeText()),
        sa.Column("state_id", sa.Integer, sa.ForeignKey("state.id")),
        sa.Column(
            "revision_id", sa.Integer, sa.ForeignKey("revision.id"), primary_key=True
        ),
        sa.Column("continuity_id", sa.Integer, sa.ForeignKey("package_extra.id")),
    )
def downgrade():
    """Drop every table created by this migration.

    Tables are removed in reverse dependency order so that tables holding
    foreign keys are dropped before the tables they reference.
    """
    for table_name in (
        "package_extra_revision",
        "package_extra",
        "package_tag_revision",
        "package_tag",
        "tag",
        "package_revision",
        "package",
        "license",
        "apikey",
        "revision",
        "state",
    ):
        op.drop_table(table_name)
|
utils | test_utils_test | __copyright__ = "Copyright (C) 2014-2016 Martin Blais"
__license__ = "GNU GPLv2"
import io
import os
import unittest
from os import path
from beancount.utils import test_utils
class TestTestUtils(unittest.TestCase):
    """Tests for the helpers in beancount.utils.test_utils."""

    def test_tempdir(self):
        """tempdir() must delete the directory and all its contents on exit."""
        with test_utils.tempdir() as tempdir:
            with open(path.join(tempdir, "file1"), "w"):
                pass
            os.mkdir(path.join(tempdir, "directory"))
            with open(path.join(tempdir, "directory", "file2"), "w"):
                pass
        # Everything created inside the context must be gone afterwards.
        self.assertFalse(path.exists(tempdir))
        self.assertFalse(path.exists(path.join(tempdir, "file1")))
        self.assertFalse(path.exists(path.join(tempdir, "directory")))

    def test_create_temporary_files(self):
        """create_temporary_files() writes a tree of files, expanding {root}."""
        with test_utils.tempdir() as tmp:
            test_utils.create_temporary_files(
                tmp,
                {
                    "apples.beancount": """
                  include "{root}/fruits/oranges.beancount"

                  2014-01-01 open Assets:Apples
                """,
                    "fruits/oranges.beancount": """
                  2014-01-02 open Assets:Oranges
                """,
                },
            )

            # Check the total list of files.
            apples = path.join(tmp, "apples.beancount")
            oranges = path.join(tmp, "fruits/oranges.beancount")
            self.assertEqual(
                {apples, oranges},
                set(
                    path.join(root, filename)
                    for root, _, files in os.walk(tmp)
                    for filename in files
                ),
            )

            # Check the contents of apples (with replacement of root).
            with open(apples) as f:
                apples_content = f.read()
            self.assertRegex(apples_content, "open Assets:Apples")
            # {root} placeholders must have been substituted away.
            self.assertNotRegex(apples_content, "{root}")

            # Check the contents of oranges.
            with open(oranges) as f:
                oranges_content = f.read()
            self.assertRegex(oranges_content, "open Assets:Oranges")

    def test_capture(self):
        """capture() must redirect stdout into the yielded StringIO."""
        text = "b9baaa0c-0f0a-47db-bffc-a00c6f4ac1db"
        with test_utils.capture() as output:
            self.assertTrue(isinstance(output, io.StringIO))
            print(text)
        self.assertEqual(text + "\n", output.getvalue())

    @test_utils.docfile
    def test_docfile(self, filename):
        "7f9034b1-51e7-420c-ac6b-945b5c594ebf"
        # The decorator writes this method's docstring into a temp file and
        # passes its name in as `filename`.
        with open(filename) as f:
            uuid = f.read()
        self.assertEqual("7f9034b1-51e7-420c-ac6b-945b5c594ebf", uuid)

    @test_utils.docfile_extra(suffix=".txt")
    def test_docfile_extra(self, filename):
        "7f9034b1-51e7-420c-ac6b-945b5c594ebf"
        # Same as docfile, but extra kwargs (here: suffix) are forwarded to
        # the temp-file constructor.
        with open(filename) as f:
            uuid = f.read()
        self.assertEqual("7f9034b1-51e7-420c-ac6b-945b5c594ebf", uuid)
        self.assertTrue(".txt" in filename)

    def test_search_words(self):
        """search_words() accepts both a space-separated string and a list."""
        test_utils.search_words("i walrus is", "i am the walrus is not chicago")
        test_utils.search_words("i walrus is".split(), "i am the walrus is not chicago")

    def test_environ_contextmanager(self):
        """environ() must set the variable inside and restore it outside."""
        with test_utils.environ("PATH", "/unlikely-to-be-your-path"):
            self.assertEqual("/unlikely-to-be-your-path", os.getenv("PATH"))
        self.assertNotEqual("/unlikely-to-be-your-path", os.getenv("PATH"))
class TestTestCase(test_utils.TestCase):
    """Tests for the extra assertions provided by test_utils.TestCase."""

    def test_assertLines(self):
        """assertLines() compares text ignoring surrounding whitespace layout."""
        self.assertLines(
            """
           43c62bff-8504-44ea-b5c0-afa218a7a973
           95ef1cc4-0016-4452-9f4e-1a053db2bc83
        """,
            """
             43c62bff-8504-44ea-b5c0-afa218a7a973
             95ef1cc4-0016-4452-9f4e-1a053db2bc83
        """,
        )

        # Different content must still fail.
        with self.assertRaises(AssertionError):
            self.assertLines(
                """
               43c62bff-8504-44ea-b5c0-afa218a7a973
            """,
                """
                 683f111f-f921-4db3-a3e8-daae344981e8
            """,
            )

    def test_assertOutput(self):
        """assertOutput() captures stdout and compares it to expected text."""
        with self.assertOutput(
            """
           3165efbc-c775-4503-be13-06b7167697a9
        """
        ):
            print("3165efbc-c775-4503-be13-06b7167697a9")

        # Printing something else must fail the assertion on context exit.
        with self.assertRaises(AssertionError):
            with self.assertOutput(
                """
               3165efbc-c775-4503-be13-06b7167697a9
            """
            ):
                print("78d58502a15e")
class TestSkipIfRaises(unittest.TestCase):
    """Tests for test_utils.skipIfRaises (usable as decorator and context manager)."""

    def test_decorator(self):
        """As a decorator: pass through normally, skip on the given error."""
        @test_utils.skipIfRaises(ValueError)
        def decorator_no_skip():
            pass

        decorator_no_skip()

        @test_utils.skipIfRaises(ValueError)
        def decorator_skip():
            raise ValueError

        # The wrapped ValueError must be converted into unittest.SkipTest.
        with self.assertRaises(unittest.SkipTest):
            decorator_skip()

    def test_decorator_many(self):
        """Multiple exception types may be given; any of them triggers a skip."""
        @test_utils.skipIfRaises(ValueError, IndexError)
        def decorator_skip():
            raise ValueError

        with self.assertRaises(unittest.SkipTest):
            decorator_skip()

    def test_contextmanager(self):
        """As a context manager: no-op normally, SkipTest on the given error."""
        with test_utils.skipIfRaises(ValueError):
            pass

        with self.assertRaises(unittest.SkipTest):
            with test_utils.skipIfRaises(ValueError):
                raise ValueError
@test_utils.nottest
def test_not_really():
    # Despite the test_ prefix, @nottest hides this from test collection;
    # the assert would fail unconditionally if it ever ran.
    assert False
if __name__ == "__main__":
    # Allow running this test module directly with `python <file>`.
    unittest.main()
|
phone-notifications | exceptions | class BaseFailed(Exception):
"""
Failed is base exception for all Failed... exceptions.
This exception is indicates error while performing some phone notification operation.
Optionally can contain graceful_msg attribute. When graceful_msg is provided it mean that error on provider side is
not our fault, but some provider error (number is blocked, fraud guard, ...).
By default, graceful_msg is None - it means that error is our fault (network problems, invalid configuration,...).
Attributes:
graceful_msg: string with some details about exception which can be exposed to caller.
"""
def __init__(self, graceful_msg=None):
self.graceful_msg = graceful_msg
class FailedToMakeCall(BaseFailed):
    """Raised when placing a phone call through the provider fails."""

    pass


class FailedToSendSMS(BaseFailed):
    """Raised when sending an SMS through the provider fails."""

    pass


class FailedToStartVerification(BaseFailed):
    """Raised when starting a phone-number verification fails."""

    pass


class FailedToFinishVerification(BaseFailed):
    """Raised when completing a phone-number verification fails."""

    pass


class NumberNotVerified(Exception):
    """Raised when an operation requires a verified number but it is not."""

    pass


class NumberAlreadyVerified(Exception):
    """Raised when verification is requested for an already-verified number."""

    pass


class ProviderNotSupports(Exception):
    """Raised when the configured provider does not support the operation."""

    pass


class CallsLimitExceeded(Exception):
    """Raised when the allowed number of calls has been exhausted."""

    pass


class SMSLimitExceeded(Exception):
    """Raised when the allowed number of SMS messages has been exhausted."""

    pass
|
classes | query | """
@file
@brief This file can easily query Clips, Files, and other project data
@author Jonathan Thomas <jonathan@openshot.org>
@section LICENSE
Copyright (c) 2008-2018 OpenShot Studios, LLC
(http://www.openshotstudios.com). This file is part of
OpenShot Video Editor (http://www.openshot.org), an open-source project
dedicated to delivering high quality video editing and animation solutions
to the world.
OpenShot Video Editor is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
OpenShot Video Editor is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with OpenShot Library. If not, see <http://www.gnu.org/licenses/>.
"""
import copy
import json
import os
from classes import info
from classes.app import get_app
class QueryObject:
    """This class allows one or more project data objects to be queried.

    Subclasses set `object_name`/`object_key` and wrap `save`, `delete`,
    `filter` and `get` with their own class as OBJECT_TYPE.
    """

    def __init__(self):
        """Constructor"""
        self.id = None  # Unique ID of object
        self.key = None  # Key path to object in project data
        self.data = None  # Data dictionary of object
        self.parent = None  # Only used with effects (who belong to clips)
        self.type = "insert"  # Type of operation needed to save ("insert"/"update"/"delete")

    def save(self, OBJECT_TYPE):
        """Save the object back to the project data store.

        First call inserts (generating an id); subsequent calls update.
        """
        # Insert or Update this data into the project data store
        if not self.id and self.type == "insert":
            # Insert record, and Generate id
            self.id = get_app().project.generate_id()

            # save id in data (if attribute found)
            self.data["id"] = json.loads(json.dumps(self.id))

            # Set key (if needed)
            # json round-trip produces an independent copy of object_key so
            # appending here cannot mutate the class-level list.
            if not self.key:
                self.key = json.loads(json.dumps(OBJECT_TYPE.object_key))
                self.key.append({"id": self.id})

            # Insert into project data
            get_app().updates.insert(
                json.loads(json.dumps(OBJECT_TYPE.object_key)),
                json.loads(json.dumps(self.data)),
            )

            # Mark record as 'update' now... so another call to this method won't insert it again
            self.type = "update"

        elif self.id and self.type == "update":
            # Update existing project data
            get_app().updates.update(self.key, self.data)

    def delete(self, OBJECT_TYPE):
        """Delete the object from the project data store.

        A pending insert (no prior save) is never deleted from the store.
        """
        # Delete if object found and not pending insert
        if self.id and self.type == "update":
            # Delete from project data store
            get_app().updates.delete(self.key)
            self.type = "delete"

    def title(self):
        """Get the translated display title of this item"""
        # Needs to be overwritten in each derived class
        return None

    def filter(OBJECT_TYPE, **kwargs):
        """Take any arguments given as filters, and find a list of matching objects.

        Every kwarg must match the child's value for that key; the special
        kwarg "intersect" matches children whose [position, position+end-start]
        interval contains the given value.

        NOTE: defined without `self`; it is invoked as QueryObject.filter(Type, ...)
        i.e. used like a static method.
        """
        # Get a list of all objects of this type
        parent = get_app().project.get(OBJECT_TYPE.object_key)
        if not parent:
            return []
        matching_objects = []

        # Loop through all children objects
        for child in parent:
            # Protect against non-iterable/subscriptables
            if not child:
                continue

            # Loop through all kwargs (and look for matches)
            match = True
            for key, value in kwargs.items():
                if key in child and child[key] != value:
                    match = False
                    break
                # Intersection Position
                if key == "intersect" and (
                    child.get("position", 0) > value
                    or child.get("position", 0)
                    + (child.get("end", 0) - child.get("start", 0))
                    < value
                ):
                    match = False

            # Add matched record
            if match:
                object = OBJECT_TYPE()
                object.id = child["id"]
                object.key = [OBJECT_TYPE.object_name, {"id": object.id}]
                # json round-trip gives an independent deep copy of the child
                object.data = json.loads(json.dumps(child))  # copy of object
                object.type = "update"
                matching_objects.append(object)

        # Return matching objects
        return matching_objects

    def get(OBJECT_TYPE, **kwargs):
        """Take any arguments given as filters, and find the first matching object.

        Returns None when nothing matches.
        """
        # Look for matching objects
        matching_objects = QueryObject.filter(OBJECT_TYPE, **kwargs)

        if matching_objects:
            return matching_objects[0]
        else:
            return None
class Clip(QueryObject):
    """Query interface for Clip records in the project data store."""

    object_name = "clips"  # Top-level project-data key for this type
    object_key = [object_name]

    def save(self):
        """Persist this clip (insert on first save, update afterwards)."""
        QueryObject.save(self, Clip)

    def delete(self):
        """Remove this clip from the project data store."""
        QueryObject.delete(self, Clip)

    def filter(**kwargs):
        """Return every Clip matching the given keyword filters."""
        return QueryObject.filter(Clip, **kwargs)

    def get(**kwargs):
        """Return the first Clip matching the given keyword filters, or None."""
        return QueryObject.get(Clip, **kwargs)

    def title(self):
        """Return this clip's display title: the base name of its media file."""
        reader_path = self.data.get("reader", {}).get("path")
        return os.path.basename(reader_path)
class Transition(QueryObject):
    """This class allows Transitions (i.e. timeline effects) to be queried, updated, and deleted from the project data."""

    object_name = "effects"  # Transitions are stored under the top-level "effects" key
    object_key = [object_name]  # Derived classes should define this also

    def save(self):
        """Save the object back to the project data store"""
        super().save(Transition)

    def delete(self):
        """Delete the object from the project data store"""
        super().delete(Transition)

    def filter(**kwargs):
        """Take any arguments given as filters, and find a list of matching objects"""
        return QueryObject.filter(Transition, **kwargs)

    def get(**kwargs):
        """Take any arguments given as filters, and find the first matching object"""
        return QueryObject.get(Transition, **kwargs)

    def title(self):
        """Get the translated display title of this item.

        Derived from the transition's file name, e.g. "fade_2" -> tr("Fade %s") % "2",
        so the numeric suffix is excluded from translation.
        """
        path = self.data.get("reader", {}).get("path")
        fileBaseName = os.path.splitext(os.path.basename(path))[0]

        # split the name into parts (looking for a trailing number)
        suffix_number = None
        name_parts = fileBaseName.split("_")
        if name_parts[-1].isdigit():
            suffix_number = name_parts[-1]

        # get name of transition (underscores become spaces, then capitalized)
        item_name = fileBaseName.replace("_", " ").capitalize()

        # replace suffix number with placeholder (if any), translate the
        # placeholder form, then substitute the number back in
        if suffix_number:
            item_name = item_name.replace(suffix_number, "%s")
            item_name = get_app()._tr(item_name) % suffix_number
        else:
            item_name = get_app()._tr(item_name)
        return item_name
class File(QueryObject):
    """Query interface for File records in the project data store."""

    object_name = "files"  # Top-level project-data key for this type
    object_key = [object_name]

    def save(self):
        """Persist this file record (insert on first save, update afterwards)."""
        QueryObject.save(self, File)

    def delete(self):
        """Remove this file record from the project data store."""
        QueryObject.delete(self, File)

    def filter(**kwargs):
        """Return every File matching the given keyword filters."""
        return QueryObject.filter(File, **kwargs)

    def get(**kwargs):
        """Return the first File matching the given keyword filters, or None."""
        return QueryObject.get(File, **kwargs)

    def absolute_path(self):
        """Return this file's absolute path on disk.

        Relative paths are resolved against the folder containing the
        current project file, when one is available.
        """
        stored_path = self.data["path"]
        if os.path.isabs(stored_path):
            return stored_path

        app = get_app()
        if app and hasattr(app, "project") and hasattr(app.project, "current_filepath"):
            project_folder = os.path.dirname(app.project.current_filepath)
            stored_path = os.path.abspath(os.path.join(project_folder, stored_path))
        return stored_path

    def relative_path(self):
        """Return this file's path relative to Python's current working directory."""
        return os.path.relpath(self.absolute_path(), info.CWD)
class Marker(QueryObject):
    """Query interface for Marker records in the project data store."""

    object_name = "markers"  # Top-level project-data key for this type
    object_key = [object_name]

    def save(self):
        """Persist this marker (insert on first save, update afterwards)."""
        QueryObject.save(self, Marker)

    def delete(self):
        """Remove this marker from the project data store."""
        QueryObject.delete(self, Marker)

    def filter(**kwargs):
        """Return every Marker matching the given keyword filters."""
        return QueryObject.filter(Marker, **kwargs)

    def get(**kwargs):
        """Return the first Marker matching the given keyword filters, or None."""
        return QueryObject.get(Marker, **kwargs)
class Track(QueryObject):
    """Query interface for timeline layers ("tracks") in the project data store.

    Tracks are orderable by their layer number via < and >.
    """

    object_name = "layers"  # Top-level project-data key for this type
    object_key = [object_name]

    def save(self):
        """Persist this track (insert on first save, update afterwards)."""
        QueryObject.save(self, Track)

    def delete(self):
        """Remove this track from the project data store."""
        QueryObject.delete(self, Track)

    def filter(**kwargs):
        """Return every Track matching the given keyword filters."""
        return QueryObject.filter(Track, **kwargs)

    def get(**kwargs):
        """Return the first Track matching the given keyword filters, or None."""
        return QueryObject.get(Track, **kwargs)

    def __lt__(self, other):
        """Order by layer number, treating a missing number as 0."""
        mine = self.data.get("number", 0)
        theirs = other.data.get("number", 0)
        return mine < theirs

    def __gt__(self, other):
        """Reverse ordering by layer number, treating a missing number as 0."""
        mine = self.data.get("number", 0)
        theirs = other.data.get("number", 0)
        return mine > theirs
class Effect(QueryObject):
    """This class allows Effects to be queried, updated, and deleted from the project data."""

    object_name = "effects"  # Derived classes should define this
    object_key = [object_name]  # Derived classes should define this also

    def save(self):
        """Save the object back to the project data store"""
        super().save(Effect)

    def delete(self):
        """Delete the object from the project data store"""
        super().delete(Effect)

    def filter(**kwargs):
        """Take any arguments given as filters, and find a list of matching objects.

        Effects are nested inside clips, so this cannot reuse
        QueryObject.filter (which only scans one top-level list).
        """
        # Get a list of clips
        clips = get_app().project.get("clips")
        matching_objects = []

        # Loop through all clips
        if clips:
            for clip in clips:
                # Loop through all effects
                if "effects" in clip:
                    for child in clip["effects"]:
                        # Loop through all kwargs (and look for matches)
                        match = True
                        for key, value in kwargs.items():
                            if key in child and child[key] != value:
                                match = False
                                break

                        # Add matched record
                        if match:
                            object = Effect()
                            object.id = child["id"]
                            # Key path descends through the owning clip
                            object.key = [
                                "clips",
                                {"id": clip["id"]},
                                "effects",
                                {"id": object.id},
                            ]
                            # NOTE(review): unlike QueryObject.filter, `data`
                            # is NOT a deep copy here - it aliases the project
                            # data dict; confirm whether that is intentional.
                            object.data = child
                            object.type = "update"
                            object.parent = clip
                            matching_objects.append(object)

        # Return matching objects
        return matching_objects

    def title(self):
        """Get the translated display title of this item"""
        return self.data.get("name") or self.data.get("type")

    def get(**kwargs):
        """Take any arguments given as filters, and find the first matching object"""
        # Look for matching objects
        matching_objects = Effect.filter(**kwargs)

        if matching_objects:
            return matching_objects[0]
        else:
            return None
|
gui | uicolor | # This file is part of MyPaint.
# Copyright (C) 2015 by Andrew Chadwick <a.t.chadwick@gmail.com>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
"""UIColor conversion routines
These are functions which convert
our display color classes (see `lib.color.UIColor`)
to and from GDK's equivalents.
They can't be part of lib/ because of the GDK dependency.
"""
from __future__ import division, print_function
import struct
from lib.color import RGBColor
from lib.gibindings import Gdk
from lib.helpers import clamp
def from_gdk_color(gdk_color):
    """Construct a new UIColor from a Gdk.Color.

    >>> from_gdk_color(Gdk.Color(0.0000, 0x8000, 0xffff))
    <RGBColor r=0.0000, g=0.5000, b=1.0000>
    """
    # Gdk.Color channels are 16-bit; scale each down to [0, 1].
    return RGBColor(
        gdk_color.red / 65535,
        gdk_color.green / 65535,
        gdk_color.blue / 65535,
    )
def to_gdk_color(color):
    """Convert a UIColor to a Gdk.Color.

    >>> gcol = to_gdk_color(RGBColor(1,1,1))
    >>> gcol.to_string()
    '#ffffffffffff'
    """
    # Scale each [0, 1] channel up to Gdk's 16-bit range.
    channels = [int(component * 65535) for component in color.get_rgb()]
    return Gdk.Color(*channels)
def from_gdk_rgba(gdk_rgba):
    """Construct a new UIColor from a `Gdk.RGBA` (omitting alpha)

    >>> from_gdk_rgba(Gdk.RGBA(0.5, 0.8, 0.2, 1))
    <RGBColor r=0.5000, g=0.8000, b=0.2000>
    """
    # Channels are already floats; just clamp each into [0, 1].
    return RGBColor(
        clamp(gdk_rgba.red, 0.0, 1.0),
        clamp(gdk_rgba.green, 0.0, 1.0),
        clamp(gdk_rgba.blue, 0.0, 1.0),
    )
def to_gdk_rgba(color):
    """Convert to a `GdkRGBA` (with alpha=1.0).

    >>> col = RGBColor(1,1,1)
    >>> rgba = to_gdk_rgba(col)
    >>> rgba.to_string()
    'rgb(255,255,255)'
    """
    r, g, b = color.get_rgb()
    return Gdk.RGBA(r, g, b, 1.0)
def from_drag_data(data):
    """Construct from drag+dropped bytes of type application/x-color.

    The data format is 8 bytes, RRGGBBAA, with assumed native endianness.
    Alpha is ignored.

    :param data: the 8-byte selection payload.
    """
    # Parameter renamed from ``bytes`` to stop shadowing the builtin type.
    # TODO: check endianness ("=" assumes the sender used native byte order)
    r, g, b, a = [h / 0xFFFF for h in struct.unpack("=HHHH", data)]
    return RGBColor(r, g, b)
def to_drag_data(color):
    """Converts to bytes for dragging as application/x-color."""
    # Scale each [0, 1] channel to 16-bit and append an opaque alpha.
    r, g, b = (int(component * 0xFFFF) for component in color.get_rgb())
    return struct.pack("=HHHH", r, g, b, 0xFFFF)
def _test():
    """Run this module's doctests."""
    import doctest

    doctest.testmod()


if __name__ == "__main__":
    # Running the module directly executes the doctests above.
    _test()
|
localModule | changeStates | import eos.db
import gui.mainFrame
import wx
from gui import globalEvents as GE
from gui.fitCommands.calc.module.localChangeStates import (
CalcChangeLocalModuleStatesCommand,
)
from gui.fitCommands.helpers import InternalCommandHistory, restoreRemovedDummies
from service.fit import Fit
class GuiChangeLocalModuleStatesCommand(wx.Command):
    """Undoable GUI command that toggles the state of local fit modules.

    Wraps CalcChangeLocalModuleStatesCommand in an internal history so the
    whole operation can be undone as a unit, and notifies the main frame
    with a FitChanged event after each Do/Undo.
    """

    def __init__(self, fitID, mainPosition, positions, click):
        """Store the target fit, module positions and the click that triggered this."""
        wx.Command.__init__(self, True, "Change Local Module States")
        self.internalHistory = InternalCommandHistory()
        self.fitID = fitID
        self.mainPosition = mainPosition  # position of the module that was clicked
        self.positions = positions  # all selected module positions
        self.click = click  # which mouse button / modifier was used
        self.savedRemovedDummies = None  # dummy slots removed by fill(), for Undo

    def Do(self):
        """Apply the state change; returns True when the calc command succeeded."""
        cmd = CalcChangeLocalModuleStatesCommand(
            fitID=self.fitID,
            mainPosition=self.mainPosition,
            positions=self.positions,
            click=self.click,
        )
        success = self.internalHistory.submit(cmd)
        sFit = Fit.getInstance()
        # Recalculate only when the calc command says the GUI needs it.
        if cmd.needsGuiRecalc:
            eos.db.flush()
            sFit.recalc(self.fitID)
            # Remember removed dummy modules so Undo can restore them.
            self.savedRemovedDummies = sFit.fill(self.fitID)
        eos.db.commit()
        wx.PostEvent(
            gui.mainFrame.MainFrame.getInstance(), GE.FitChanged(fitIDs=(self.fitID,))
        )
        return success

    def Undo(self):
        """Revert the state change and restore any removed dummy slots."""
        sFit = Fit.getInstance()
        fit = sFit.getFit(self.fitID)
        # Restore dummies before undoing, so positions line up again.
        restoreRemovedDummies(fit, self.savedRemovedDummies)
        success = self.internalHistory.undoAll()
        eos.db.flush()
        sFit.recalc(self.fitID)
        sFit.fill(self.fitID)
        eos.db.commit()
        wx.PostEvent(
            gui.mainFrame.MainFrame.getInstance(), GE.FitChanged(fitIDs=(self.fitID,))
        )
        return success
|
browser | urlmarks | # SPDX-FileCopyrightText: Florian Bruhin (The Compiler) <mail@qutebrowser.org>
# SPDX-FileCopyrightText: Antoni Boucher <bouanto@zoho.com>
#
# SPDX-License-Identifier: GPL-3.0-or-later
"""Managers for bookmarks and quickmarks.
Note we violate our general QUrl rule by storing url strings in the marks
OrderedDict. This is because we read them from a file at start and write them
to a file on shutdown, so it makes sense to keep them as strings here.
"""
import collections
import functools
import html
import os
import os.path
from typing import MutableMapping
from qutebrowser.api import cmdutils
from qutebrowser.misc import lineparser
from qutebrowser.qt.core import QObject, QUrl, pyqtSignal
from qutebrowser.utils import (
log,
message,
objreg,
qtutils,
standarddir,
urlutils,
usertypes,
)
# Exception hierarchy: every error raised by this module derives from Error,
# so callers can catch them all with a single `except Error`.
class Error(Exception):

    """Base class for all errors in this module."""


class InvalidUrlError(Error):

    """Exception emitted when a URL is invalid."""


class DoesNotExistError(Error):

    """Exception emitted when a given URL does not exist."""


class AlreadyExistsError(Error):

    """Exception emitted when a given URL does already exist."""
class UrlMarkManager(QObject):

    """Base class for BookmarkManager and QuickmarkManager.

    Attributes:
        marks: An OrderedDict of all quickmarks/bookmarks.
        _lineparser: The LineParser used for the marks

    Signals:
        changed: Emitted when anything changed.
    """

    changed = pyqtSignal()

    _lineparser: lineparser.LineParser

    def __init__(self, parent=None):
        """Initialize and read quickmarks."""
        super().__init__(parent)

        self.marks: MutableMapping[str, str] = collections.OrderedDict()

        self._init_lineparser()
        for line in self._lineparser:
            blank = not line.strip()
            comment = line.startswith("#")
            if blank or comment:
                # Skip empty/whitespace-only lines and comment lines.
                continue
            self._parse_line(line)
        self._init_savemanager(objreg.get("save-manager"))

    def _init_lineparser(self):
        raise NotImplementedError

    def _parse_line(self, line):
        raise NotImplementedError

    def _init_savemanager(self, _save_manager):
        raise NotImplementedError

    def save(self):
        """Save the marks to disk."""
        serialized = [" ".join(pair) for pair in self.marks.items()]
        self._lineparser.data = serialized
        self._lineparser.save()

    def delete(self, key):
        """Delete a quickmark/bookmark.

        Args:
            key: The key to delete (name for quickmarks, URL for bookmarks.)
        """
        del self.marks[key]
        self.changed.emit()

    def clear(self):
        """Delete all marks."""
        self.marks.clear()
        self.changed.emit()
class QuickmarkManager(UrlMarkManager):

    """Manager for quickmarks.

    The primary key for quickmarks is their *name*, this means:

        - self.marks maps names to URLs.
        - changed gets emitted with the name as first argument and the URL as
          second argument.
    """

    def _init_lineparser(self):
        # Quickmarks live in a single "quickmarks" file in the config dir.
        self._lineparser = lineparser.LineParser(
            standarddir.config(), "quickmarks", parent=self
        )

    def _init_savemanager(self, save_manager):
        filename = os.path.join(standarddir.config(), "quickmarks")
        save_manager.add_saveable(
            "quickmark-manager", self.save, self.changed, filename=filename
        )

    def _parse_line(self, line):
        try:
            # rsplit with maxsplit=1: the URL is the last whitespace-separated
            # token, so quickmark names may contain spaces.
            key, url = line.rsplit(maxsplit=1)
        except ValueError:
            message.error("Invalid quickmark '{}'".format(line))
        else:
            self.marks[key] = url

    def prompt_save(self, url):
        """Prompt for a new quickmark name to be added and add it.

        Args:
            url: The quickmark url as a QUrl.
        """
        if not url.isValid():
            urlutils.invalid_url_error(url, "save quickmark")
            return
        # Store URLs without passwords, fully percent-encoded.
        urlstr = url.toString(
            QUrl.UrlFormattingOption.RemovePassword
            | QUrl.ComponentFormattingOption.FullyEncoded
        )
        message.ask_async(
            "Add quickmark:",
            usertypes.PromptMode.text,
            functools.partial(self.quickmark_add, urlstr),
            text="Please enter a quickmark name for<br/><b>{}</b>".format(
                html.escape(url.toDisplayString())
            ),
            url=urlstr,
        )

    @cmdutils.register(instance="quickmark-manager")
    def quickmark_add(self, url, name):
        """Add a new quickmark.

        You can view all saved quickmarks on the
        link:qute://bookmarks[bookmarks page].

        Args:
            url: The url to add as quickmark.
            name: The name for the new quickmark.
        """
        # We don't raise cmdutils.CommandError here as this can be called async
        # via prompt_save.
        if not name:
            message.error("Can't set mark with empty name!")
            return
        if not url:
            message.error("Can't set mark with empty URL!")
            return

        def set_mark():
            """Really set the quickmark."""
            self.marks[name] = url
            self.changed.emit()
            log.misc.debug("Added quickmark {} for {}".format(name, url))

        if name in self.marks:
            # Ask before silently overwriting an existing name.
            message.confirm_async(
                title="Override existing quickmark?",
                yes_action=set_mark,
                default=True,
                url=url,
            )
        else:
            set_mark()

    def get_by_qurl(self, url):
        """Look up a quickmark by QUrl, returning its name.

        Takes O(n) time, where n is the number of quickmarks.
        Use a name instead where possible.
        """
        qtutils.ensure_valid(url)
        urlstr = url.toString(
            QUrl.UrlFormattingOption.RemovePassword
            | QUrl.ComponentFormattingOption.FullyEncoded
        )

        try:
            # Reverse lookup: find the position of the URL among the values,
            # then fetch the key at the same position.
            index = list(self.marks.values()).index(urlstr)
            key = list(self.marks.keys())[index]
        except ValueError:
            raise DoesNotExistError("Quickmark for '{}' not found!".format(urlstr))
        return key

    def get(self, name):
        """Get the URL of the quickmark named name as a QUrl."""
        if name not in self.marks:
            raise DoesNotExistError("Quickmark '{}' does not exist!".format(name))
        urlstr = self.marks[name]
        try:
            url = urlutils.fuzzy_url(urlstr, do_search=False)
        except urlutils.InvalidUrlError as e:
            raise InvalidUrlError(
                "Invalid URL for quickmark {}: {}".format(name, str(e))
            )
        return url
class BookmarkManager(UrlMarkManager):

    """Manager for bookmarks.

    The primary key for bookmarks is their *url*, this means:

        - self.marks maps URLs to titles.
        - changed gets emitted with the URL as first argument and the title as
          second argument.
    """

    def _init_lineparser(self):
        # Bookmarks live in config/bookmarks/urls; make sure the parent
        # directory exists before the line parser touches the file.
        bookmarks_directory = os.path.join(standarddir.config(), "bookmarks")
        os.makedirs(bookmarks_directory, exist_ok=True)

        bookmarks_subdir = os.path.join("bookmarks", "urls")
        self._lineparser = lineparser.LineParser(
            standarddir.config(), bookmarks_subdir, parent=self
        )

    def _init_savemanager(self, save_manager):
        filename = os.path.join(standarddir.config(), "bookmarks", "urls")
        save_manager.add_saveable(
            "bookmark-manager", self.save, self.changed, filename=filename
        )

    def _parse_line(self, line):
        # Format: "<url> <title...>"; the title is optional.
        parts = line.split(maxsplit=1)
        if not parts:
            return
        self.marks[parts[0]] = parts[1] if len(parts) == 2 else ""

    def add(self, url, title, *, toggle=False):
        """Add a new bookmark.

        Args:
            url: The url to add as bookmark.
            title: The title for the new bookmark.
            toggle: remove the bookmark instead of raising an error if it
                    already exists.

        Return:
            True if the bookmark was added, and False if it was
            removed (only possible if toggle is True).
        """
        if not url.isValid():
            raise InvalidUrlError(urlutils.get_errstring(url))

        urlstr = url.toString(
            QUrl.UrlFormattingOption.RemovePassword
            | QUrl.ComponentFormattingOption.FullyEncoded
        )

        if urlstr not in self.marks:
            self.marks[urlstr] = title
            self.changed.emit()
            return True
        if toggle:
            # delete() emits changed itself.
            self.delete(urlstr)
            return False
        raise AlreadyExistsError("Bookmark already exists!")
|
plugins | app17 | """
$description Social platform delivering live broadcasts on diverse topics, from politics and music to entertainment.
$url 17app.co
$type live
"""
import logging
import re
from streamlink.plugin import Plugin, pluginmatcher
from streamlink.plugin.api import validate
from streamlink.stream.hls import HLSStream
from streamlink.stream.http import HTTPStream
log = logging.getLogger(__name__)
@pluginmatcher(
    re.compile(
        r"https?://17\.live/.+/live/(?P<channel>[^/&?]+)",
    )
)
class App17(Plugin):
    """Streamlink plugin for 17.live broadcasts."""

    def _get_streams(self):
        """Yield FLV and HLS streams for the matched 17.live channel."""
        channel = self.match.group("channel")
        self.session.http.headers.update({"Referer": self.url})

        # The "alive" endpoint returns either a list of FLV urls or an error
        # object; some error statuses (403/404/420) still carry a JSON body,
        # hence the wide acceptable_status tuple.
        data = self.session.http.post(
            f"https://wap-api.17app.co/api/v1/lives/{channel}/viewers/alive",
            data={"liveStreamID": channel},
            schema=validate.Schema(
                validate.parse_json(),
                validate.any(
                    {
                        "rtmpUrls": [
                            {
                                validate.optional("provider"): validate.any(int, None),
                                "url": validate.url(path=validate.endswith(".flv")),
                            }
                        ]
                    },
                    {"errorCode": int, "errorMessage": str},
                ),
            ),
            acceptable_status=(200, 403, 404, 420),
        )
        log.trace(f"{data!r}")
        if data.get("errorCode"):
            log.error(f"{data['errorCode']} - {data['errorMessage'].replace('Something wrong: ', '')}")
            return

        flv_url = data["rtmpUrls"][0]["url"]
        yield "live", HTTPStream(self.session, flv_url)

        # Derive an HLS playlist URL from the FLV URL; the rewrite rule
        # depends on the CDN ("wansu-" hosts use a different layout).
        if "wansu-" in flv_url:
            hls_url = flv_url.replace(".flv", "/playlist.m3u8")
        else:
            hls_url = flv_url.replace("live-hdl", "live-hls").replace(".flv", ".m3u8")

        s = HLSStream.parse_variant_playlist(self.session, hls_url)
        if not s:
            # No variants: expose the playlist itself as a single stream.
            yield "live", HLSStream(self.session, hls_url)
        elif len(s) == 1:
            yield "live", next(iter(s.values()))
        else:
            yield from s.items()


# Module-level export looked up by Streamlink's plugin loader.
__plugin__ = App17
|
engines | kickass | # SPDX-License-Identifier: AGPL-3.0-or-later
"""
Kickass Torrent (Videos, Music, Files)
"""
from operator import itemgetter
from urllib.parse import quote, urljoin
from lxml import html
from searx.utils import convert_str_to_int, extract_text, get_torrent_size
# about: engine metadata shown by searx
# NOTE(review): "website" points at kickass.so while requests go to
# kickass.cd below - presumably a mirror domain; verify.
about = {
    "website": "https://kickass.so",
    "wikidata_id": "Q17062285",
    "official_api_documentation": None,
    "use_official_api": False,
    "require_api_key": False,
    "results": "HTML",
}

# engine dependent config
categories = ["videos", "music", "files"]
paging = True

# search-url (pageno is 1-based, appended as a path segment)
url = "https://kickass.cd/"
search_url = url + "search/{search_term}/{pageno}/"

# specific xpath variables used while scraping each result row
magnet_xpath = './/a[@title="Torrent magnet link"]'
torrent_xpath = './/a[@title="Download torrent file"]'
content_xpath = './/span[@class="font11px lightgrey block"]'
# do search-request
def request(query, params):
params["url"] = search_url.format(search_term=quote(query), pageno=params["pageno"])
return params
# get response from search-request
def response(resp):
results = []
dom = html.fromstring(resp.text)
search_res = dom.xpath('//table[@class="data"]//tr')
# return empty array if nothing is found
if not search_res:
return []
# parse results
for result in search_res[1:]:
link = result.xpath('.//a[@class="cellMainLink"]')[0]
href = urljoin(url, link.attrib["href"])
title = extract_text(link)
content = extract_text(result.xpath(content_xpath))
seed = extract_text(result.xpath('.//td[contains(@class, "green")]'))
leech = extract_text(result.xpath('.//td[contains(@class, "red")]'))
filesize_info = extract_text(result.xpath('.//td[contains(@class, "nobr")]'))
files = extract_text(result.xpath('.//td[contains(@class, "center")][2]'))
seed = convert_str_to_int(seed)
leech = convert_str_to_int(leech)
filesize, filesize_multiplier = filesize_info.split()
filesize = get_torrent_size(filesize, filesize_multiplier)
if files.isdigit():
files = int(files)
else:
files = None
magnetlink = result.xpath(magnet_xpath)[0].attrib["href"]
torrentfile = result.xpath(torrent_xpath)[0].attrib["href"]
torrentfileurl = quote(torrentfile, safe="%/:=&?~#+!$,;'@()*")
# append result
results.append(
{
"url": href,
"title": title,
"content": content,
"seed": seed,
"leech": leech,
"filesize": filesize,
"files": files,
"magnetlink": magnetlink,
"torrentfile": torrentfileurl,
"template": "torrent.html",
}
)
# return results sorted by seeder
return sorted(results, key=itemgetter("seed"), reverse=True)
|
admin | users | from CTFd.admin import admin
from CTFd.models import Challenges, Tracking, Users
from CTFd.utils import get_config
from CTFd.utils.decorators import admins_only
from CTFd.utils.modes import TEAMS_MODE
from flask import render_template, request, url_for
from sqlalchemy.sql import not_
@admin.route("/admin/users")
@admins_only
def users_listing():
    """Paginated admin listing of users, optionally filtered by ?q= and ?field=."""
    q = request.args.get("q")
    field = request.args.get("field")
    # abs() guards against negative ?page= values from the query string.
    page = abs(request.args.get("page", 1, type=int))
    filters = []
    users = []

    if q:
        # The field exists as an exposed column
        # NOTE(review): `field` may be None or a non-column here; assumes
        # has_property copes with that - verify.
        if Users.__mapper__.has_property(field):
            filters.append(getattr(Users, field).like("%{}%".format(q)))

    if q and field == "ip":
        # IP search goes through the Tracking table instead of a Users column.
        users = (
            Users.query.join(Tracking, Users.id == Tracking.user_id)
            .filter(Tracking.ip.like("%{}%".format(q)))
            .order_by(Users.id.asc())
            .paginate(page=page, per_page=50)
        )
    else:
        users = (
            Users.query.filter(*filters)
            .order_by(Users.id.asc())
            .paginate(page=page, per_page=50)
        )

    # Drop "page" so prev/next links keep the other query args but get a
    # fresh page number from the paginator.
    args = dict(request.args)
    args.pop("page", 1)

    return render_template(
        "admin/users/users.html",
        users=users,
        prev_page=url_for(request.endpoint, page=users.prev_num, **args),
        next_page=url_for(request.endpoint, page=users.next_num, **args),
        q=q,
        field=field,
    )
@admin.route("/admin/users/new")
@admins_only
def users_new():
return render_template("admin/users/new.html")
@admin.route("/admin/users/<int:user_id>")
@admins_only
def users_detail(user_id):
# Get user object
user = Users.query.filter_by(id=user_id).first_or_404()
# Get the user's solves
solves = user.get_solves(admin=True)
# Get challenges that the user is missing
if get_config("user_mode") == TEAMS_MODE:
if user.team:
all_solves = user.team.get_solves(admin=True)
else:
all_solves = user.get_solves(admin=True)
else:
all_solves = user.get_solves(admin=True)
solve_ids = [s.challenge_id for s in all_solves]
missing = Challenges.query.filter(not_(Challenges.id.in_(solve_ids))).all()
# Get IP addresses that the User has used
addrs = (
Tracking.query.filter_by(user_id=user_id).order_by(Tracking.date.desc()).all()
)
# Get Fails
fails = user.get_fails(admin=True)
# Get Awards
awards = user.get_awards(admin=True)
# Check if the user has an account (team or user)
# so that we don't throw an error if they dont
if user.account:
score = user.account.get_score(admin=True)
place = user.account.get_place(admin=True)
else:
score = None
place = None
return render_template(
"admin/users/user.html",
solves=solves,
user=user,
addrs=addrs,
score=score,
missing=missing,
place=place,
fails=fails,
awards=awards,
)
|
plugins | nounused_test | __copyright__ = "Copyright (C) 2014-2017 Martin Blais"
__license__ = "GNU GPLv2"
import unittest
from beancount import loader
from beancount.parser import cmptest
from beancount.plugins import nounused
class TestValidateUnusedAccounts(cmptest.TestCase):
    # NOTE: the docstring below is NOT documentation -- @loader.load_doc()
    # parses it as the beancount ledger under test, so its text must stay
    # exactly as written.
    @loader.load_doc()
    def test_validate_unused_accounts(self, entries, _, options_map):
        """
        2014-01-01 open Assets:Account1 ; Used, kept open
        2014-01-01 open Assets:Account2 ; Used and closed
        2014-01-01 open Assets:Account3 ; Unused
        2014-01-01 open Equity:Opening-Balances
        2014-02-01 *
          Assets:Account1 1 USD
          Assets:Account2 1 USD
          Equity:Opening-Balances -2 USD
        2014-06-01 close Assets:Account2
        """
        # Only Account3 is never posted to, so exactly one error is expected.
        _, errors = nounused.validate_unused_accounts(entries, options_map)
        self.assertEqual(1, len(errors))
        self.assertEqual("Assets:Account3", errors[0].entry.account)
if __name__ == "__main__":
unittest.main()
|
list | embed | """ book list views"""
from bookwyrm import models
from bookwyrm.settings import PAGE_LENGTH
from django.core.paginator import Paginator
from django.db.models import Avg, DecimalField
from django.db.models.functions import Coalesce
from django.http import Http404
from django.shortcuts import get_object_or_404
from django.template.response import TemplateResponse
from django.views import View
from django.views.decorators.clickjacking import xframe_options_exempt
# pylint: disable=no-self-use
class EmbedList(View):
    """Embedded (iframe-able) book list page."""

    def get(self, request, list_id, list_key):
        """Display a book list, guarded by the list's embed key."""
        book_list = get_object_or_404(models.List, id=list_id)

        # The key in the URL must match the list's embed key.
        if list_key != str(book_list.embed_key.hex):
            raise Http404()

        # Validate sorting parameters, falling back to the defaults.
        sort_by = request.GET.get("sort_by", "order")
        if sort_by not in ("order", "title", "rating"):
            sort_by = "order"
        direction = request.GET.get("direction", "ascending")
        if direction not in ("ascending", "descending"):
            direction = "ascending"

        field_names = {
            "order": "order",
            "title": "book__title",
            "rating": "average_rating",
        }
        directional_sort_by = field_names[sort_by]
        if direction == "descending":
            directional_sort_by = "-" + directional_sort_by

        items = book_list.listitem_set.prefetch_related("user", "book", "book__authors")
        if sort_by == "rating":
            # Rating is computed, so it must be annotated before ordering.
            items = items.annotate(
                average_rating=Avg(
                    Coalesce("book__review__rating", 0.0),
                    output_field=DecimalField(),
                )
            )
        items = items.filter(approved=True).order_by(directional_sort_by)

        paginated = Paginator(items, PAGE_LENGTH)
        page = paginated.get_page(request.GET.get("page"))
        data = {
            "list": book_list,
            "items": page,
            "page_range": paginated.get_elided_page_range(
                page.number, on_each_side=2, on_ends=1
            ),
        }
        return TemplateResponse(request, "lists/embed-list.html", data)
@xframe_options_exempt
def unsafe_embed_list(request, *args, **kwargs):
    """Serve EmbedList with clickjacking protection disabled for iframe use."""
    return EmbedList.as_view()(request, *args, **kwargs)
|
builtinViewColumns | projectionRange | # coding: utf-8
# =============================================================================
# Copyright (C) 2010 Diego Duclos
#
# This file is part of pyfa.
#
# pyfa is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# pyfa is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with pyfa. If not, see <http://www.gnu.org/licenses/>.
# =============================================================================
import gui.mainFrame
# noinspection PyPackageRequirements
import wx
from eos.saveddata.fit import Fit
from gui.bitmap_loader import BitmapLoader
from gui.utils.numberFormatter import formatAmount
from gui.viewColumn import ViewColumn
from logbook import Logger
pyfalog = Logger(__name__)
class ProjectionRangeColumn(ViewColumn):
    """Fitting-view column showing the range at which an item is projected."""

    name = "Projection Range"

    def __init__(self, fittingView, params):
        super().__init__(fittingView)
        self.mainFrame = gui.mainFrame.MainFrame.getInstance()
        self.imageId = fittingView.imageList.GetImageIndex(1391, "icons")
        self.bitmap = BitmapLoader.getBitmap(1391, "icons")
        self.mask = wx.LIST_MASK_IMAGE

    def getText(self, stuff):
        """Return the formatted projection range for *stuff*, or '' if none."""
        if isinstance(stuff, Fit):
            fitID = self.mainFrame.getActiveFit()
            info = stuff.getProjectionInfo(fitID)
            # Fix: getProjectionInfo may return None (e.g. projection info is
            # missing for the active fit); previously this crashed with an
            # AttributeError on info.projectionRange.
            if info is None:
                pyfalog.warning(
                    "Projected fit {} has no projection info for fit {}".format(
                        stuff, fitID
                    )
                )
                return ""
            projRange = info.projectionRange
        else:
            projRange = getattr(stuff, "projectionRange", None)
        if projRange is None:
            return ""
        # 3 significant digits, suffixed with the metre unit.
        return formatAmount(projRange, 3, 0, 3, unitName="m")

    def getToolTip(self, mod):
        return "Projection Range"


ProjectionRangeColumn.register()
|
css | csscomment | """CSSComment is not defined in DOM Level 2 at all but a cssutils defined
class only.
Implements CSSRule which is also extended for a CSSComment rule type.
"""
__all__ = ["CSSComment"]
__docformat__ = "restructuredtext"
__version__ = "$Id$"
import xml.dom
import cssrule
import cssutils
class CSSComment(cssrule.CSSRule):
    """
    Represents a CSS comment (cssutils only; DOM Level 2 has no comment rule).

    Format::

        /*...*/
    """

    def __init__(
        self, cssText=None, parentRule=None, parentStyleSheet=None, readonly=False
    ):
        """Initialize the rule, optionally parsing *cssText* immediately."""
        super(CSSComment, self).__init__(
            parentRule=parentRule, parentStyleSheet=parentStyleSheet
        )
        # Raw comment value once successfully parsed (None until then).
        self._cssText = None
        if cssText:
            self._setCssText(cssText)
        # Set readonly only after the initial text is in place, otherwise
        # _setCssText above would refuse to modify the rule.
        self._readonly = readonly

    def __repr__(self):
        return "cssutils.css.%s(cssText=%r)" % (self.__class__.__name__, self.cssText)

    def __str__(self):
        return "<cssutils.css.%s object cssText=%r at 0x%x>" % (
            self.__class__.__name__,
            self.cssText,
            id(self),
        )

    def _getCssText(self):
        """Return serialized property cssText."""
        return cssutils.ser.do_CSSComment(self)

    def _setCssText(self, cssText):
        """
        :param cssText:
            textual text to set or tokenlist which is not tokenized
            anymore. May also be a single token for this rule
        :exceptions:
            - :exc:`~xml.dom.SyntaxErr`:
              Raised if the specified CSS string value has a syntax error and
              is unparsable.
            - :exc:`~xml.dom.InvalidModificationErr`:
              Raised if the specified CSS string value represents a different
              type of rule than the current one.
            - :exc:`~xml.dom.NoModificationAllowedErr`:
              Raised if the rule is readonly.
        """
        # Base-class hook; raises NoModificationAllowedErr when readonly.
        super(CSSComment, self)._setCssText(cssText)
        tokenizer = self._tokenize2(cssText)
        commenttoken = self._nexttoken(tokenizer)
        unexpected = self._nexttoken(tokenizer)
        # Valid input is exactly one COMMENT token and nothing after it.
        if (
            not commenttoken
            or self._type(commenttoken) != self._prods.COMMENT
            or unexpected
        ):
            self._log.error(
                "CSSComment: Not a CSSComment: %r" % self._valuestr(cssText),
                error=xml.dom.InvalidModificationErr,
            )
        else:
            self._cssText = self._tokenvalue(commenttoken)

    cssText = property(
        _getCssText,
        _setCssText,
        doc="The parsable textual representation of this rule.",
    )

    type = property(
        lambda self: self.COMMENT,
        doc="The type of this rule, as defined by a CSSRule " "type constant.",
    )

    # constant but needed: a lone comment token is always well-formed.
    wellformed = property(lambda self: True)
|
fighter | projectedChangeProjectionRange | import wx
from logbook import Logger
from service.fit import Fit
pyfalog = Logger(__name__)
class CalcChangeProjectedFighterProjectionRangeCommand(wx.Command):
    """Undoable command that changes a projected fighter's projection range."""

    def __init__(self, fitID, position, projectionRange):
        wx.Command.__init__(self, True, "Change Projected Fighter Projection Range")
        self.fitID = fitID
        self.position = position
        self.projectionRange = projectionRange
        # Filled in by Do() so Undo() can restore the previous value.
        self.savedProjectionRange = None

    def Do(self):
        pyfalog.debug(
            "Doing changing of projected fighter projection range to {} at position {} for fit {}".format(
                self.projectionRange, self.position, self.fitID
            )
        )
        fighter = Fit.getInstance().getFit(self.fitID).projectedFighters[self.position]
        # No-op when the value would not change; returning False keeps the
        # command off the undo stack.
        if fighter.projectionRange == self.projectionRange:
            return False
        self.savedProjectionRange = fighter.projectionRange
        fighter.projectionRange = self.projectionRange
        return True

    def Undo(self):
        pyfalog.debug(
            "Undoing changing of projected fighter projection range to {} at position {} for fit {}".format(
                self.projectionRange, self.position, self.fitID
            )
        )
        # Undo is just applying the saved value through a fresh command.
        restore = CalcChangeProjectedFighterProjectionRangeCommand(
            fitID=self.fitID,
            position=self.position,
            projectionRange=self.savedProjectionRange,
        )
        return restore.Do()
|
extractor | tennistv | # coding: utf-8
from __future__ import unicode_literals
import json
from ..utils import ExtractorError, unified_timestamp
from .common import InfoExtractor
class TennisTVIE(InfoExtractor):
    """Extractor for tennistv.com videos; requires a subscribed account."""

    _VALID_URL = r"https?://(?:www\.)?tennistv\.com/videos/(?P<id>[-a-z0-9]+)"
    _TEST = {
        "url": "https://www.tennistv.com/videos/indian-wells-2018-verdasco-fritz",
        "info_dict": {
            "id": "indian-wells-2018-verdasco-fritz",
            "ext": "mp4",
            "title": "Fernando Verdasco v Taylor Fritz",
            "description": "re:^After his stunning victory.{174}$",
            "thumbnail": "https://atp-prod.akamaized.net/api/images/v1/images/112831/landscape/1242/0",
            "timestamp": 1521017381,
            "upload_date": "20180314",
        },
        "params": {
            "skip_download": True,
        },
        "skip": "Requires email and password of a subscribed account",
    }
    _NETRC_MACHINE = "tennistv"

    def _login(self):
        """Log in with netrc/CLI credentials and store the session token."""
        username, password = self._get_login_info()
        # Credentials are mandatory for this site.
        if not username or not password:
            raise ExtractorError(
                "No login info available, needed for using %s." % self.IE_NAME,
                expected=True,
            )
        login_form = {
            "Email": username,
            "Password": password,
        }
        login_json = json.dumps(login_form).encode("utf-8")
        headers = {
            "content-type": "application/json",
            "Referer": "https://www.tennistv.com/login",
            "Origin": "https://www.tennistv.com",
        }
        login_result = self._download_json(
            "https://www.tennistv.com/api/users/v1/login",
            None,
            note="Logging in",
            errnote="Login failed (wrong password?)",
            headers=headers,
            data=login_json,
        )
        # A non-zero errorCode signals a failed login.
        if login_result["error"]["errorCode"]:
            raise ExtractorError(
                "Login failed, %s said: %r"
                % (self.IE_NAME, login_result["error"]["errorMessage"])
            )
        # A login can succeed without an active subscription; only warn.
        if login_result["entitlement"] != "SUBSCRIBED":
            self.report_warning(
                "%s may not be subscribed to %s." % (username, self.IE_NAME)
            )
        # Used as the "ATP <token>" authorization header in _real_extract.
        self._session_token = login_result["sessionToken"]

    def _real_initialize(self):
        # Login is always required before any extraction.
        self._login()

    def _real_extract(self, url):
        """Resolve a video page URL into formats and metadata."""
        video_id = self._match_id(url)
        webpage = self._download_webpage(url, video_id)
        # The numeric id embedded in the page, used by the entitlement check.
        internal_id = self._search_regex(
            r"video=([0-9]+)", webpage, "internal video id"
        )
        headers = {
            "Origin": "https://www.tennistv.com",
            "authorization": "ATP %s" % self._session_token,
            "content-type": "application/json",
            "Referer": url,
        }
        check_data = {
            "videoID": internal_id,
            "VideoUrlType": "HLSV3",
        }
        check_json = json.dumps(check_data).encode("utf-8")
        # The entitlement check response carries the HLS manifest URL.
        check_result = self._download_json(
            "https://www.tennistv.com/api/users/v1/entitlementchecknondiva",
            video_id,
            note="Checking video authorization",
            headers=headers,
            data=check_json,
        )
        formats = self._extract_m3u8_formats(
            check_result["contentUrl"], video_id, ext="mp4"
        )
        # Metadata comes from a separate, unauthenticated channel endpoint.
        vdata_url = (
            "https://www.tennistv.com/api/channels/v1/de/none/video/%s" % video_id
        )
        vdata = self._download_json(vdata_url, video_id)
        timestamp = unified_timestamp(vdata["timestamp"])
        thumbnail = vdata["video"]["thumbnailUrl"]
        description = vdata["displayText"]["description"]
        title = vdata["video"]["title"]
        series = vdata["tour"]
        venue = vdata["displayText"]["venue"]
        round_str = vdata["seo"]["round"]
        return {
            "id": video_id,
            "title": title,
            "description": description,
            "formats": formats,
            "thumbnail": thumbnail,
            "timestamp": timestamp,
            "series": series,
            "season": venue,
            "episode": round_str,
        }
|
serializers | incidents | from apps.alerts.models import AlertGroup
from apps.telegram.models.message import TelegramMessage
from common.api_helpers.mixins import EagerLoadingMixin
from common.constants.alert_group_restrictions import IS_RESTRICTED_TITLE
from django.db.models import Prefetch
from rest_framework import serializers
class IncidentSerializer(EagerLoadingMixin, serializers.ModelSerializer):
    """Public-API serializer for alert groups ("incidents")."""

    id = serializers.CharField(read_only=True, source="public_primary_key")
    integration_id = serializers.CharField(source="channel.public_primary_key")
    route_id = serializers.SerializerMethodField()
    created_at = serializers.DateTimeField(source="started_at")
    alerts_count = serializers.SerializerMethodField()
    title = serializers.SerializerMethodField()
    state = serializers.SerializerMethodField()

    # Relations eagerly loaded by EagerLoadingMixin to avoid N+1 queries.
    SELECT_RELATED = [
        "channel",
        "channel_filter",
        "slack_message",
        "channel__organization",
    ]
    PREFETCH_RELATED = [
        "alerts",
        # Only group-chat messages (negative chat ids) of the alert-group type.
        Prefetch(
            "telegram_messages",
            TelegramMessage.objects.filter(
                chat_id__startswith="-",
                message_type=TelegramMessage.ALERT_GROUP_MESSAGE,
            ),
            to_attr="prefetched_telegram_messages",
        ),
    ]

    class Meta:
        model = AlertGroup
        fields = [
            "id",
            "integration_id",
            "route_id",
            "alerts_count",
            "state",
            "created_at",
            "resolved_at",
            "acknowledged_at",
            "title",
            "permalinks",
        ]

    def get_title(self, obj):
        # Restricted alert groups hide their cached web title.
        return IS_RESTRICTED_TITLE if obj.is_restricted else obj.web_title_cache

    def get_alerts_count(self, obj):
        # len() over .all() reuses the prefetched "alerts" cache instead of
        # issuing a COUNT query.
        return len(obj.alerts.all())

    def get_state(self, obj):
        return obj.state

    def get_route_id(self, obj):
        if obj.channel_filter is not None:
            return obj.channel_filter.public_primary_key
        else:
            return None
|
widgets | canvas_renderer | # -*- coding: utf-8 -*-
# --------------------------------------------------------------------------
# Software: InVesalius - Software de Reconstrucao 3D de Imagens Medicas
# Copyright: (C) 2001 Centro de Pesquisas Renato Archer
# Homepage: http://www.softwarepublico.gov.br
# Contact: invesalius@cti.gov.br
# License: GNU - GPL 2 (LICENSE.txt/LICENCA.txt)
# --------------------------------------------------------------------------
# Este programa e software livre; voce pode redistribui-lo e/ou
# modifica-lo sob os termos da Licenca Publica Geral GNU, conforme
# publicada pela Free Software Foundation; de acordo com a versao 2
# da Licenca.
#
# Este programa eh distribuido na expectativa de ser util, mas SEM
# QUALQUER GARANTIA; sem mesmo a garantia implicita de
# COMERCIALIZACAO ou de ADEQUACAO A QUALQUER PROPOSITO EM
# PARTICULAR. Consulte a Licenca Publica Geral GNU para obter mais
# detalhes.
# --------------------------------------------------------------------------
import sys
import numpy as np
import wx
try:
from weakref import WeakMethod
except ImportError:
from weakrefmethod import WeakMethod
from invesalius.data import converters
from vtkmodules.vtkRenderingCore import vtkActor2D, vtkCoordinate, vtkImageMapper
class CanvasEvent:
    """Plain event object handed to canvas handlers and subscribed callbacks."""

    def __init__(
        self,
        event_name,
        root_event_obj,
        pos,
        viewer,
        renderer,
        control_down=False,
        alt_down=False,
        shift_down=False,
    ):
        self.event_name = event_name
        self.root_event_obj = root_event_obj
        self.position = pos
        self.viewer = viewer
        self.renderer = renderer
        # Modifier-key states captured at event time.
        self.control_down = control_down
        self.alt_down = alt_down
        self.shift_down = shift_down
class CanvasRendererCTX:
    def __init__(self, viewer, evt_renderer, canvas_renderer, orientation=None):
        """
        A canvas rendered over a vtkRenderer using wx.GraphicsContext.

        Params:
            viewer: the owning viewer; its wx interactor is used for events.
            evt_renderer: a vtkRenderer which this class watches for render
                events to update the canvas content.
            canvas_renderer: the vtkRenderer where the canvas actor is added.
            orientation: optional tag kept on the instance (used by callers).

        TODO: Verify why in Windows the colors are strange when using
        transparency.
        TODO: Add support to events (ex. click on a square).
        """
        self.viewer = viewer
        self.canvas_renderer = canvas_renderer
        self.evt_renderer = evt_renderer
        self._size = self.canvas_renderer.GetSize()
        # Top-level drawables; flattened/sorted into _ordered_draw_list.
        self.draw_list = []
        self._ordered_draw_list = []
        self.orientation = orientation
        # Active wx.GraphicsContext, only non-None while painting.
        self.gc = None
        self.last_cam_modif_time = -1
        # Dirty flag: OnPaint repaints only when modified or camera moved.
        self.modified = True
        self._drawn = False
        self._init_canvas()
        # Hover / drag / selection state for mouse interaction.
        self._over_obj = None
        self._drag_obj = None
        self._selected_obj = None
        self._callback_events = {
            "LeftButtonPressEvent": [],
            "LeftButtonReleaseEvent": [],
            "LeftButtonDoubleClickEvent": [],
            "MouseMoveEvent": [],
        }
        self._bind_events()
def _bind_events(self):
iren = self.viewer.interactor
iren.Bind(wx.EVT_MOTION, self.OnMouseMove)
iren.Bind(wx.EVT_LEFT_DOWN, self.OnLeftButtonPress)
iren.Bind(wx.EVT_LEFT_UP, self.OnLeftButtonRelease)
iren.Bind(wx.EVT_LEFT_DCLICK, self.OnDoubleClick)
self.canvas_renderer.AddObserver("StartEvent", self.OnPaint)
def subscribe_event(self, event, callback):
ref = WeakMethod(callback)
self._callback_events[event].append(ref)
def unsubscribe_event(self, event, callback):
for n, cb in enumerate(self._callback_events[event]):
if cb() == callback:
print("removed")
self._callback_events[event].pop(n)
return
    def propagate_event(self, root, event):
        """Bubble *event* from *root* up the parent chain, calling each
        node's on_<event_name> handler when present."""
        print("propagating", event.event_name, "from", root)
        node = root
        callback_name = "on_%s" % event.event_name
        while node:
            try:
                getattr(node, callback_name)(event)
            except AttributeError as e:
                # NOTE(review): this also swallows AttributeErrors raised
                # *inside* a handler, not only missing handlers -- confirm
                # whether that is intentional.
                print("errror", node, e)
            node = node.parent
    def _init_canvas(self):
        """Create the RGBA backing array, its VTK image/actor and the wx
        image buffers used for drawing."""
        w, h = self._size
        # RGBA byte array shared (via the VTK image) with the 2D actor.
        self._array = np.zeros((h, w, 4), dtype=np.uint8)
        self._cv_image = converters.np_rgba_to_vtk(self._array)
        self.mapper = vtkImageMapper()
        self.mapper.SetInputData(self._cv_image)
        self.mapper.SetColorWindow(255)
        self.mapper.SetColorLevel(128)
        self.actor = vtkActor2D()
        self.actor.SetPosition(0, 0)
        self.actor.SetMapper(self.mapper)
        # Slightly below 1.0 so the actor is composited with blending.
        self.actor.GetProperty().SetOpacity(0.99)
        self.canvas_renderer.AddActor2D(self.actor)
        # Separate RGB and alpha planes backing the wx.Image.
        self.rgb = np.zeros((h, w, 3), dtype=np.uint8)
        self.alpha = np.zeros((h, w, 1), dtype=np.uint8)
        self.bitmap = wx.Bitmap.FromRGBA(w, h)
        try:
            self.image = wx.Image(w, h, self.rgb, self.alpha)
        except TypeError:
            # Fallback for older wxPython APIs -- presumably classic wx;
            # confirm before removing.
            self.image = wx.ImageFromBuffer(w, h, self.rgb, self.alpha)
    def _resize_canvas(self, w, h):
        """Recreate all backing buffers for a new (w, h) size and mark the
        canvas dirty; mirrors _init_canvas but reuses the existing mapper."""
        self._array = np.zeros((h, w, 4), dtype=np.uint8)
        self._cv_image = converters.np_rgba_to_vtk(self._array)
        self.mapper.SetInputData(self._cv_image)
        self.mapper.Update()
        self.rgb = np.zeros((h, w, 3), dtype=np.uint8)
        self.alpha = np.zeros((h, w, 1), dtype=np.uint8)
        self.bitmap = wx.Bitmap.FromRGBA(w, h)
        try:
            self.image = wx.Image(w, h, self.rgb, self.alpha)
        except TypeError:
            # Fallback for older wxPython APIs (see _init_canvas).
            self.image = wx.ImageFromBuffer(w, h, self.rgb, self.alpha)
        self.modified = True
    def remove_from_renderer(self):
        """Detach the canvas actor and stop watching render events."""
        self.canvas_renderer.RemoveActor(self.actor)
        # NOTE(review): RemoveObservers removes *all* StartEvent observers on
        # evt_renderer, not only the one added in _bind_events -- confirm.
        self.evt_renderer.RemoveObservers("StartEvent")
def get_over_mouse_obj(self, x, y):
for n, i in self._ordered_draw_list[::-1]:
try:
obj = i.is_over(x, y)
self._over_obj = obj
if obj:
print("is over at", n, i)
return True
except AttributeError:
pass
return False
    def Refresh(self):
        """Mark the canvas dirty and ask the interactor to re-render."""
        # Order matters: modified must be set before Render() so that the
        # resulting OnPaint call actually repaints.
        self.modified = True
        self.viewer.interactor.Render()
    def OnMouseMove(self, evt):
        """Dispatch mouse motion: drag events while dragging, otherwise
        hover tracking with mouse_enter/mouse_leave notifications."""
        try:
            x, y = self.viewer.get_vtk_mouse_position()
        except AttributeError:
            # Viewer cannot map the position yet; let wx handle the event.
            evt.Skip()
            return
        redraw = False
        if self._drag_obj:
            # While dragging, route every move to the dragged object.
            redraw = True
            evt_obj = CanvasEvent(
                "mouse_move",
                self._drag_obj,
                (x, y),
                self.viewer,
                self.evt_renderer,
                control_down=evt.ControlDown(),
                alt_down=evt.AltDown(),
                shift_down=evt.ShiftDown(),
            )
            self.propagate_event(self._drag_obj, evt_obj)
        else:
            was_over = self._over_obj
            # Redraw when the pointer enters something or was over something.
            redraw = self.get_over_mouse_obj(x, y) or was_over
            if was_over and was_over != self._over_obj:
                # Pointer left the previously hovered object.
                try:
                    evt_obj = CanvasEvent(
                        "mouse_leave",
                        was_over,
                        (x, y),
                        self.viewer,
                        self.evt_renderer,
                        control_down=evt.ControlDown(),
                        alt_down=evt.AltDown(),
                        shift_down=evt.ShiftDown(),
                    )
                    was_over.on_mouse_leave(evt_obj)
                except AttributeError:
                    pass
            if self._over_obj:
                # Pointer is (now) over an object; notify it.
                try:
                    evt_obj = CanvasEvent(
                        "mouse_enter",
                        self._over_obj,
                        (x, y),
                        self.viewer,
                        self.evt_renderer,
                        control_down=evt.ControlDown(),
                        alt_down=evt.AltDown(),
                        shift_down=evt.ShiftDown(),
                    )
                    self._over_obj.on_mouse_enter(evt_obj)
                except AttributeError:
                    pass
        if redraw:
            self.Refresh()
        evt.Skip()
    def OnLeftButtonPress(self, evt):
        """Handle left click: select/drag the hovered object, or fire the
        'leftclick' callbacks and deselect when clicking empty space."""
        try:
            x, y = self.viewer.get_vtk_mouse_position()
        except AttributeError:
            evt.Skip()
            return
        # Only draggable objects (with on_mouse_move) can be selected/dragged.
        if self._over_obj and hasattr(self._over_obj, "on_mouse_move"):
            if hasattr(self._over_obj, "on_select"):
                # Deselect whatever was previously selected first.
                try:
                    evt_obj = CanvasEvent(
                        "deselect",
                        self._over_obj,
                        (x, y),
                        self.viewer,
                        self.evt_renderer,
                        control_down=evt.ControlDown(),
                        alt_down=evt.AltDown(),
                        shift_down=evt.ShiftDown(),
                    )
                    # AttributeError here covers _selected_obj being None.
                    self.propagate_event(self._selected_obj, evt_obj)
                except AttributeError:
                    pass
            evt_obj = CanvasEvent(
                "select",
                self._over_obj,
                (x, y),
                self.viewer,
                self.evt_renderer,
                control_down=evt.ControlDown(),
                alt_down=evt.AltDown(),
                shift_down=evt.ShiftDown(),
            )
            self.propagate_event(self._over_obj, evt_obj)
            self._selected_obj = self._over_obj
            self.Refresh()
            # Start dragging the newly selected object.
            self._drag_obj = self._over_obj
        else:
            self.get_over_mouse_obj(x, y)
            if not self._over_obj:
                # Click on empty canvas: notify subscribed callbacks.
                evt_obj = CanvasEvent(
                    "leftclick",
                    None,
                    (x, y),
                    self.viewer,
                    self.evt_renderer,
                    control_down=evt.ControlDown(),
                    alt_down=evt.AltDown(),
                    shift_down=evt.ShiftDown(),
                )
                # Only the first still-alive weak callback is invoked.
                for cb in self._callback_events["LeftButtonPressEvent"]:
                    if cb() is not None:
                        cb()(evt_obj)
                        break
                # Deselect the current selection, if it supports it.
                try:
                    evt_obj = CanvasEvent(
                        "deselect",
                        self._over_obj,
                        (x, y),
                        self.viewer,
                        self.evt_renderer,
                        control_down=evt.ControlDown(),
                        alt_down=evt.AltDown(),
                        shift_down=evt.ShiftDown(),
                    )
                    if self._selected_obj.on_deselect(evt_obj):
                        self.Refresh()
                except AttributeError:
                    pass
        evt.Skip()
def OnLeftButtonRelease(self, evt):
self._over_obj = None
self._drag_obj = None
evt.Skip()
    def OnDoubleClick(self, evt):
        """Fire the first live 'double_left_click' callback at the cursor."""
        try:
            x, y = self.viewer.get_vtk_mouse_position()
        except AttributeError:
            evt.Skip()
            return
        evt_obj = CanvasEvent(
            "double_left_click",
            None,
            (x, y),
            self.viewer,
            self.evt_renderer,
            control_down=evt.ControlDown(),
            alt_down=evt.AltDown(),
            shift_down=evt.ShiftDown(),
        )
        # Only the first still-alive weak callback is invoked.
        for cb in self._callback_events["LeftButtonDoubleClickEvent"]:
            if cb() is not None:
                cb()(evt_obj)
                break
        evt.Skip()
    def OnPaint(self, evt, obj):
        """Repaint the canvas into the shared RGBA array when the canvas is
        dirty or the camera changed; VTK StartEvent observer callback."""
        size = self.canvas_renderer.GetSize()
        w, h = size
        # NOTE(review): ew/eh and coord below are computed but never used.
        ew, eh = self.evt_renderer.GetSize()
        if self._size != size:
            self._size = size
            self._resize_canvas(w, h)
        cam_modif_time = self.evt_renderer.GetActiveCamera().GetMTime()
        # Skip repainting when nothing changed since the last paint.
        if (not self.modified) and cam_modif_time == self.last_cam_modif_time:
            return
        self.last_cam_modif_time = cam_modif_time
        self._array[:] = 0
        coord = vtkCoordinate()
        self.image.SetDataBuffer(self.rgb)
        self.image.SetAlphaBuffer(self.alpha)
        self.image.Clear()
        gc = wx.GraphicsContext.Create(self.image)
        # Antialiasing is disabled except on macOS.
        if sys.platform != "darwin":
            gc.SetAntialiasMode(0)
        self.gc = gc
        font = wx.SystemSettings.GetFont(wx.SYS_DEFAULT_GUI_FONT)
        font = gc.CreateFont(font, (0, 0, 255))
        gc.SetFont(font)
        pen = wx.Pen(wx.Colour(255, 0, 0, 128), 2, wx.SOLID)
        brush = wx.Brush(wx.Colour(0, 255, 0, 128))
        gc.SetPen(pen)
        gc.SetBrush(brush)
        # Flip Y: canvas coordinates start at the bottom of the screen.
        gc.Scale(1, -1)
        # Stable sort by layer; ties keep _follow_draw_list traversal order.
        self._ordered_draw_list = sorted(self._follow_draw_list(), key=lambda x: x[0])
        for (
            l,
            d,
        ) in self._ordered_draw_list:
            d.draw_to_canvas(gc, self)
        gc.Destroy()
        self.gc = None
        # Copy the rendered image back only if something was actually drawn.
        if self._drawn:
            self.bitmap = self.image.ConvertToBitmap()
            self.bitmap.CopyToBuffer(self._array, wx.BitmapBufferFormat_RGBA)
        self._cv_image.Modified()
        self.modified = False
        self._drawn = False
    def _follow_draw_list(self):
        """Flatten draw_list (and nested children) into (layer, element)
        pairs. Child layers are cumulative (parent layer + child layer).
        The resulting order matters: OnPaint sorts it with a stable sort,
        so same-layer elements keep this traversal order."""
        out = []

        def loop(node, layer):
            for child in node.children:
                # Depth-first: grandchildren are appended before the child.
                loop(child, layer + child.layer)
                out.append((layer + child.layer, child))

        for element in self.draw_list:
            out.append((element.layer, element))
            if hasattr(element, "children"):
                loop(element, element.layer)
        return out
def draw_element_to_array(self, elements, size=None, antialiasing=False, flip=True):
"""
Draws the given elements to a array.
Params:
elements: a list of elements (objects that contains the
draw_to_canvas method) to draw to a array.
flip: indicates if it is necessary to flip. In this canvas the Y
coordinates starts in the bottom of the screen.
"""
if size is None:
size = self.canvas_renderer.GetSize()
w, h = size
image = wx.Image(w, h)
image.Clear()
arr = np.zeros((h, w, 4), dtype=np.uint8)
gc = wx.GraphicsContext.Create(image)
if antialiasing:
gc.SetAntialiasMode(0)
old_gc = self.gc
self.gc = gc
font = wx.SystemSettings.GetFont(wx.SYS_DEFAULT_GUI_FONT)
font = gc.CreateFont(font, (0, 0, 255))
gc.SetFont(font)
pen = wx.Pen(wx.Colour(255, 0, 0, 128), 2, wx.SOLID)
brush = wx.Brush(wx.Colour(0, 255, 0, 128))
gc.SetPen(pen)
gc.SetBrush(brush)
gc.Scale(1, -1)
for element in elements:
element.draw_to_canvas(gc, self)
gc.Destroy()
self.gc = old_gc
bitmap = image.ConvertToBitmap()
bitmap.CopyToBuffer(arr, wx.BitmapBufferFormat_RGBA)
if flip:
arr = arr[::-1]
return arr
def calc_text_size(self, text, font=None):
"""
Given an unicode text and a font returns the width and height of the
rendered text in pixels.
Params:
text: An unicode text.
font: An wxFont.
Returns:
A tuple with width and height values in pixels
"""
if self.gc is None:
return None
gc = self.gc
if font is None:
font = wx.SystemSettings.GetFont(wx.SYS_DEFAULT_GUI_FONT)
_font = gc.CreateFont(font)
gc.SetFont(_font)
w = 0
h = 0
for t in text.split("\n"):
_w, _h = gc.GetTextExtent(t)
w = max(w, _w)
h += _h
return w, h
    def draw_line(
        self,
        pos0,
        pos1,
        arrow_start=False,
        arrow_end=False,
        colour=(255, 0, 0, 128),
        width=2,
        style=wx.SOLID,
    ):
        """
        Draw a line from pos0 to pos1, with optional arrow heads.

        Params:
            pos0: the start of the line position (x, y).
            pos1: the end of the line position (x, y).
            arrow_start: whether to draw an arrow head at the line start.
            arrow_end: whether to draw an arrow head at the line end.
            colour: RGBA line colour.
            width: the width of the line.
            style: default wx.SOLID.

        Returns None when no GraphicsContext is active.
        """
        if self.gc is None:
            return None
        gc = self.gc
        p0x, p0y = pos0
        p1x, p1y = pos1
        # Canvas Y grows downwards here, hence the sign flips.
        p0y = -p0y
        p1y = -p1y
        # NOTE(review): the *style* parameter is unused; the pen is always
        # created with wx.SOLID -- confirm whether that is intended.
        pen = wx.Pen(wx.Colour(*[int(c) for c in colour]), width, wx.SOLID)
        pen.SetCap(wx.CAP_BUTT)
        gc.SetPen(pen)
        path = gc.CreatePath()
        path.MoveToPoint(p0x, p0y)
        path.AddLineToPoint(p1x, p1y)
        gc.StrokePath(path)
        # The arrow-head size is derived from the width of the letter "M"
        # in the default GUI font.
        font = wx.SystemSettings.GetFont(wx.SYS_DEFAULT_GUI_FONT)
        font = gc.CreateFont(font)
        gc.SetFont(font)
        w, h = gc.GetTextExtent("M")
        p0 = np.array((p0x, p0y))
        p3 = np.array((p1x, p1y))
        if arrow_start:
            # Two strokes from p0, offset sideways from the unit direction.
            v = p3 - p0
            v = v / np.linalg.norm(v)
            iv = np.array((v[1], -v[0]))
            p1 = p0 + w * v + iv * w / 2.0
            p2 = p0 + w * v + (-iv) * w / 2.0
            path = gc.CreatePath()
            path.MoveToPoint(p0)
            path.AddLineToPoint(p1)
            path.MoveToPoint(p0)
            path.AddLineToPoint(p2)
            gc.StrokePath(path)
        if arrow_end:
            # Mirror construction at the far end of the line.
            v = p3 - p0
            v = v / np.linalg.norm(v)
            iv = np.array((v[1], -v[0]))
            p1 = p3 - w * v + iv * w / 2.0
            p2 = p3 - w * v + (-iv) * w / 2.0
            path = gc.CreatePath()
            path.MoveToPoint(p3)
            path.AddLineToPoint(p1)
            path.MoveToPoint(p3)
            path.AddLineToPoint(p2)
            gc.StrokePath(path)
        self._drawn = True
def draw_circle(
self,
center,
radius=2.5,
width=2,
line_colour=(255, 0, 0, 128),
fill_colour=(0, 0, 0, 0),
):
"""
Draw a circle centered at center with the given radius.
Params:
center: (x, y) position.
radius: float number.
width: line width.
line_colour: RGBA line colour
fill_colour: RGBA fill colour.
"""
if self.gc is None:
return None
gc = self.gc
pen = wx.Pen(wx.Colour(*line_colour), width, wx.SOLID)
gc.SetPen(pen)
brush = wx.Brush(wx.Colour(*fill_colour))
gc.SetBrush(brush)
cx, cy = center
cy = -cy
path = gc.CreatePath()
path.AddCircle(cx, cy, radius)
gc.StrokePath(path)
gc.FillPath(path)
self._drawn = True
return (cx, -cy, radius * 2, radius * 2)
    def draw_ellipse(
        self,
        center,
        width,
        height,
        line_width=2,
        line_colour=(255, 0, 0, 128),
        fill_colour=(0, 0, 0, 0),
    ):
        """
        Draw an ellipse centered at center with the given width and height.

        Params:
            center: (x, y) position.
            width: ellipse width (float number).
            height: ellipse height (float number).
            line_width: line width.
            line_colour: RGBA line colour.
            fill_colour: RGBA fill colour.

        Returns the untransformed bounding box (xi, yi, xf, yf), or None
        when no GraphicsContext is active.
        """
        if self.gc is None:
            return None
        gc = self.gc
        pen = wx.Pen(wx.Colour(*line_colour), line_width, wx.SOLID)
        gc.SetPen(pen)
        brush = wx.Brush(wx.Colour(*fill_colour))
        gc.SetBrush(brush)
        cx, cy = center
        # Bounding box in the caller's coordinates (the return value).
        xi = cx - width / 2.0
        xf = cx + width / 2.0
        yi = cy - height / 2.0
        yf = cy + height / 2.0
        # AddEllipse takes the top-left corner, so shift from the center
        # and flip Y (canvas Y grows downwards here).
        cx -= width / 2.0
        cy += height / 2.0
        cy = -cy
        path = gc.CreatePath()
        path.AddEllipse(cx, cy, width, height)
        gc.StrokePath(path)
        gc.FillPath(path)
        self._drawn = True
        return (xi, yi, xf, yf)
def draw_rectangle(
self,
pos,
width,
height,
line_colour=(255, 0, 0, 128),
fill_colour=(0, 0, 0, 0),
line_width=1,
pen_style=wx.PENSTYLE_SOLID,
brush_style=wx.BRUSHSTYLE_SOLID,
):
"""
Draw a rectangle with its top left at pos and with the given width and height.
Params:
pos: The top left pos (x, y) of the rectangle.
width: width of the rectangle.
height: heigth of the rectangle.
line_colour: RGBA line colour.
fill_colour: RGBA fill colour.
"""
if self.gc is None:
return None
gc = self.gc
px, py = pos
py = -py
pen = wx.Pen(wx.Colour(*line_colour), width=line_width, style=pen_style)
brush = wx.Brush(wx.Colour(*fill_colour), style=brush_style)
gc.SetPen(pen)
gc.SetBrush(brush)
gc.DrawRectangle(px, py, width, -height)
self._drawn = True
    def draw_text(self, text, pos, font=None, txt_colour=(255, 255, 255)):
        """
        Draw (possibly multiline) text.

        Params:
            text: a unicode text; lines are split on "\\n" and each line is
                stripped before drawing.
            pos: (x, y) top-left position.
            font: if None the default GUI font is used.
            txt_colour: RGB text colour.

        Returns None when no GraphicsContext is active.
        """
        if self.gc is None:
            return None
        gc = self.gc
        if font is None:
            font = wx.SystemSettings.GetFont(wx.SYS_DEFAULT_GUI_FONT)
        # Scale with the display's content scale (HiDPI support).
        font.Scale(self.viewer.GetContentScaleFactor())
        _font = gc.CreateFont(font, txt_colour)
        px, py = pos
        for t in text.split("\n"):
            t = t.strip()
            # Canvas Y grows downwards here, hence the sign flip.
            _py = -py
            _px = px
            gc.SetFont(_font)
            gc.DrawText(t, _px, _py)
            # Advance to the next line by the rendered line height.
            w, h = self.calc_text_size(t, font)
            py -= h
        self._drawn = True
    def draw_text_box(
        self,
        text,
        pos,
        font=None,
        txt_colour=(255, 255, 255),
        bg_colour=(128, 128, 128, 128),
        border=5,
    ):
        """
        Draw text inside a filled text box.

        Params:
            text: a unicode text.
            pos: (x, y) top-left position.
            font: if None the default GUI font is used.
            txt_colour: RGB text colour.
            bg_colour: RGBA box colour.
            border: the border size.

        Returns the box geometry (px, py, cw, ch), or None when no
        GraphicsContext is active.
        """
        if self.gc is None:
            return None
        gc = self.gc
        if font is None:
            font = wx.SystemSettings.GetFont(wx.SYS_DEFAULT_GUI_FONT)
            # Scale with the display's content scale (HiDPI support).
            font.Scale(self.viewer.GetContentScaleFactor())
        _font = gc.CreateFont(font, txt_colour)
        gc.SetFont(_font)
        w, h = self.calc_text_size(text, font)
        px, py = pos
        # Drawing the box: sized to the text plus the border on all sides.
        cw, ch = w + border * 2, h + border * 2
        self.draw_rectangle((px, py - ch), cw, ch, bg_colour, bg_colour)
        # Drawing the text, inset by the border.
        tpx, tpy = px + border, py - border
        self.draw_text(text, (tpx, tpy), font, txt_colour)
        self._drawn = True
        return px, py, cw, ch
def draw_arc(self, center, p0, p1, line_colour=(255, 0, 0, 128), width=2):
    """
    Draw an arc passing in p0 and p1 centered at center.

    The radius used is the smaller of |p0-center| and |p1-center|; the
    start/end angles are ordered by the modulo comparison below so the
    smaller angular sweep between the two points is selected.

    Params:
        center: (x, y) center of the arc.
        p0: (x, y).
        p1: (x, y).
        line_colour: RGBA line colour.
        width: width of the line.
    """
    if self.gc is None:
        return None
    gc = self.gc
    pen = wx.Pen(wx.Colour(*line_colour), width, wx.SOLID)
    gc.SetPen(pen)
    c = np.array(center)
    v0 = np.array(p0) - c
    v1 = np.array(p1) - c
    # Canvas coordinates are y-up; wx draws y-down, hence the flips.
    c[1] = -c[1]
    v0[1] = -v0[1]
    v1[1] = -v1[1]
    s0 = np.linalg.norm(v0)
    s1 = np.linalg.norm(v1)
    a0 = np.arctan2(v0[1], v0[0])
    a1 = np.arctan2(v1[1], v1[0])
    # Choose the (start, end) ordering that yields the smaller sweep.
    if (a1 - a0) % (np.pi * 2) < (a0 - a1) % (np.pi * 2):
        sa = a0
        ea = a1
    else:
        sa = a1
        ea = a0
    path = gc.CreatePath()
    path.AddArc(
        float(c[0]), float(c[1]), float(min(s0, s1)), float(sa), float(ea), True
    )
    gc.StrokePath(path)
    self._drawn = True
def draw_polygon(
    self,
    points,
    fill=True,
    closed=False,
    line_colour=(255, 255, 255, 255),
    fill_colour=(255, 255, 255, 255),
    width=2,
):
    """
    Draw a polyline/polygon through the given points.

    Params:
        points: sequence of (x, y) vertices in canvas (y-up) coordinates.
        fill: kept for interface compatibility; the brush is always set
            from fill_colour (pass a fully transparent colour to skip fill).
        closed: if True, an extra segment from the last point back to the
            first is added.
        line_colour: RGBA line colour.
        fill_colour: RGBA fill colour.
        width: outline width.

    Returns:
        The created wx graphics path (used by callers for hit testing),
        or None when there is no graphics context or no points.
    """
    if self.gc is None:
        return None
    gc = self.gc
    gc.SetPen(wx.Pen(wx.Colour(*line_colour), width, wx.SOLID))
    gc.SetBrush(wx.Brush(wx.Colour(*fill_colour), wx.SOLID))
    # Robustness fix: initialize `path` so an empty `points` sequence
    # cannot leave it unbound (previously a NameError risk on return).
    path = None
    if points:
        path = gc.CreatePath()
        px, py = points[0]
        # Canvas coordinates are y-up; wx draws y-down, hence -py.
        path.MoveToPoint((px, -py))
        for point in points:
            px, py = point
            path.AddLineToPoint((px, -py))
        if closed:
            px, py = points[0]
            path.AddLineToPoint((px, -py))
        gc.StrokePath(path)
        gc.FillPath(path)
        self._drawn = True
    return path
class CanvasHandlerBase(object):
    """
    Base class for interactive objects drawn on the canvas.

    Keeps a parent reference, a list of child handlers, a drawing layer
    (higher layers are drawn on top) and a visibility flag that is
    propagated to all children.
    """

    def __init__(self, parent):
        self.parent = parent
        self.children = []
        self.layer = 0
        self._visible = True

    @property
    def visible(self):
        return self._visible

    @visible.setter
    def visible(self, value):
        # Hiding/showing a handler also hides/shows all of its children.
        self._visible = value
        for child in self.children:
            child.visible = value

    def _3d_to_2d(self, renderer, pos):
        """Project a 3D world position to 2D display coordinates."""
        coord = vtkCoordinate()
        coord.SetValue(pos)
        px, py = coord.GetComputedDoubleDisplayValue(renderer)
        return px, py

    def add_child(self, child):
        self.children.append(child)

    def draw_to_canvas(self, gc, canvas):
        # To be overridden by subclasses.
        pass

    def is_over(self, x, y):
        """Return self when (x, y) is inside the bounding box, else None.

        NOTE(review): relies on subclasses defining self.bbox as
        (xi, yi, xf, yf); the base class does not set it.
        """
        xi, yi, xf, yf = self.bbox
        if xi <= x <= xf and yi <= y <= yf:
            return self
        return None
class TextBox(CanvasHandlerBase):
    """
    A draggable text box anchored at a 3D position.

    The 3D anchor is projected to display coordinates at draw time;
    dragging (on_mouse_move) translates the 3D position itself.
    """

    def __init__(
        self,
        parent,
        text,
        position=(0, 0, 0),
        text_colour=(0, 0, 0, 255),
        box_colour=(255, 255, 255, 255),
    ):
        super(TextBox, self).__init__(parent)
        self.layer = 0
        self.text = text
        self.text_colour = text_colour
        self.box_colour = box_colour
        # 3D world position of the box anchor.
        self.position = position
        self.children = []
        # Screen-space bounding box (xi, yi, xf, yf); updated on draw.
        self.bbox = (0, 0, 0, 0)
        self._highlight = False
        # Last cursor position seen, used to compute drag deltas.
        self._last_position = (0, 0, 0)

    def set_text(self, text):
        self.text = text

    def draw_to_canvas(self, gc, canvas):
        if self.visible:
            # Project the 3D anchor into display coordinates.
            px, py = self._3d_to_2d(canvas.evt_renderer, self.position)
            x, y, w, h = canvas.draw_text_box(
                self.text,
                (px, py),
                txt_colour=self.text_colour,
                bg_colour=self.box_colour,
            )
            if self._highlight:
                # NOTE(review): rw/rh are computed but never used.
                rw, rh = canvas.evt_renderer.GetSize()
                # Translucent red overlay marks the hovered state.
                canvas.draw_rectangle(
                    (px, py - h), w, h, (255, 0, 0, 25), (255, 0, 0, 25)
                )
            self.bbox = (x, y - h, x + w, y)

    def is_over(self, x, y):
        """Return self when (x, y) is inside the last drawn bbox."""
        xi, yi, xf, yf = self.bbox
        if xi <= x <= xf and yi <= y <= yf:
            return self
        return None

    def on_mouse_move(self, evt):
        # Translate the anchor by the cursor displacement since last event.
        mx, my = evt.position
        x, y, z = evt.viewer.get_coordinate_cursor(mx, my)
        self.position = [
            i - j + k
            for (i, j, k) in zip((x, y, z), self._last_position, self.position)
        ]
        self._last_position = (x, y, z)
        return True

    def on_mouse_enter(self, evt):
        # self.layer = 99
        self._highlight = True

    def on_mouse_leave(self, evt):
        # self.layer = 0
        self._highlight = False

    def on_select(self, evt):
        # Remember where the drag started for on_mouse_move deltas.
        mx, my = evt.position
        x, y, z = evt.viewer.get_coordinate_cursor(mx, my)
        self._last_position = (x, y, z)
class CircleHandler(CanvasHandlerBase):
    """
    A small draggable circle used as a manipulation handle.

    `position` may be a 3D world coordinate (is_3d=True, projected at
    draw time) or a 2D display coordinate.
    """

    def __init__(
        self,
        parent,
        position,
        radius=5,
        line_colour=(255, 255, 255, 255),
        fill_colour=(0, 0, 0, 0),
        is_3d=True,
    ):
        super(CircleHandler, self).__init__(parent)
        self.layer = 0
        self.position = position
        self.radius = radius
        self.line_colour = line_colour
        self.fill_colour = fill_colour
        # Screen-space bounding box (xi, yi, xf, yf); updated on draw.
        self.bbox = (0, 0, 0, 0)
        self.is_3d = is_3d
        self.children = []
        # Weak reference to an optional "handle moved" callback.
        self._on_move_function = None

    def on_move(self, evt_function):
        """Register a callback fired while the handle is dragged (held weakly)."""
        self._on_move_function = WeakMethod(evt_function)

    def draw_to_canvas(self, gc, canvas):
        if self.visible:
            viewer = canvas.viewer
            # Scale the handle radius for HiDPI displays.
            scale = viewer.GetContentScaleFactor()
            if self.is_3d:
                px, py = self._3d_to_2d(canvas.evt_renderer, self.position)
            else:
                px, py = self.position
            x, y, w, h = canvas.draw_circle(
                (px, py),
                self.radius * scale,
                line_colour=self.line_colour,
                fill_colour=self.fill_colour,
            )
            self.bbox = (x - w / 2, y - h / 2, x + w / 2, y + h / 2)

    def on_mouse_move(self, evt):
        # Follow the cursor, either in 3D world space or 2D display space.
        mx, my = evt.position
        if self.is_3d:
            x, y, z = evt.viewer.get_coordinate_cursor(mx, my)
            self.position = (x, y, z)
        else:
            self.position = mx, my
        # First call dereferences the WeakMethod; it yields a falsy value
        # when the callback's target has been garbage-collected.
        if self._on_move_function and self._on_move_function():
            self._on_move_function()(self, evt)
        return True
class Polygon(CanvasHandlerBase):
    """
    A polygon drawn on the canvas with draggable vertex handlers.

    Vertices may be 3D world coordinates (is_3d=True, projected at draw
    time) or 2D display coordinates. When closed, hit testing uses the
    last drawn graphics path.
    """

    def __init__(
        self,
        parent,
        points=None,
        fill=True,
        closed=True,
        line_colour=(255, 255, 255, 255),
        fill_colour=(255, 255, 255, 128),
        width=2,
        interactive=True,
        is_3d=True,
    ):
        super(Polygon, self).__init__(parent)
        self.layer = 0
        self.children = []
        # Fresh list when omitted, avoiding a shared mutable default.
        if points is None:
            self.points = []
        else:
            self.points = points
        self.handlers = []
        self.fill = fill
        self.closed = closed
        self.line_colour = line_colour
        # Last graphics path drawn; used by is_over() for hit testing.
        self._path = None
        if self.fill:
            self.fill_colour = fill_colour
        else:
            # Fully transparent fill when fill is disabled.
            self.fill_colour = (0, 0, 0, 0)
        self.width = width
        self._interactive = interactive
        self.is_3d = is_3d

    @property
    def interactive(self):
        return self._interactive

    @interactive.setter
    def interactive(self, value):
        # Toggling interactivity shows/hides the vertex handlers.
        self._interactive = value
        for handler in self.handlers:
            handler.visible = value

    def draw_to_canvas(self, gc, canvas):
        if self.visible and self.points:
            if self.is_3d:
                points = [self._3d_to_2d(canvas.evt_renderer, p) for p in self.points]
            else:
                points = self.points
            self._path = canvas.draw_polygon(
                points,
                self.fill,
                self.closed,
                self.line_colour,
                self.fill_colour,
                self.width,
            )

    def append_point(self, point):
        """Append a vertex and create its draggable handler."""
        handler = CircleHandler(
            self, point, is_3d=self.is_3d, fill_colour=(255, 0, 0, 255)
        )
        handler.layer = 1
        self.add_child(handler)
        self.handlers.append(handler)
        self.points.append(point)

    def on_mouse_move(self, evt):
        if evt.root_event_obj is self:
            # The polygon body itself is being dragged.
            self.on_mouse_move2(evt)
        else:
            # A vertex handler moved: re-sync vertices from the handlers.
            self.points = []
            for handler in self.handlers:
                self.points.append(handler.position)

    def is_over(self, x, y):
        # Hit-test against the drawn path (canvas y axis is flipped).
        if self.closed and self._path and self._path.Contains(x, -y):
            return self

    def on_mouse_move2(self, evt):
        """Translate the whole polygon by the cursor displacement.

        NOTE(review): assumes on_select() ran first to set _last_position.
        """
        mx, my = evt.position
        if self.is_3d:
            x, y, z = evt.viewer.get_coordinate_cursor(mx, my)
            new_pos = (x, y, z)
        else:
            new_pos = mx, my
        diff = [i - j for i, j in zip(new_pos, self._last_position)]
        for n, point in enumerate(self.points):
            self.points[n] = tuple(i + j for i, j in zip(diff, point))
            self.handlers[n].position = self.points[n]
        self._last_position = new_pos
        return True

    def on_mouse_enter(self, evt):
        pass

    def on_mouse_leave(self, evt):
        pass

    def on_select(self, evt):
        # Remember where the drag started so on_mouse_move2 can compute
        # deltas. (Removed a leftover debug print from this method.)
        mx, my = evt.position
        self.interactive = True
        if self.is_3d:
            x, y, z = evt.viewer.get_coordinate_cursor(mx, my)
            self._last_position = (x, y, z)
        else:
            self._last_position = (mx, my)

    def on_deselect(self, evt):
        self.interactive = False
        return True

    def convex_hull(self, points, merge=True):
        """Compute the convex hull with upper/lower cross-product scans
        over the sorted points (monotone-chain style).

        Returns U + L when merge is True, otherwise the (U, L) pair.
        """
        spoints = sorted(points)
        U = []
        L = []
        # Cross product of (o->a) x (o->b): sign gives the turn direction.
        _dir = lambda o, a, b: (a[0] - o[0]) * (b[1] - o[1]) - (a[1] - o[1]) * (
            b[0] - o[0]
        )
        for p in spoints:
            while len(L) >= 2 and _dir(L[-2], L[-1], p) <= 0:
                L.pop()
            L.append(p)
        for p in reversed(spoints):
            while len(U) >= 2 and _dir(U[-2], U[-1], p) <= 0:
                U.pop()
            U.append(p)
        if merge:
            return U + L
        return U, L

    def get_all_antipodal_pairs(self, points):
        """Yield pairs of hull vertices by walking the upper and lower
        hulls in opposite directions (rotating-calipers style)."""
        U, L = self.convex_hull(points, merge=False)
        i = 0
        j = len(L) - 1
        while i < len(U) - 1 or j > 0:
            yield U[i], L[j]
            if i == len(U) - 1:
                j -= 1
            elif j == 0:
                i += 1
            elif (U[i + 1][1] - U[i][1]) * (L[j][0] - L[j - 1][0]) > (
                L[j][1] - L[j - 1][1]
            ) * (U[i + 1][0] - U[i][0]):
                i += 1
            else:
                j -= 1
class Ellipse(CanvasHandlerBase):
    """
    An axis-aligned ellipse on the canvas, defined by a center and two
    points (point1 on the horizontal axis, point2 on the vertical axis),
    each with its own draggable handler.
    """

    def __init__(
        self,
        parent,
        center,
        point1,
        point2,
        fill=True,
        line_colour=(255, 255, 255, 255),
        fill_colour=(255, 255, 255, 128),
        width=2,
        interactive=True,
        is_3d=True,
    ):
        super(Ellipse, self).__init__(parent)
        self.children = []
        self.layer = 0
        self.center = center
        self.point1 = point1
        self.point2 = point2
        # Screen-space bounding box (xi, yi, xf, yf); updated on draw.
        self.bbox = (0, 0, 0, 0)
        self.fill = fill
        self.line_colour = line_colour
        if self.fill:
            self.fill_colour = fill_colour
        else:
            # Fully transparent fill when fill is disabled.
            self.fill_colour = (0, 0, 0, 0)
        self.width = width
        self._interactive = interactive
        self.is_3d = is_3d
        # Draggable handlers for the two axis points.
        self.handler_1 = CircleHandler(
            self, self.point1, is_3d=is_3d, fill_colour=(255, 0, 0, 255)
        )
        self.handler_1.layer = 1
        self.handler_2 = CircleHandler(
            self, self.point2, is_3d=is_3d, fill_colour=(255, 0, 0, 255)
        )
        self.handler_2.layer = 1
        self.add_child(self.handler_1)
        self.add_child(self.handler_2)

    @property
    def interactive(self):
        return self._interactive

    @interactive.setter
    def interactive(self, value):
        # Toggling interactivity shows/hides the two handlers.
        self._interactive = value
        self.handler_1.visible = value
        self.handler_2.visible = value

    def draw_to_canvas(self, gc, canvas):
        if self.visible:
            if self.is_3d:
                cx, cy = self._3d_to_2d(canvas.evt_renderer, self.center)
                p1x, p1y = self._3d_to_2d(canvas.evt_renderer, self.point1)
                p2x, p2y = self._3d_to_2d(canvas.evt_renderer, self.point2)
            else:
                cx, cy = self.center
                p1x, p1y = self.point1
                p2x, p2y = self.point2
            # point1 fixes the horizontal diameter, point2 the vertical one.
            width = abs(p1x - cx) * 2.0
            height = abs(p2y - cy) * 2.0
            self.bbox = canvas.draw_ellipse(
                (cx, cy), width, height, self.width, self.line_colour, self.fill_colour
            )

    def set_point1(self, pos):
        self.point1 = pos
        self.handler_1.position = pos

    def set_point2(self, pos):
        self.point2 = pos
        self.handler_2.position = pos

    def on_mouse_move(self, evt):
        if evt.root_event_obj is self:
            # The ellipse body is being dragged: translate everything.
            self.on_mouse_move2(evt)
        else:
            # A handler moved: constrain and apply both axis points.
            self.move_p1(evt)
            self.move_p2(evt)

    def move_p1(self, evt):
        """Constrain handler_1's position to point1's free axis and apply it."""
        pos = self.handler_1.position
        # NOTE(review): the AXIAL and CORONAL branches are identical here
        # (both keep only pos[0]); CORONAL likely intended a different
        # free axis -- confirm against the viewer orientations.
        if evt.viewer.orientation == "AXIAL":
            pos = pos[0], self.point1[1], self.point1[2]
        elif evt.viewer.orientation == "CORONAL":
            pos = pos[0], self.point1[1], self.point1[2]
        elif evt.viewer.orientation == "SAGITAL":
            pos = self.point1[0], pos[1], self.point1[2]
        self.set_point1(pos)
        if evt.control_down:
            # With Ctrl held, keep both radii equal (circle-like resize).
            dist = np.linalg.norm(np.array(self.point1) - np.array(self.center))
            vec = np.array(self.point2) - np.array(self.center)
            vec /= np.linalg.norm(vec)
            point2 = np.array(self.center) + vec * dist
            self.set_point2(tuple(point2))

    def move_p2(self, evt):
        """Constrain handler_2's position to point2's free axis and apply it."""
        pos = self.handler_2.position
        # NOTE(review): the CORONAL and SAGITAL branches are identical here
        # (both keep only pos[2]) -- confirm the intended free axes.
        if evt.viewer.orientation == "AXIAL":
            pos = self.point2[0], pos[1], self.point2[2]
        elif evt.viewer.orientation == "CORONAL":
            pos = self.point2[0], self.point2[1], pos[2]
        elif evt.viewer.orientation == "SAGITAL":
            pos = self.point2[0], self.point2[1], pos[2]
        self.set_point2(pos)
        if evt.control_down:
            # With Ctrl held, keep both radii equal (circle-like resize).
            dist = np.linalg.norm(np.array(self.point2) - np.array(self.center))
            vec = np.array(self.point1) - np.array(self.center)
            vec /= np.linalg.norm(vec)
            point1 = np.array(self.center) + vec * dist
            self.set_point1(tuple(point1))

    def on_mouse_enter(self, evt):
        # self.interactive = True
        pass

    def on_mouse_leave(self, evt):
        # self.interactive = False
        pass

    def is_over(self, x, y):
        xi, yi, xf, yf = self.bbox
        if xi <= x <= xf and yi <= y <= yf:
            return self

    def on_mouse_move2(self, evt):
        # Translate center and both axis points by the cursor displacement.
        # NOTE(review): assumes on_select() ran first to set _last_position.
        mx, my = evt.position
        if self.is_3d:
            x, y, z = evt.viewer.get_coordinate_cursor(mx, my)
            new_pos = (x, y, z)
        else:
            new_pos = mx, my
        diff = [i - j for i, j in zip(new_pos, self._last_position)]
        self.center = tuple((i + j for i, j in zip(diff, self.center)))
        self.set_point1(tuple((i + j for i, j in zip(diff, self.point1))))
        self.set_point2(tuple((i + j for i, j in zip(diff, self.point2))))
        self._last_position = new_pos
        return True

    def on_select(self, evt):
        # Remember where the drag started for on_mouse_move2 deltas.
        self.interactive = True
        mx, my = evt.position
        if self.is_3d:
            x, y, z = evt.viewer.get_coordinate_cursor(mx, my)
            self._last_position = (x, y, z)
        else:
            self._last_position = (mx, my)

    def on_deselect(self, evt):
        self.interactive = False
        return True
|
migrations | 0004_kafka | from posthog.clickhouse.client.migration_tools import run_sql_with_exceptions
from posthog.models.event.sql import (
DISTRIBUTED_EVENTS_TABLE_SQL,
WRITABLE_EVENTS_TABLE_SQL,
)
from posthog.models.person.sql import (
KAFKA_PERSONS_DISTINCT_ID_TABLE_SQL,
KAFKA_PERSONS_TABLE_SQL,
PERSONS_DISTINCT_ID_TABLE_MV_SQL,
PERSONS_TABLE_MV_SQL,
)
# NOTE: this migration previously created kafka_events and events_mv tables.
# kafka_events was a Kafka ClickHouse engine table that used Protobuf for
# serialization. To remove complexity of deployments, Protobuf support has been
# removed from the PostHog app and the Plugin server. See
# https://github.com/PostHog/posthog/issues/9207 for detail.
#
# These have been superseded by kafka_events_json and events_json_mv. However,
# we can't simply add a DROP TABLE for the old tables as there may still be
# events in Kafka that need to be consumed. We'd need some orchestration around
# this to avoid losing in flight events. See migration
# ee/clickhouse/migrations/0025_json_events.py for details of the new tables.
#
# For new installs, however, we don't need to consider this case, so we can
# simply not create them.
#
# WARNING: this does however mean that you can arrive at different DB states
# depending on which versions of PostHog you have run.
# Migration operations: Kafka-engine tables and materialized views for
# persons and person distinct ids, plus the writable and distributed
# events tables. Note: the *_MV_SQL values are passed uncalled, unlike
# the *_SQL() factory functions.
operations = [
    run_sql_with_exceptions(KAFKA_PERSONS_TABLE_SQL()),
    run_sql_with_exceptions(KAFKA_PERSONS_DISTINCT_ID_TABLE_SQL()),
    run_sql_with_exceptions(PERSONS_TABLE_MV_SQL),
    run_sql_with_exceptions(PERSONS_DISTINCT_ID_TABLE_MV_SQL),
    run_sql_with_exceptions(WRITABLE_EVENTS_TABLE_SQL()),
    run_sql_with_exceptions(DISTRIBUTED_EVENTS_TABLE_SQL()),
]
|
backfill | fix_preview_images | # The contents of this file are subject to the Common Public Attribution
# License Version 1.0. (the "License"); you may not use this file except in
# compliance with the License. You may obtain a copy of the License at
# http://code.reddit.com/LICENSE. The License is based on the Mozilla Public
# License Version 1.1, but Sections 14 and 15 have been added to cover use of
# software over a computer network and provide for limited attribution for the
# Original Developer. In addition, Exhibit A has been modified to be consistent
# with Exhibit B.
#
# Software distributed under the License is distributed on an "AS IS" basis,
# WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License for
# the specific language governing rights and limitations under the License.
#
# The Original Code is reddit.
#
# The Original Developer is the Initial Developer. The Initial Developer of
# the Original Code is reddit Inc.
#
# All portions of the code written by reddit are Copyright (c) 2006-2015 reddit
# Inc. All Rights Reserved.
###############################################################################
"""
Fix the urls of previously-uploaded preview images so they all work.
"""
import sys
import boto
import pycassa
from boto.s3.key import Key
from pylons import app_globals as g
from r2.lib.media import _get_scrape_url
from r2.lib.providers.media.s3 import S3MediaProvider
from r2.lib.utils import UrlParser
from r2.models.link import Link, LinksByImage
from r2.models.media_cache import MediaByURL
def good_preview_object(preview_object):
    """Return True when the preview object is usable: it exists, has a
    'url' key, and that url is non-empty. (Python 2 script.)"""
    if not preview_object or not 'url' in preview_object:
        print ' aborting - bad preview object: %s' % preview_object
        return False
    if not preview_object['url']:
        print ' aborting - bad preview url: %s' % preview_object['url']
        return False
    return True
# Main migration loop (Python 2 script): walk every image row, copy the
# preview image into the canonical S3 bucket, rewrite each Link's preview
# url, drop the stale media cache entry, then delete the old S3 key.
# Empty credential settings fall back to None for boto's default lookup.
s3 = boto.connect_s3(g.S3KEY_ID or None, g.S3SECRET_KEY or None)
for uid, columns in LinksByImage._cf.get_range():
    # When resuming, use:
    #for uid, columns in LinksByImage._cf.get_range(start='<uid>'):
    print 'Looking at image %s' % uid
    link_ids = columns.keys()
    links = Link._byID36(link_ids, return_dict=False, data=True)
    if not links:
        continue
    # Pull information about the image from the first link (they *should* all
    # be the same).
    link = links[0]
    preview_object = link.preview_object
    if not good_preview_object(preview_object):
        continue
    u = UrlParser(preview_object['url'])
    if preview_object['url'].startswith(g.media_fs_base_url_http):
        # Uploaded to the local filesystem instead of s3. Should only be in
        # dev.
        print ' non-s3 image'
        continue
    elif u.hostname == 's3.amazonaws.com':
        # Path-style url: first path segment is the bucket name.
        parts = u.path.lstrip('/').split('/')
        bucket = parts.pop(0)
        filename = '/'.join(parts)
    else:
        # Virtual-hosted-style url: the bucket is the hostname.
        bucket = u.hostname
        filename = u.path.lstrip('/')
    print ' bucket: %s' % bucket
    print ' filename: %s' % filename
    if bucket in g.s3_image_buckets:
        print ' skipping - already in correct place'
        continue
    # Copy the object into the canonical bucket, then point every link at it.
    k = Key(s3.get_bucket(bucket))
    k.key = filename
    k.copy(s3.get_bucket(g.s3_image_buckets[0]), filename)
    url = 'http://s3.amazonaws.com/%s/%s' % (g.s3_image_buckets[0], filename)
    print ' new url: %s' % url
    for link in links:
        print ' altering Link %s' % link
        if not good_preview_object(link.preview_object):
            continue
        if not link.preview_object == preview_object:
            print " aborting - preview objects don't match"
            print ' first: %s' % preview_object
            print ' ours: %s' % link.preview_object
            continue
        link.preview_object['url'] = url
        link._commit()
        # Guess at the key that'll contain the (now-incorrect) cache of the
        # preview object so we can delete it and not end up inserting old info
        # into new Links.
        #
        # These parameters are what's used in most of the code; the only place
        # they're overridden is for promoted links, where they could be
        # anything. We'll just have to deal with those as they come up.
        image_url = _get_scrape_url(link)
        cache_key = MediaByURL._rowkey(image_url, autoplay=False, maxwidth=600)
        print ' deleting cache with key %s' % cache_key
        cache = MediaByURL(_id=cache_key)
        cache._committed = True
        try:
            cache._destroy()
        except pycassa.cassandra.ttypes.InvalidRequestException as e:
            print ' skipping cache deletion (%s)' % e.why
            continue
    # Delete *after* we've updated all the Links so they'll continue to work
    # while we're in the migration process.
    k.delete()
|
gui | app | import os
import sys
import config
import wx
from logbook import Logger
pyfalog = Logger(__name__)
from service.settings import LocaleSettings
class PyfaApp(wx.App):
    """wx application object for pyfa; sets up locale/i18n at startup."""

    def OnInit(self):
        """
        Do application initialization work, e.g. define application globals.

        Returns True so wx continues starting the application.
        """
        # Name for my application.
        self.appName = "pyfa"
        # Folder containing the translation catalogs.
        localeDir = os.path.join(config.pyfaPath, "locale")
        self.locale = None
        wx.Locale.AddCatalogLookupPathPrefix(localeDir)
        # Set language stuff and update to last used language.
        self.UpdateLanguage(config.language)
        return True

    # -----------------------------------------------------------------------

    def UpdateLanguage(self, lang=None):
        """
        Update the language to the requested one.

        Make *sure* any existing locale is deleted before the new
        one is created. The old C++ object needs to be deleted
        before the new one is created, and if we just assign a new
        instance to the old Python variable, the old C++ locale will
        not be destroyed soon enough, likely causing a crash.

        :param string `lang`: one of the supported language codes.
        """
        # Language domain.
        langDomain = config.CATALOG
        # Destroy any existing locale first (see docstring for why).
        if self.locale:
            assert sys.getrefcount(self.locale) <= 2
            del self.locale
        # Create a locale object for this language.
        langInfo = wx.Locale.FindLanguageInfo(lang)
        if langInfo is not None:
            pyfalog.debug("Setting language to: " + lang)
            self.locale = wx.Locale(langInfo.Language)
            if self.locale.IsOk():
                success = self.locale.AddCatalog(langDomain)
                if not success:
                    # Fixed message typo (was "Langauage").
                    print("Language catalog not successfully loaded")
        else:
            # Unknown language requested: fall back to the default locale.
            # Fixed message typo (was "langauge").
            pyfalog.debug("Cannot find language: " + lang)
            self.locale = wx.Locale(
                wx.Locale.FindLanguageInfo(LocaleSettings.defaults["locale"]).Language
            )
parser | printer | """Conversion from internal data structures to text.
"""
__copyright__ = "Copyright (C) 2014-2018 Martin Blais"
__license__ = "GNU GPLv2"
import codecs
import datetime
import enum
import io
import re
import sys
import textwrap
from decimal import Decimal
from typing import Optional
from beancount.core import (
account,
amount,
convert,
data,
display_context,
interpolate,
inventory,
position,
)
from beancount.utils import misc_utils
def align_position_strings(strings):
    """Align rendered amount/position strings on their first currency letter.

    Given a list of rendered positions, compute the width needed to stack
    them in a column so that the first uppercase (currency) letter lines
    up. Only the first currency word is aligned; anything following it
    (price, cost) is left as-is. Strings with no uppercase letter past
    position 0 are rendered flush left.

    Args:
      strings: A list of rendered position or amount strings.
    Returns:
      A pair of a list of aligned strings and the width of the aligned strings.
    """
    first_upper = re.compile("[A-Z]").search

    annotated = []      # (split index or None, original string)
    widest_prefix = 0   # widest text before the alignment column
    widest_suffix = 0   # widest text from the alignment column onward
    widest_plain = 0    # widest string without an alignment point
    for text in strings:
        hit = first_upper(text)
        if hit is not None and hit.start() != 0:
            cut = hit.start()
            widest_prefix = max(widest_prefix, cut)
            widest_suffix = max(widest_suffix, len(text) - cut)
            annotated.append((cut, text))
        else:
            widest_plain = max(widest_plain, len(text))
            annotated.append((None, text))

    # Build the two padding functions from the computed widths.
    total_width = max(widest_prefix + widest_suffix, widest_plain)
    pad_split = "{{:>{0}}}{{:{1}}}".format(
        widest_prefix, total_width - widest_prefix
    ).format
    pad_plain = "{{:<{0}}}".format(total_width).format

    aligned = [
        pad_plain(text) if cut is None else pad_split(text[:cut], text[cut:])
        for cut, text in annotated
    ]
    return aligned, total_width
class EntryPrinter:
    """A multi-method interface for printing all directive types.

    Attributes:
      dcontext: An instance of DisplayContext with which to render all the numbers.
      render_weight: A boolean, true if we should render the weight of the postings
        as a comment, for debugging.
      min_width_account: An integer, the minimum width to leave for the account name.
      prefix: User-specific prefix for custom indentation (for Fava).
      stringify_invalid_types: If a metadata value is invalid, force a conversion to
        string for printout.
    """

    # pylint: disable=invalid-name

    def __init__(
        self,
        dcontext=None,
        render_weight=False,
        min_width_account=None,
        prefix=None,
        stringify_invalid_types=False,
    ):
        self.dcontext = dcontext or display_context.DEFAULT_DISPLAY_CONTEXT
        self.dformat = self.dcontext.build(precision=display_context.Precision.MOST_COMMON)
        self.dformat_max = self.dcontext.build(precision=display_context.Precision.MAXIMUM)
        self.render_weight = render_weight
        self.min_width_account = min_width_account
        self.prefix = prefix or " "
        self.stringify_invalid_types = stringify_invalid_types

    def __call__(self, obj):
        """Render a directive.

        Args:
          obj: The directive to be rendered.
        Returns:
          A string, the rendered directive.
        """
        oss = io.StringIO()
        # Dispatch on the directive's class name (Transaction, Open, ...).
        method = getattr(self, obj.__class__.__name__)
        method(obj, oss)
        return oss.getvalue()

    # Metadata keys never rendered (parser bookkeeping, not user data).
    META_IGNORE = {"filename", "lineno"}

    def write_metadata(self, meta, oss, prefix=None):
        """Write metadata to the file object, excluding filename and line number.

        Args:
          meta: A dict that contains the metadata for this directive.
          oss: A file object to write to.
          prefix: Optional indentation prefix; defaults to self.prefix.
        """
        if meta is None:
            return
        if prefix is None:
            prefix = self.prefix
        for key, value in sorted(meta.items()):
            if key not in self.META_IGNORE and not key.startswith("__"):
                value_str = None
                if isinstance(value, str):
                    value_str = '"{}"'.format(misc_utils.escape_string(value))
                elif isinstance(value, (Decimal, datetime.date, amount.Amount, enum.Enum)):
                    value_str = str(value)
                elif isinstance(value, bool):
                    value_str = "TRUE" if value else "FALSE"
                elif isinstance(value, (dict, inventory.Inventory)):
                    pass  # Ignore dicts, don't print them out.
                elif value is None:
                    value_str = ""  # Render null metadata as empty, on purpose.
                else:
                    if self.stringify_invalid_types:
                        # This is only intended to be used during development,
                        # when debugging for custom values of data types
                        # attached directly and not coming from the parser.
                        value_str = str(value)
                    else:
                        raise ValueError("Unexpected value: '{!r}'".format(value))
                if value_str is not None:
                    oss.write("{}{}: {}\n".format(prefix, key, value_str))

    def Transaction(self, entry, oss):
        """Render a Transaction directive with its postings column-aligned."""
        # Compute the string for the payee and narration line.
        strings = []
        if entry.payee:
            strings.append('"{}"'.format(misc_utils.escape_string(entry.payee)))
        if entry.narration:
            strings.append('"{}"'.format(misc_utils.escape_string(entry.narration)))
        elif entry.payee:
            # Ensure we append an empty string for narration if we have a payee.
            strings.append('""')
        if entry.tags:
            for tag in sorted(entry.tags):
                strings.append("#{}".format(tag))
        if entry.links:
            for link in sorted(entry.links):
                strings.append("^{}".format(link))
        oss.write(
            "{e.date} {flag} {}\n".format(
                " ".join(strings), e=entry, flag=render_flag(entry.flag)
            )
        )
        self.write_metadata(entry.meta, oss)

        # Render each posting as (account, position, weight) strings and
        # compute the column widths needed to align them.
        rows = [self.render_posting_strings(posting) for posting in entry.postings]
        strs_account = [row[0] for row in rows]
        width_account = (
            max(len(flag_account) for flag_account in strs_account) if strs_account else 1
        )
        strs_position, width_position = align_position_strings(row[1] for row in rows)
        strs_weight, width_weight = align_position_strings(row[2] for row in rows)
        if self.min_width_account and self.min_width_account > width_account:
            width_account = self.min_width_account

        # Only render weights when requested and some posting has one.
        non_trivial_balance = (
            any(map(interpolate.has_nontrivial_balance, entry.postings))
            if self.render_weight and width_weight > 0
            else False
        )
        # Loop variables renamed (acc_str/pos_str/wgt_str) so they no longer
        # shadow the imported `account` and `position` modules.
        if non_trivial_balance:
            for posting, acc_str, pos_str, wgt_str in zip(
                entry.postings, strs_account, strs_position, strs_weight
            ):
                oss.write(
                    f"{self.prefix}{acc_str:{width_account}} "
                    f"{pos_str:{width_position}} "
                    f"; {wgt_str:{max(1, width_weight)}}".rstrip()
                    + "\n"
                )
                if posting.meta:
                    self.write_metadata(posting.meta, oss, " ")
        else:
            for posting, acc_str, pos_str in zip(
                entry.postings, strs_account, strs_position
            ):
                oss.write(
                    f"{self.prefix}{acc_str:{width_account}} "
                    f"{pos_str:{max(1, width_position)}}".rstrip()
                    + "\n"
                )
                if posting.meta:
                    self.write_metadata(posting.meta, oss, " ")

    def render_posting_strings(self, posting):
        """This renders the three components of a posting: the account and its optional
        posting flag, the position, and finally, the weight of the position. The
        purpose is to align these in the caller.

        Args:
          posting: An instance of Posting, the posting to render.
        Returns:
          A tuple of
            flag_account: A string, the account name including the flag.
            position_str: A string, the rendered position string.
            weight_str: A string, the rendered weight of the posting.
        """
        # Render a string of the flag and the account.
        flag = "{} ".format(render_flag(posting.flag)) if posting.flag else ""
        flag_account = flag + posting.account

        # Render a string with the amount and cost and optional price, if
        # present. Also render a string with the weight.
        weight_str = ""
        if isinstance(posting.units, amount.Amount):
            position_str = position.to_string(posting, self.dformat)
            # Note: we render weights at maximum precision, for debugging.
            if posting.cost is None or (
                isinstance(posting.cost, position.Cost)
                and isinstance(posting.cost.number, Decimal)
            ):
                weight_str = str(convert.get_weight(posting))
        else:
            position_str = ""

        if posting.price is not None:
            position_str += " @ {}".format(posting.price.to_string(self.dformat_max))

        return flag_account, position_str, weight_str

    def Posting(self, posting, oss):
        """Render a single posting (used only when printed standalone)."""
        # Note: This is to be used when rendering postings directly only. The
        # method rendering a transaction attempts to align the posting strings
        # together.
        flag_account, position_str, weight_str = self.render_posting_strings(posting)
        oss.write(
            "{}{:64} {} ; {}\n".format(
                self.prefix, flag_account, position_str, weight_str
            ).rstrip()
        )
        if posting.meta:
            self.write_metadata(posting.meta, oss, " ")

    def Balance(self, entry, oss):
        """Render a Balance directive, with optional tolerance and diff comment."""
        comment = " ; Diff: {}".format(entry.diff_amount) if entry.diff_amount else ""
        number_str = (
            self.dformat.format(entry.amount.number, entry.amount.currency)
            if isinstance(entry.amount.number, Decimal)
            # BUG FIX: was `str(self.number)` -- EntryPrinter has no `number`
            # attribute, so non-Decimal amounts raised AttributeError.
            else str(entry.amount.number)
        )
        # Render optional tolerance.
        tolerance = ""
        if entry.tolerance:
            tolerance_fmt = self.dformat.format(entry.tolerance, entry.amount.currency)
            tolerance = "~ {tolerance} ".format(tolerance=tolerance_fmt)
        oss.write(
            (
                "{e.date} balance {e.account:47} {amount} {tolerance}{currency}"
                "{comment}\n"
            ).format(
                e=entry,
                amount=number_str,
                tolerance=tolerance,
                currency=entry.amount.currency,
                comment=comment,
            )
        )
        self.write_metadata(entry.meta, oss)

    def Note(self, entry, oss):
        """Render a Note directive, including its tags and links."""
        oss.write('{e.date} note {e.account} "{e.comment}"'.format(e=entry))
        if entry.tags or entry.links:
            oss.write(" ")
            for tag in sorted(entry.tags):
                oss.write("#{}".format(tag))
            for link in sorted(entry.links):
                oss.write("^{}".format(link))
        oss.write("\n")
        self.write_metadata(entry.meta, oss)

    def Document(self, entry, oss):
        """Render a Document directive, including its tags and links."""
        oss.write('{e.date} document {e.account} "{e.filename}"'.format(e=entry))
        if entry.tags or entry.links:
            oss.write(" ")
            for tag in sorted(entry.tags):
                oss.write("#{}".format(tag))
            for link in sorted(entry.links):
                oss.write("^{}".format(link))
        oss.write("\n")
        self.write_metadata(entry.meta, oss)

    def Pad(self, entry, oss):
        """Render a Pad directive."""
        oss.write("{e.date} pad {e.account} {e.source_account}\n".format(e=entry))
        self.write_metadata(entry.meta, oss)

    def Open(self, entry, oss):
        """Render an Open directive with optional currencies and booking method."""
        oss.write(
            "{e.date} open {e.account:47} {currencies} {booking}".format(
                e=entry,
                currencies=",".join(entry.currencies or []),
                booking=(
                    '"{}"'.format(entry.booking.name) if entry.booking is not None else ""
                ),
            ).rstrip()
        )
        oss.write("\n")
        self.write_metadata(entry.meta, oss)

    def Close(self, entry, oss):
        """Render a Close directive."""
        oss.write("{e.date} close {e.account}\n".format(e=entry))
        self.write_metadata(entry.meta, oss)

    def Commodity(self, entry, oss):
        """Render a Commodity directive."""
        oss.write("{e.date} commodity {e.currency}\n".format(e=entry))
        self.write_metadata(entry.meta, oss)

    def Price(self, entry, oss):
        """Render a Price directive at maximum precision."""
        oss.write(
            "{e.date} price {e.currency:<22} {amount:>22}\n".format(
                e=entry, amount=entry.amount.to_string(self.dformat_max)
            )
        )
        self.write_metadata(entry.meta, oss)

    def Event(self, entry, oss):
        """Render an Event directive."""
        oss.write('{e.date} event "{e.type}" "{e.description}"\n'.format(e=entry))
        self.write_metadata(entry.meta, oss)

    def Query(self, entry, oss):
        """Render a Query directive."""
        oss.write('{e.date} query "{e.name}" "{e.query_string}"\n'.format(e=entry))
        self.write_metadata(entry.meta, oss)

    def Custom(self, entry, oss):
        """Render a Custom directive, converting each typed value to syntax."""
        custom_values = []
        for value, dtype in entry.values:
            if dtype is account.TYPE:
                value = "{}".format(value)
            elif isinstance(value, str):
                value = '"{}"'.format(value)
            elif isinstance(value, Decimal):
                value = str(value)
            elif isinstance(value, datetime.date):
                value = value.isoformat()
            elif isinstance(value, bool):
                value = "TRUE" if value else "FALSE"
            elif isinstance(value, amount.Amount):
                value = value.to_string()
            custom_values.append(value)
        oss.write(
            '{e.date} custom "{e.type}" {}\n'.format(" ".join(custom_values), e=entry)
        )
        self.write_metadata(entry.meta, oss)
def render_flag(inflag: Optional[str]) -> str:
    """Render a flag, which can be None, a symbol or a single character, to a string."""
    if not inflag:
        return ""
    # A lone uppercase letter is prefixed with a quote to distinguish it.
    return "'{}".format(inflag) if re.match(r"[A-Z]$", inflag) else inflag
def format_entry(entry, dcontext=None, render_weights=False, prefix=None):
    """Format an entry into a string in the same input syntax the parser accepts.
    Args:
      entry: An entry instance.
      dcontext: An instance of DisplayContext used to format the numbers.
      render_weights: A boolean, true to render the weights for debugging.
      prefix: An optional string, forwarded to EntryPrinter as the line prefix.
    Returns:
      A string, the formatted entry.
    """
    return EntryPrinter(dcontext, render_weights, prefix=prefix)(entry)
def print_entry(entry, dcontext=None, render_weights=False, file=None):
    """A convenience function that prints a single entry to a file.
    Args:
      entry: A directive entry.
      dcontext: An instance of DisplayContext used to format the numbers.
      render_weights: A boolean, true to render the weights for debugging.
      file: An optional file object to write the entries to.
    """
    # TODO(blais): DO remove this now, it's a huge annoyance not to be able to
    # print in-between other statements.
    if file:
        output = file
    elif hasattr(sys.stdout, "buffer"):
        # Wrap the binary buffer so the output is always UTF-8 encoded.
        output = codecs.getwriter("utf-8")(sys.stdout.buffer)
    else:
        output = sys.stdout
    output.write(format_entry(entry, dcontext, render_weights))
    output.write("\n")
# TODO(blais): Change this to a function which accepts the same optional
# arguments as the printer object. Isolate the spacer/segmentation algorithm to
# its own function.
def print_entries(entries, dcontext=None, render_weights=False, file=None, prefix=None):
    """A convenience function that prints a list of entries to a file.
    Args:
      entries: A list of directives.
      dcontext: An instance of DisplayContext used to format the numbers.
      render_weights: A boolean, true to render the weights for debugging.
      file: An optional file object to write the entries to.
      prefix: An optional string written before the entries.
    """
    assert isinstance(entries, list), "Entries is not a list: {}".format(entries)
    if file:
        output = file
    elif hasattr(sys.stdout, "buffer"):
        # Wrap the binary buffer so the output is always UTF-8 encoded.
        output = codecs.getwriter("utf-8")(sys.stdout.buffer)
    else:
        output = sys.stdout
    if prefix:
        output.write(prefix)
    previous_type = type(entries[0]) if entries else None
    eprinter = EntryPrinter(dcontext, render_weights)
    for entry in entries:
        # Insert a newline before every transaction/commodity, and between
        # consecutive blocks of directives of differing types.
        entry_type = type(entry)
        if (
            entry_type in (data.Transaction, data.Commodity)
            or entry_type is not previous_type
        ):
            output.write("\n")
            previous_type = entry_type
        output.write(eprinter(entry))
# TODO(blais): Rename to format_source() to be consistent, better:
# format_location().
def render_source(meta):
    """Render the source for errors in a way that it will be both detected by
    Emacs and align and rendered nicely.
    Args:
      meta: A dict with the metadata.
    Returns:
      A string, rendered to be interpretable as a message location for Emacs or
      other editors.
    """
    # "filename:lineno:" with the line-number part padded to a fixed width.
    lineno_part = "{}:".format(meta["lineno"])
    return meta["filename"] + ":" + lineno_part.ljust(8)
def format_error(error):
    """Given an error object, return a formatted string for it.
    Args:
      error: a namedtuple object representing an error. It has to have an
        'entry' attribute that may be either a single directive object or a
        list of directive objects.
    Returns:
      A string, the error rendered.
    """
    buf = io.StringIO()
    buf.write("{} {}\n".format(render_source(error.source), error.message))
    if error.entry is not None:
        # Accept either a single directive or a list of directives.
        if isinstance(error.entry, list):
            entries = error.entry
        else:
            entries = [error.entry]
        rendered = "\n".join(format_entry(entry) for entry in entries)
        buf.write("\n")
        buf.write(textwrap.indent(rendered, "   "))
        buf.write("\n")
    return buf.getvalue()
def print_error(error, file=None):
    """A convenience function that prints a single error to a file.
    Args:
      error: An error object.
      file: An optional file object to write the errors to.
    """
    output = file if file else sys.stdout
    output.write(format_error(error))
    output.write("\n")
def print_errors(errors, file=None, prefix=None):
    """A convenience function that prints a list of errors to a file.
    Args:
      errors: A list of errors.
      file: An optional file object to write the errors to.
      prefix: An optional string written before the errors.
    """
    output = file if file else sys.stdout
    if prefix:
        output.write(prefix)
    for error in errors:
        output.write(format_error(error))
        output.write("\n")
|
PathTests | TestPathToolChangeGenerator | # -*- coding: utf-8 -*-
# ***************************************************************************
# * Copyright (c) 2021 sliptonic <shopinthewoods@gmail.com> *
# * *
# * This program is free software; you can redistribute it and/or modify *
# * it under the terms of the GNU Lesser General Public License (LGPL) *
# * as published by the Free Software Foundation; either version 2 of *
# * the License, or (at your option) any later version. *
# * for detail see the LICENCE text file. *
# * *
# * This program is distributed in the hope that it will be useful, *
# * but WITHOUT ANY WARRANTY; without even the implied warranty of *
# * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *
# * GNU Library General Public License for more details. *
# * *
# * You should have received a copy of the GNU Library General Public *
# * License along with this program; if not, write to the Free Software *
# * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 *
# * USA *
# * *
# ***************************************************************************
import Path
import Path.Base.Generator.toolchange as generator
import PathTests.PathTestUtils as PathTestUtils
# Enable debug-level logging and call tracking for this test module.
Path.Log.setLevel(Path.Log.Level.DEBUG, Path.Log.thisModule())
Path.Log.trackModule(Path.Log.thisModule())
class TestPathToolChangeGenerator(PathTestUtils.PathTestBase):
    """Tests for the tool change G-code generator."""

    def test00(self):
        """Test Basic Tool Change Generator Return"""
        args = {
            "toolnumber": 1,
            "toollabel": "My Label",
            "spindlespeed": 500,
            "spindledirection": generator.SpindleDirection.OFF,
        }
        results = generator.generate(**args)
        # Spindle off: expect exactly a comment and a tool-change command.
        # assertEqual/assertIsInstance replace assertTrue(a == b) so failures
        # report the actual and expected values.
        self.assertEqual(len(results), 2)
        commentcommand = results[0]
        self.assertIsInstance(commentcommand, Path.Command)
        self.assertEqual(commentcommand.toGCode(), "(My Label)")
        # Get a tool command
        toolcommand = results[1]
        self.assertEqual(toolcommand.Name, "M6")
        # Turn on the spindle: an M3 with the speed parameter is appended.
        args["spindledirection"] = generator.SpindleDirection.CW
        results = generator.generate(**args)
        self.assertEqual(len(results), 3)
        speedcommand = results[2]
        self.assertEqual(speedcommand.Name, "M3")
        self.assertEqual(speedcommand.Parameters["S"], 500)
        # Speed zero with spindle on: no speed command should be emitted.
        args["spindlespeed"] = 0
        results = generator.generate(**args)
        self.assertEqual(len(results), 2)
        Path.Log.track(results)
        # Negative spindlespeed must be rejected.
        args["spindlespeed"] = -10
        self.assertRaises(ValueError, generator.generate, **args)
|
mako | _ast_util | # mako/_ast_util.py
# Copyright (C) 2006-2016 the Mako authors and contributors <see AUTHORS file>
#
# This module is part of Mako and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""
ast
~~~
The `ast` module helps Python applications to process trees of the Python
abstract syntax grammar. The abstract syntax itself might change with
each Python release; this module helps to find out programmatically what
the current grammar looks like and allows modifications of it.
An abstract syntax tree can be generated by passing `ast.PyCF_ONLY_AST` as
a flag to the `compile()` builtin function or by using the `parse()`
function from this module. The result will be a tree of objects whose
classes all inherit from `ast.AST`.
A modified abstract syntax tree can be compiled into a Python code object
using the built-in `compile()` function.
Additionally various helper functions are provided that make working with
the trees simpler. The main intention of the helper functions and this
module in general is to provide an easy to use interface for libraries
that work tightly with the python syntax (template engines for example).
:copyright: Copyright 2008 by Armin Ronacher.
:license: Python License.
"""
from _ast import * # noqa
from mako.compat import arg_stringname
# Mapping from boolean-operator AST node types to their surface syntax.
BOOLOP_SYMBOLS = {And: "and", Or: "or"}
# Mapping from binary-operator AST node types to their surface syntax.
# NOTE(review): Pow ("**") and MatMult ("@") are absent — verify whether the
# source generator ever needs them.
BINOP_SYMBOLS = {
    Add: "+",
    Sub: "-",
    Mult: "*",
    Div: "/",
    FloorDiv: "//",
    Mod: "%",
    LShift: "<<",
    RShift: ">>",
    BitOr: "|",
    BitAnd: "&",
    BitXor: "^",
}
# Mapping from comparison-operator AST node types to their surface syntax.
CMPOP_SYMBOLS = {
    Eq: "==",
    Gt: ">",
    GtE: ">=",
    In: "in",
    Is: "is",
    IsNot: "is not",
    Lt: "<",
    LtE: "<=",
    NotEq: "!=",
    NotIn: "not in",
}
# Mapping from unary-operator AST node types to their surface syntax.
UNARYOP_SYMBOLS = {Invert: "~", Not: "not", UAdd: "+", USub: "-"}
# Union of all four operator tables, for generic symbol lookups.
ALL_SYMBOLS = {}
ALL_SYMBOLS.update(BOOLOP_SYMBOLS)
ALL_SYMBOLS.update(BINOP_SYMBOLS)
ALL_SYMBOLS.update(CMPOP_SYMBOLS)
ALL_SYMBOLS.update(UNARYOP_SYMBOLS)
def parse(expr, filename="<unknown>", mode="exec"):
    """Parse *expr* and return the resulting AST node."""
    return compile(expr, filename, mode, flags=PyCF_ONLY_AST)
def to_source(node, indent_with=" " * 4):
    """
    Convert a node tree back into python sourcecode.

    This is useful for debugging purposes, especially when dealing with custom
    asts not generated by python itself. Note that the source may be evaluable
    even when the AST itself is not compilable/evaluable, because the AST
    carries extra data that is dropped during conversion.

    Each level of indentation is replaced with `indent_with`; the default is
    four spaces as suggested by PEP 8, but it may be adjusted to match the
    application's styleguide.
    """
    source_gen = SourceGenerator(indent_with)
    source_gen.visit(node)
    return "".join(source_gen.result)
def dump(node):
    """
    Return a very verbose string representation of *node*, useful for
    debugging purposes.
    """
    if not isinstance(node, AST):
        raise TypeError("expected AST, got %r" % node.__class__.__name__)

    def _format(value):
        # AST nodes render as ClassName(field=..., ...); lists render each
        # element; everything else falls back to repr().
        if isinstance(value, AST):
            rendered_fields = ", ".join(
                "%s=%s" % (name, _format(child)) for name, child in iter_fields(value)
            )
            return "%s(%s)" % (value.__class__.__name__, rendered_fields)
        if isinstance(value, list):
            return "[%s]" % ", ".join(_format(item) for item in value)
        return repr(value)

    return _format(node)
def copy_location(new_node, old_node):
    """
    Copy the source location hint (`lineno` and `col_offset`) from the
    old to the new node if possible and return the new one.
    """
    for attr in ("lineno", "col_offset"):
        # Copy only when both node types declare the attribute and the old
        # node actually carries a value for it.
        declared_on_both = attr in old_node._attributes and attr in new_node._attributes
        if declared_on_both and hasattr(old_node, attr):
            setattr(new_node, attr, getattr(old_node, attr))
    return new_node
def fix_missing_locations(node):
    """
    Recursively fill in missing `lineno` and `col_offset` attributes.

    Without location information the compiler will abort the compilation.
    This helper copies the line number and column offset of the parent node
    to any child node lacking them. Unlike `copy_location` this works
    recursively and won't touch nodes that already carry location info.
    """

    def _propagate(current, lineno, col_offset):
        # A node with its own location becomes the new reference point;
        # otherwise it inherits the parent's.
        if "lineno" in current._attributes:
            if hasattr(current, "lineno"):
                lineno = current.lineno
            else:
                current.lineno = lineno
        if "col_offset" in current._attributes:
            if hasattr(current, "col_offset"):
                col_offset = current.col_offset
            else:
                current.col_offset = col_offset
        for child in iter_child_nodes(current):
            _propagate(child, lineno, col_offset)

    _propagate(node, 1, 0)
    return node
def increment_lineno(node, n=1):
    """
    Increment the line numbers of all nodes by `n` if they have line number
    attributes. This is useful to "move code" to a different location in a
    file.
    """
    # BUG FIX: the previous loop `for node in zip((node,), walk(node))` yielded
    # a single (node, node) tuple, so `_attributes` was looked up on a tuple
    # and the function raised AttributeError on first use. Iterate every node
    # in the tree instead (cf. CPython's ast.increment_lineno).
    for child in walk(node):
        if "lineno" in child._attributes:
            child.lineno = getattr(child, "lineno", 0) + n
def iter_fields(node):
    """Iterate over all fields of a node, only yielding existing fields."""
    # CPython 2.5 compat: `_fields` may be missing or empty.
    for field in getattr(node, "_fields", None) or ():
        if hasattr(node, field):
            yield field, getattr(node, field)
def get_fields(node):
    """Like `iter_fields` but returns the existing fields as a dict."""
    return dict(iter_fields(node))
def iter_child_nodes(node):
    """Iterate over all direct child nodes of a node."""
    for _name, value in iter_fields(node):
        # A field holds either a single AST node, a list possibly containing
        # AST nodes, or a plain value (ignored).
        if isinstance(value, list):
            for element in value:
                if isinstance(element, AST):
                    yield element
        elif isinstance(value, AST):
            yield value
def get_child_nodes(node):
    """Like `iter_child_nodes` but returns the direct children as a list."""
    return list(iter_child_nodes(node))
def get_compile_mode(node):
    """
    Get the mode for `compile` of a given node. If the node is not a `mod`
    node (`Expression`, `Module` etc.) a `TypeError` is thrown.
    """
    if not isinstance(node, mod):
        raise TypeError("expected mod node, got %r" % node.__class__.__name__)
    # BUG FIX: the fallback used to be "expr", which is not a valid mode for
    # compile(); Module (and any other mod subtype) must compile with "exec".
    return {Expression: "eval", Interactive: "single"}.get(node.__class__, "exec")
def get_docstring(node):
    """
    Return the docstring for the given node or `None` if no docstring can be
    found. If the node provided does not accept docstrings a `TypeError`
    will be raised.
    """
    # NOTE(review): this relies on the deprecated `Str` node type, which was
    # removed from the AST in Python 3.12 — verify availability before use on
    # modern interpreters.
    if not isinstance(node, (FunctionDef, ClassDef, Module)):
        raise TypeError("%r can't have docstrings" % node.__class__.__name__)
    # A docstring is a string literal appearing as the first body statement.
    if node.body and isinstance(node.body[0], Str):
        return node.body[0].s
def walk(node):
    """
    Iterate over all nodes in breadth-first order. Useful when you only want
    to modify nodes in place and don't care about context or ordering.
    """
    from collections import deque

    queue = deque([node])
    while queue:
        current = queue.popleft()
        queue.extend(iter_child_nodes(current))
        yield current
class NodeVisitor(object):
    """
    Walks the abstract syntax tree and calls a visitor function for every node
    found. The visitor functions may return values which are forwarded by the
    `visit` method.

    By default the visitor function for a node is ``'visit_'`` + the class
    name of the node, so a `TryFinally` node is handled by `visit_TryFinally`.
    Override `get_visitor` to change this. When no visitor function exists
    for a node, `generic_visit` is used instead.

    Do not use `NodeVisitor` to apply changes to nodes while traversing; use
    `NodeTransformer` for that.
    """

    def get_visitor(self, node):
        """
        Return the visitor function for this node, or `None` if none exists
        (in which case the generic visit function is used).
        """
        return getattr(self, "visit_" + type(node).__name__, None)

    def visit(self, node):
        """Visit a node, dispatching to its specific or generic visitor."""
        visitor = self.get_visitor(node)
        return visitor(node) if visitor is not None else self.generic_visit(node)

    def generic_visit(self, node):
        """Called if no explicit visitor function exists for a node."""
        for _field, value in iter_fields(node):
            if isinstance(value, AST):
                self.visit(value)
            elif isinstance(value, list):
                for item in value:
                    if isinstance(item, AST):
                        self.visit(item)
class NodeTransformer(NodeVisitor):
    """
    Walks the abstract syntax tree and allows modifications of nodes.
    The `NodeTransformer` will walk the AST and use the return value of the
    visitor functions to replace or remove the old node. If the return
    value of the visitor function is `None` the node will be removed
    from the previous location otherwise it's replaced with the return
    value. The return value may be the original node in which case no
    replacement takes place.
    Here an example transformer that rewrites all `foo` to `data['foo']`::
        class RewriteName(NodeTransformer):
            def visit_Name(self, node):
                return copy_location(Subscript(
                    value=Name(id='data', ctx=Load()),
                    slice=Index(value=Str(s=node.id)),
                    ctx=node.ctx
                ), node)
    Keep in mind that if the node you're operating on has child nodes
    you must either transform the child nodes yourself or call the generic
    visit function for the node first.
    Nodes that were part of a collection of statements (that applies to
    all statement nodes) may also return a list of nodes rather than just
    a single node.
    Usually you use the transformer like this::
        node = YourTransformer().visit(node)
    """
    def generic_visit(self, node):
        # Replace each child with the visitor's return value: None removes the
        # child, a non-AST return (a sequence) is spliced into list fields.
        for field, old_value in iter_fields(node):
            # NOTE(review): this getattr re-fetches the value iter_fields just
            # yielded; redundant but harmless.
            old_value = getattr(node, field, None)
            if isinstance(old_value, list):
                new_values = []
                for value in old_value:
                    if isinstance(value, AST):
                        value = self.visit(value)
                        if value is None:
                            # Visitor asked for removal: drop this child.
                            continue
                        elif not isinstance(value, AST):
                            # Visitor returned a sequence: splice it in.
                            new_values.extend(value)
                            continue
                    new_values.append(value)
                # Mutate in place so the node keeps the same list object.
                old_value[:] = new_values
            elif isinstance(old_value, AST):
                new_node = self.visit(old_value)
                if new_node is None:
                    # Removing a single-node field deletes the attribute.
                    delattr(node, field)
                else:
                    setattr(node, field, new_node)
        return node
class SourceGenerator(NodeVisitor):
    """
    This visitor is able to transform a well formed syntax tree into python
    sourcecode. For more details have a look at the docstring of the
    `node_to_source` function.
    """
    def __init__(self, indent_with):
        # result: list of source fragments, joined by to_source().
        # new_lines: pending newline count, flushed lazily by write().
        self.result = []
        self.indent_with = indent_with
        self.indentation = 0
        self.new_lines = 0
    def write(self, x):
        # Flush any pending newlines (plus current indentation) before
        # appending the fragment; leading newlines at the very start of the
        # output are suppressed.
        if self.new_lines:
            if self.result:
                self.result.append("\n" * self.new_lines)
            self.result.append(self.indent_with * self.indentation)
            self.new_lines = 0
        self.result.append(x)
    def newline(self, n=1):
        # Request at least n newlines before the next write; requests merge.
        self.new_lines = max(self.new_lines, n)
    def body(self, statements):
        # NOTE(review): `self.new_line` (singular) is never read anywhere —
        # the real attribute is `new_lines`; this assignment looks like a
        # typo for `self.newline()` and is effectively dead. Verify upstream.
        self.new_line = True
        self.indentation += 1
        for stmt in statements:
            self.visit(stmt)
        self.indentation -= 1
    def body_or_else(self, node):
        # Emit the body, then an `else:` clause when the node has one.
        self.body(node.body)
        if node.orelse:
            self.newline()
            self.write("else:")
            self.body(node.orelse)
    def signature(self, node):
        # Render a function signature: positional args (with defaults
        # right-aligned), then *vararg and **kwarg.
        want_comma = []
        def write_comma():
            if want_comma:
                self.write(", ")
            else:
                want_comma.append(True)
        padding = [None] * (len(node.args) - len(node.defaults))
        for arg, default in zip(node.args, padding + node.defaults):
            write_comma()
            self.visit(arg)
            if default is not None:
                self.write("=")
                self.visit(default)
        if node.vararg is not None:
            write_comma()
            self.write("*" + arg_stringname(node.vararg))
        if node.kwarg is not None:
            write_comma()
            self.write("**" + arg_stringname(node.kwarg))
    def decorators(self, node):
        # One `@decorator` per line, before the decorated definition.
        for decorator in node.decorator_list:
            self.newline()
            self.write("@")
            self.visit(decorator)
    # Statements
    def visit_Assign(self, node):
        self.newline()
        for idx, target in enumerate(node.targets):
            if idx:
                self.write(", ")
            self.visit(target)
            self.write(" = ")
        self.visit(node.value)
    def visit_AugAssign(self, node):
        self.newline()
        self.visit(node.target)
        self.write(BINOP_SYMBOLS[type(node.op)] + "=")
        self.visit(node.value)
    def visit_ImportFrom(self, node):
        self.newline()
        # node.level dots encode a relative import.
        self.write("from %s%s import " % ("." * node.level, node.module))
        for idx, item in enumerate(node.names):
            if idx:
                self.write(", ")
            self.write(item)
    def visit_Import(self, node):
        self.newline()
        for item in node.names:
            self.write("import ")
            self.visit(item)
    def visit_Expr(self, node):
        self.newline()
        self.generic_visit(node)
    def visit_FunctionDef(self, node):
        self.newline(n=2)
        self.decorators(node)
        self.newline()
        self.write("def %s(" % node.name)
        self.signature(node.args)
        self.write("):")
        self.body(node.body)
    def visit_ClassDef(self, node):
        # The opening "(" is emitted lazily by the first base/keyword, so a
        # bare class gets no parentheses at all.
        have_args = []
        def paren_or_comma():
            if have_args:
                self.write(", ")
            else:
                have_args.append(True)
                self.write("(")
        self.newline(n=3)
        self.decorators(node)
        self.newline()
        self.write("class %s" % node.name)
        for base in node.bases:
            paren_or_comma()
            self.visit(base)
        # XXX: the if here is used to keep this module compatible
        # with python 2.6.
        if hasattr(node, "keywords"):
            for keyword in node.keywords:
                paren_or_comma()
                self.write(keyword.arg + "=")
                self.visit(keyword.value)
            if getattr(node, "starargs", None):
                paren_or_comma()
                self.write("*")
                self.visit(node.starargs)
            if getattr(node, "kwargs", None):
                paren_or_comma()
                self.write("**")
                self.visit(node.kwargs)
        self.write(have_args and "):" or ":")
        self.body(node.body)
    def visit_If(self, node):
        self.newline()
        self.write("if ")
        self.visit(node.test)
        self.write(":")
        self.body(node.body)
        # Collapse nested single-If orelse chains into `elif` clauses.
        while True:
            else_ = node.orelse
            if len(else_) == 1 and isinstance(else_[0], If):
                node = else_[0]
                self.newline()
                self.write("elif ")
                self.visit(node.test)
                self.write(":")
                self.body(node.body)
            else:
                self.newline()
                self.write("else:")
                self.body(else_)
                break
    def visit_For(self, node):
        self.newline()
        self.write("for ")
        self.visit(node.target)
        self.write(" in ")
        self.visit(node.iter)
        self.write(":")
        self.body_or_else(node)
    def visit_While(self, node):
        self.newline()
        self.write("while ")
        self.visit(node.test)
        self.write(":")
        self.body_or_else(node)
    def visit_With(self, node):
        self.newline()
        self.write("with ")
        self.visit(node.context_expr)
        if node.optional_vars is not None:
            self.write(" as ")
            self.visit(node.optional_vars)
        self.write(":")
        self.body(node.body)
    def visit_Pass(self, node):
        self.newline()
        self.write("pass")
    def visit_Print(self, node):
        # XXX: python 2.6 only
        self.newline()
        self.write("print ")
        want_comma = False
        if node.dest is not None:
            self.write(" >> ")
            self.visit(node.dest)
            want_comma = True
        for value in node.values:
            if want_comma:
                self.write(", ")
            self.visit(value)
            want_comma = True
        if not node.nl:
            self.write(",")
    def visit_Delete(self, node):
        self.newline()
        self.write("del ")
        # NOTE(review): `enumerate(node)` iterates the node itself, but AST
        # nodes are not iterable — this almost certainly should be
        # `enumerate(node.targets)`. Verify against upstream before relying
        # on Delete rendering.
        for idx, target in enumerate(node):
            if idx:
                self.write(", ")
            self.visit(target)
    def visit_TryExcept(self, node):
        self.newline()
        self.write("try:")
        self.body(node.body)
        for handler in node.handlers:
            self.visit(handler)
    def visit_TryFinally(self, node):
        self.newline()
        self.write("try:")
        self.body(node.body)
        self.newline()
        self.write("finally:")
        self.body(node.finalbody)
    def visit_Global(self, node):
        self.newline()
        self.write("global " + ", ".join(node.names))
    def visit_Nonlocal(self, node):
        self.newline()
        self.write("nonlocal " + ", ".join(node.names))
    def visit_Return(self, node):
        self.newline()
        self.write("return ")
        self.visit(node.value)
    def visit_Break(self, node):
        self.newline()
        self.write("break")
    def visit_Continue(self, node):
        self.newline()
        self.write("continue")
    def visit_Raise(self, node):
        # XXX: Python 2.6 / 3.0 compatibility — py3 nodes have exc/cause,
        # py2 nodes have type/inst/tback.
        self.newline()
        self.write("raise")
        if hasattr(node, "exc") and node.exc is not None:
            self.write(" ")
            self.visit(node.exc)
            if node.cause is not None:
                self.write(" from ")
                self.visit(node.cause)
        elif hasattr(node, "type") and node.type is not None:
            self.visit(node.type)
            if node.inst is not None:
                self.write(", ")
                self.visit(node.inst)
            if node.tback is not None:
                self.write(", ")
                self.visit(node.tback)
    # Expressions
    def visit_Attribute(self, node):
        self.visit(node.value)
        self.write("." + node.attr)
    def visit_Call(self, node):
        want_comma = []
        def write_comma():
            if want_comma:
                self.write(", ")
            else:
                want_comma.append(True)
        self.visit(node.func)
        self.write("(")
        for arg in node.args:
            write_comma()
            self.visit(arg)
        for keyword in node.keywords:
            write_comma()
            self.write(keyword.arg + "=")
            self.visit(keyword.value)
        if getattr(node, "starargs", None):
            write_comma()
            self.write("*")
            self.visit(node.starargs)
        if getattr(node, "kwargs", None):
            write_comma()
            self.write("**")
            self.visit(node.kwargs)
        self.write(")")
    def visit_Name(self, node):
        self.write(node.id)
    def visit_NameConstant(self, node):
        self.write(str(node.value))
    def visit_arg(self, node):
        self.write(node.arg)
    def visit_Str(self, node):
        self.write(repr(node.s))
    def visit_Bytes(self, node):
        self.write(repr(node.s))
    def visit_Num(self, node):
        self.write(repr(node.n))
    def visit_Tuple(self, node):
        self.write("(")
        # idx stays -1 for the empty tuple; a one-element tuple (idx == 0,
        # falsy) gets the trailing ",)" form.
        idx = -1
        for idx, item in enumerate(node.elts):
            if idx:
                self.write(", ")
            self.visit(item)
        self.write(idx and ")" or ",)")
    # Factory producing the visitor for a comma-separated sequence literal
    # between the given delimiters (list/set).
    def sequence_visit(left, right):
        def visit(self, node):
            self.write(left)
            for idx, item in enumerate(node.elts):
                if idx:
                    self.write(", ")
                self.visit(item)
            self.write(right)
        return visit
    visit_List = sequence_visit("[", "]")
    visit_Set = sequence_visit("{", "}")
    del sequence_visit
    def visit_Dict(self, node):
        self.write("{")
        for idx, (key, value) in enumerate(zip(node.keys, node.values)):
            if idx:
                self.write(", ")
            self.visit(key)
            self.write(": ")
            self.visit(value)
        self.write("}")
    def visit_BinOp(self, node):
        self.write("(")
        self.visit(node.left)
        self.write(" %s " % BINOP_SYMBOLS[type(node.op)])
        self.visit(node.right)
        self.write(")")
    def visit_BoolOp(self, node):
        self.write("(")
        for idx, value in enumerate(node.values):
            if idx:
                self.write(" %s " % BOOLOP_SYMBOLS[type(node.op)])
            self.visit(value)
        self.write(")")
    def visit_Compare(self, node):
        self.write("(")
        self.visit(node.left)
        for op, right in zip(node.ops, node.comparators):
            self.write(" %s " % CMPOP_SYMBOLS[type(op)])
            self.visit(right)
        self.write(")")
    def visit_UnaryOp(self, node):
        self.write("(")
        op = UNARYOP_SYMBOLS[type(node.op)]
        self.write(op)
        if op == "not":
            self.write(" ")
        self.visit(node.operand)
        self.write(")")
    def visit_Subscript(self, node):
        self.visit(node.value)
        self.write("[")
        self.visit(node.slice)
        self.write("]")
    def visit_Slice(self, node):
        if node.lower is not None:
            self.visit(node.lower)
        self.write(":")
        if node.upper is not None:
            self.visit(node.upper)
        if node.step is not None:
            self.write(":")
            # A literal `None` step is omitted ("a[::]" rather than "a[::None]").
            if not (isinstance(node.step, Name) and node.step.id == "None"):
                self.visit(node.step)
    def visit_ExtSlice(self, node):
        # NOTE(review): `for idx, item in node.dims` unpacks the dim nodes
        # themselves instead of enumerating them — this looks like it should
        # be `enumerate(node.dims)`. Verify against upstream before relying
        # on ExtSlice rendering.
        for idx, item in node.dims:
            if idx:
                self.write(", ")
            self.visit(item)
    def visit_Yield(self, node):
        self.write("yield ")
        self.visit(node.value)
    def visit_Lambda(self, node):
        self.write("lambda ")
        self.signature(node.args)
        self.write(": ")
        self.visit(node.body)
    def visit_Ellipsis(self, node):
        self.write("Ellipsis")
    # Factory producing the visitor for a comprehension between the given
    # delimiters (list comp / generator expression / set comp).
    def generator_visit(left, right):
        def visit(self, node):
            self.write(left)
            self.visit(node.elt)
            for comprehension in node.generators:
                self.visit(comprehension)
            self.write(right)
        return visit
    visit_ListComp = generator_visit("[", "]")
    visit_GeneratorExp = generator_visit("(", ")")
    visit_SetComp = generator_visit("{", "}")
    del generator_visit
    def visit_DictComp(self, node):
        self.write("{")
        self.visit(node.key)
        self.write(": ")
        self.visit(node.value)
        for comprehension in node.generators:
            self.visit(comprehension)
        self.write("}")
    def visit_IfExp(self, node):
        self.visit(node.body)
        self.write(" if ")
        self.visit(node.test)
        self.write(" else ")
        self.visit(node.orelse)
    def visit_Starred(self, node):
        self.write("*")
        self.visit(node.value)
    def visit_Repr(self, node):
        # XXX: python 2.6 only
        self.write("`")
        self.visit(node.value)
        self.write("`")
    # Helper Nodes
    def visit_alias(self, node):
        self.write(node.name)
        if node.asname is not None:
            self.write(" as " + node.asname)
    def visit_comprehension(self, node):
        self.write(" for ")
        self.visit(node.target)
        self.write(" in ")
        self.visit(node.iter)
        if node.ifs:
            for if_ in node.ifs:
                self.write(" if ")
                self.visit(if_)
    def visit_excepthandler(self, node):
        self.newline()
        self.write("except")
        if node.type is not None:
            self.write(" ")
            self.visit(node.type)
            if node.name is not None:
                self.write(" as ")
                self.visit(node.name)
        self.write(":")
        self.body(node.body)
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.