section stringlengths 2 30 | filename stringlengths 1 82 | text stringlengths 783 28M |
|---|---|---|
sgui | main | #!/usr/bin/python3
"""
This file is part of the Stargate project, Copyright Stargate Team
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation version 3 of the License.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
"""
import gc
import os
import subprocess
import sys
import time
from sglib import constants
from sglib.ipc import *
from sglib.lib import engine
from sglib.lib import strings as sg_strings
from sglib.lib import util
from sglib.lib.appimage import *
from sglib.lib.engine import *
from sglib.lib.pidfile import create_pidfile
from sglib.lib.process import run_process
from sglib.lib.translate import _
from sglib.lib.util import *
from sglib.log import LOG
from sglib.math import clip_value, db_to_lin
from sglib.models import stargate as sg_project
from sglib.models import theme
from sgui import sgqt, widgets
from sgui.daw import entrypoint as daw
from sgui.daw.item_editor.audio._shared import remove_path_from_painter_path_cache
from sgui.ipc.socket import SocketIPCServer, SocketIPCTransport
from sgui.plugins import SgPluginUiDict
from sgui.project import (
StargateProjectVersionError,
check_project_version,
new_project,
open_project,
set_project,
)
from sgui.sgqt import *
from sgui.transport import TransportWidget
from sgui.util import (
check_for_empty_directory,
check_for_rw_perms,
get_font,
get_fps,
log_screen_info,
set_font,
show_generic_exception,
svg_to_pixmap,
ui_scaler_factory,
)
from sgui.widgets.file_browser import open_bookmarks
from . import shared
from .updates import ui_check_updates
# Index of the DAW host in the host combobox / stacked widgets
HOST_INDEX_DAW = 0
# Index of the wave editor host in the host combobox / stacked widgets
HOST_INDEX_WAVE_EDIT = 1
# Path of the currently open project file, if any
# NOTE(review): appears to be assigned elsewhere; not written in this chunk
PROJECT_FILE = None
def handle_engine_error(exit_code):
    """Report an audio engine exit to the user.

    Maps known engine exit codes to a human-readable message, stops the
    transport, logs and shows a warning dialog, and for audio device
    errors (1000-1002) opens the hardware settings dialog.

    Args:
        exit_code: Return code of the audio engine subprocess.
            0 means a clean shutdown and produces no warning.
    """
    if exit_code == 0:
        LOG.info("Audio engine stopped with exit code 0, no errors.")
        return
    if exit_code == 1000:
        msg = "Audio device not found"
    elif exit_code == 1001:
        msg = "Device config not found"
    elif exit_code == 1002:
        msg = "Unknown error opening audio device"
    elif exit_code == 1003:
        # BUGFIX: this was a separate 'if', so its 'else' branch
        # overwrote the specific 1000-1002 messages with the generic one
        msg = (
            "The audio device was busy, make sure that no other applications "
            "are using the device and try restarting Stargate"
        )
    else:
        msg = (
            f"The audio engine stopped with exit code {exit_code}, "
            "please try restarting Stargate"
        )
    shared.TRANSPORT.stop_button.setChecked(True)
    LOG.error(msg)
    QMessageBox.warning(
        shared.MAIN_WINDOW,
        "Error",
        msg,
    )
    # Device-related errors: let the user pick a different device
    if exit_code >= 1000 and exit_code <= 1002:
        shared.MAIN_WINDOW.on_change_audio_settings()
def engine_lib_callback(a_path, a_msg):
    """Forward a raw engine library callback to the main window instance.

    NOTE(review): relies on a module-level MAIN_WINDOW global that is
    presumably assigned elsewhere in this module -- confirm.
    """
    MAIN_WINDOW.engine_lib_callback(a_path, a_msg)
class SgMainWindow(QWidget):
    """Top-level main window hosting the DAW and wave editor UIs."""
    # Maps QWERTY keys to semitone offsets (0-11) for playing MIDI notes
    # from the computer keyboard, see _key_event
    MIDI_NOTES = {
        "q": 0,
        "w": 1,
        "e": 2,
        "r": 3,
        "t": 4,
        "y": 5,
        "u": 6,
        "i": 7,
        "o": 8,
        "p": 9,
        "[": 10,
        "]": 11,
    }
    # Signals used to marshal engine messages onto the UI thread
    daw_callback = Signal(str)
    wave_edit_callback = Signal(str)
    engine_mon_callback = Signal(str)
    def __init__(self):
        # All widget construction is deferred to setup(); this only
        # initializes the QWidget base class
        super().__init__()
    def setup(self, scaler):
        """Construct the entire main window UI.

        Creates the engine IPC connections, the transport panel, both
        host modules (DAW and wave editor), the full menu bar, the IPC
        socket server and the engine monitor timer.  Must be called once
        after __init__.

        Args:
            scaler: UI scaler passed through to the transport widget,
                presumably produced by ui_scaler_factory() -- confirm.
        """
        self.suppress_resize_events = False
        shared.MAIN_WINDOW = self
        # --- IPC connections to the audio engine ---
        constants.IPC_TRANSPORT = SocketIPCTransport()
        with_audio = constants.IPC_TRANSPORT is not None
        constants.IPC = StargateIPC(
            constants.IPC_TRANSPORT,
            with_audio,
        )
        constants.DAW_IPC = DawIPC(
            constants.IPC_TRANSPORT,
            with_audio,
        )
        constants.WAVE_EDIT_IPC = WaveEditIPC(
            constants.IPC_TRANSPORT,
            with_audio,
        )
        # --- Transport panel ---
        shared.TRANSPORT = TransportWidget(scaler)
        self.setObjectName("plugin_ui")
        self.setMinimumSize(900, 600)
        # Last directory used by the audio converter dialogs
        self.last_ac_dir = util.HOME
        self.setObjectName("plugin_ui")
        self.main_layout = QVBoxLayout(self)
        self.main_layout.setContentsMargins(0, 0, 0, 0)
        self.transport_splitter = QSplitter(QtCore.Qt.Orientation.Vertical)
        self.main_layout.addWidget(self.transport_splitter)
        self.transport_widget = QWidget()
        self.transport_widget.setMaximumHeight(51)
        self.transport_widget.setObjectName("transport_panel")
        self.transport_hlayout = QHBoxLayout(self.transport_widget)
        self.transport_hlayout.setContentsMargins(2, 2, 2, 2)
        self.transport_splitter.addWidget(self.transport_widget)
        self.transport_widget.setSizePolicy(
            QSizePolicy.Policy.Minimum,
            QSizePolicy.Policy.Minimum,
        )
        self.transport_hlayout.addWidget(
            shared.TRANSPORT.group_box,
            alignment=QtCore.Qt.AlignmentFlag.AlignLeft,
        )
        # Per-host transport widgets, switched together with the host
        self.transport_stack = QStackedWidget()
        self.transport_hlayout.addWidget(
            self.transport_stack,
            alignment=QtCore.Qt.AlignmentFlag.AlignLeft,
        )
        self.transport_hlayout.addItem(
            QSpacerItem(
                1,
                1,
                QSizePolicy.Policy.Expanding,
            ),
        )
        if shared.HIDE_HINT_BOX:
            sgqt.HINT_BOX.hide()
        self.transport_hlayout.addWidget(sgqt.HINT_BOX)
        sgqt.HINT_BOX.setFixedHeight(50)
        sgqt.HINT_BOX.setFixedWidth(510)
        self.main_stack = QStackedWidget()
        self.transport_splitter.addWidget(self.main_stack)
        # --- Host modules: DAW and wave editor ---
        SPLASH_SCREEN.status_update(_("Loading DAW"))
        daw.init()
        # Must do it here so that everything is initialized
        daw.shared.HARDWARE_WIDGET.hardware_settings_button.pressed.connect(
            self.on_change_audio_settings,
        )
        SPLASH_SCREEN.status_update(_("Loading Wave Editor"))
        from sgui import wave_edit
        wave_edit.init()
        button = wave_edit.TRANSPORT.audio_inputs.hardware_settings_button
        button.pressed.connect(self.on_change_audio_settings)
        self.wave_editor_module = wave_edit
        shared.HOST_MODULES = (daw, wave_edit)
        self.host_windows = tuple(x.MAIN_WINDOW for x in shared.HOST_MODULES)
        self.current_module = daw
        self.current_window = daw.MAIN_WINDOW
        # Engine CPU/RAM statistics are displayed by both hosts
        self.engine_mon_callback.connect(daw.MAIN_WINDOW.engine_mon_label.setText)
        self.engine_mon_callback.connect(wave_edit.MAIN_WINDOW.engine_mon_label.setText)
        for f_module in shared.HOST_MODULES:
            self.transport_stack.addWidget(f_module.TRANSPORT.group_box)
        for f_window in self.host_windows:
            self.main_stack.addWidget(f_window)
        shared.IGNORE_CLOSE_EVENT = True
        # --- File menu ---
        self.menu_bar = QMenu(shared.TRANSPORT.menu_button)
        shared.TRANSPORT.menu_button.setMenu(self.menu_bar)
        self.menu_file = self.menu_bar.addMenu(_("File"))
        self.new_action = QAction(_("New..."), self.menu_file)
        self.menu_file.addAction(self.new_action)
        self.new_action.setToolTip("Create a new project and open it")
        self.new_action.triggered.connect(self.on_new)
        self.new_action.setShortcut(QKeySequence.StandardKey.New)
        self.open_action = QAction(_("Open..."), self.menu_file)
        self.menu_file.addAction(self.open_action)
        self.open_action.setToolTip("Open another project, closing this project")
        self.open_action.triggered.connect(self.on_open)
        self.open_action.setShortcut(QKeySequence.StandardKey.Open)
        self.close_action = QAction(_("Close Project..."), self.menu_file)
        self.menu_file.addAction(self.close_action)
        self.close_action.setToolTip(
            "Close this project, return to the welcome screen to configure "
            "hardware settings, recover projects or create/open another "
            "project"
        )
        self.close_action.triggered.connect(self.on_close)
        self.save_action = QAction("Save", self.menu_file)
        self.menu_file.addAction(self.save_action)
        self.save_action.setToolTip(
            "Projects are automatically saved everytime you change anything, "
            "this creates a timestamped backup that you can revert to later"
        )
        self.save_action.triggered.connect(self.on_save)
        self.save_action.setShortcut(QKeySequence.StandardKey.Save)
        self.save_as_action = QAction("Save As...", self.menu_file)
        self.menu_file.addAction(self.save_as_action)
        self.save_as_action.setToolTip(
            "Projects are automatically saved everytime you change anything, "
            "this creates a named backup that you can revert to later"
        )
        self.save_as_action.triggered.connect(self.on_save_as)
        self.save_as_action.setShortcut(QKeySequence.StandardKey.SaveAs)
        self.menu_file.addSeparator()
        self.offline_render_action = QAction("Render...", self.menu_file)
        self.menu_file.addAction(self.offline_render_action)
        self.offline_render_action.setToolTip(
            "Convert this project to an audio file. Set the region markers "
            "by right clicking on the sequencer timeline, or set the "
            "start/end markers in the wave editor"
        )
        self.offline_render_action.triggered.connect(self.on_offline_render)
        self.audio_device_action = QAction(
            "Hardware Settings...",
            self.menu_file,
        )
        self.menu_file.addAction(self.audio_device_action)
        self.audio_device_action.setToolTip(
            "Open the hardware settings dialog to change audio or MIDI "
            "device settings"
        )
        self.audio_device_action.triggered.connect(
            self.on_change_audio_settings,
        )
        self.menu_file.addSeparator()
        self.quit_action = QAction("Quit", self.menu_file)
        self.menu_file.addAction(self.quit_action)
        self.quit_action.setToolTip("Close Stargate DAW")
        self.quit_action.triggered.connect(shared.MAIN_STACKED_WIDGET.close)
        self.quit_action.setShortcut(QKeySequence.StandardKey.Quit)
        # self.menu_edit = self.menu_bar.addMenu(_("Edit"))
        # self.undo_action = self.menu_edit.addAction(_("Undo"))
        # self.undo_action.triggered.connect(self.on_undo)
        # self.undo_action.setShortcut(QKeySequence.StandardKey.Undo)
        # self.redo_action = self.menu_edit.addAction(_("Redo"))
        # self.redo_action.triggered.connect(self.on_redo)
        # self.redo_action.setShortcut(QKeySequence.StandardKey.Redo)
        # --- Appearance menu ---
        self.menu_appearance = self.menu_bar.addMenu(_("Appearance"))
        self.collapse_splitters_action = QAction(
            _("Toggle Collapse Transport"),
            self.menu_appearance,
        )
        self.menu_appearance.addAction(self.collapse_splitters_action)
        self.addAction(self.collapse_splitters_action)
        self.collapse_splitters_action.setToolTip(
            "Toggle collapsing the transport, create more room for the "
            "sequencer or wave editor"
        )
        self.collapse_splitters_action.triggered.connect(
            self.on_collapse_splitters,
        )
        self.collapse_splitters_action.setShortcut(QKeySequence("CTRL+Up"))
        self.menu_appearance.addSeparator()
        self.full_screen_action = QAction(
            "Toggle Full Screen",
            self.menu_appearance,
        )
        self.menu_appearance.addAction(self.full_screen_action)
        self.addAction(self.full_screen_action)
        self.full_screen_action.setToolTip(
            "Toggle between full screen mode and normal mode"
        )
        self.full_screen_action.triggered.connect(
            shared.MAIN_STACKED_WIDGET.toggle_full_screen
        )
        self.full_screen_action.setShortcut(QKeySequence("ALT+F12"))
        self.menu_appearance.addSeparator()
        self.open_theme_action = QAction("Open Theme...", self.menu_appearance)
        self.menu_appearance.addAction(self.open_theme_action)
        self.open_theme_action.setToolTip(
            "Open a new sgtheme file to change the appearance of Stargate. "
            "There are several factory themes, or create your own"
        )
        self.open_theme_action.triggered.connect(self.on_open_theme)
        self.default_theme_action = QAction(
            "Use Default Theme",
            self.menu_appearance,
        )
        self.menu_appearance.addAction(self.default_theme_action)
        self.default_theme_action.setToolTip(
            "Use the default theme, replacing any other theme you have used"
        )
        self.default_theme_action.triggered.connect(
            self.on_use_default_theme,
        )
        self.copy_theme_action = QAction(
            _("Copy Theme to New Theme..."),
            self.menu_appearance,
        )
        self.menu_appearance.addAction(self.copy_theme_action)
        self.copy_theme_action.setToolTip(
            "Create a copy of the current theme that you can customize and "
            "create your own theme from"
        )
        self.copy_theme_action.triggered.connect(self.on_copy_theme)
        self.menu_appearance.addSeparator()
        self.custom_font_action = QAction(
            _("Choose custom font..."),
            self.menu_appearance,
        )
        self.menu_appearance.addAction(self.custom_font_action)
        self.custom_font_action.setToolTip("Choose a custom font for Stargate")
        self.custom_font_action.triggered.connect(self.on_custom_font)
        self.clear_custom_font_action = QAction(
            _("Use default font"),
            self.menu_appearance,
        )
        self.menu_appearance.addAction(self.clear_custom_font_action)
        self.clear_custom_font_action.setToolTip(
            "Use the default font included with Stargate"
        )
        self.clear_custom_font_action.triggered.connect(
            self.on_clear_custom_font,
        )
        # --- Tools menu (external encoders unavailable on Windows) ---
        if not util.IS_WINDOWS:
            self.menu_tools = self.menu_bar.addMenu(_("Tools"))
            self.mp3_action = QAction(_("MP3 Converter..."), self.menu_tools)
            self.menu_tools.addAction(self.mp3_action)
            self.mp3_action.setToolTip(
                "Open a dialog to convert MP3 files to and from .wav files"
            )
            self.mp3_action.triggered.connect(self.mp3_converter_dialog)
            self.ogg_action = QAction(_("Ogg Converter..."), self.menu_tools)
            self.menu_tools.addAction(self.ogg_action)
            self.ogg_action.setToolTip(
                "Open a dialog to convert OGG files to and from .wav files"
            )
            self.ogg_action.triggered.connect(self.ogg_converter_dialog)
        # --- Help menu ---
        self.menu_help = self.menu_bar.addMenu(_("Help"))
        self.sfzinstruments_action = self.menu_help.addAction(
            _("Download SFZ instruments for Sampler1"),
        )
        self.sfzinstruments_action.triggered.connect(self.on_sfzinstruments)
        self.samplepack_action = self.menu_help.addAction(
            _("Download the official Stargate DAW sample pack"),
        )
        self.samplepack_action.triggered.connect(self.on_samplepack)
        self.version_action = self.menu_help.addAction(_("Version Info..."))
        self.version_action.triggered.connect(self.on_version)
        self.menu_bar.addSeparator()
        # --- Developer menu ---
        self.menu_devel = self.menu_bar.addMenu(_("Developer"))
        self.menu_devel_copy = self.menu_devel.addMenu(
            _("Copy to clipboard..."),
        )
        self.copy_gdb_cmd_action = self.menu_devel_copy.addAction(
            _("GDB run command"),
        )
        self.copy_gdb_cmd_action.triggered.connect(self.copy_gdb_run_cmd)
        self.copy_valgrind_cmd_action = self.menu_devel_copy.addAction(
            _("Valgrind command"),
        )
        self.copy_valgrind_cmd_action.triggered.connect(
            self.copy_valgrind_cmd,
        )
        self.menu_bar.addSeparator()
        self.check_updates_action = QAction(
            "Check for updates...",
            self.menu_bar,
        )
        self.menu_bar.addAction(self.check_updates_action)
        self.check_updates_action.setToolTip(
            "Check to see if you are running the latest version of " "Stargate DAW"
        )
        self.check_updates_action.triggered.connect(ui_check_updates)
        self.tooltips_action = QAction(_("Hide Hint Box"), self.menu_bar)
        self.tooltips_action.setToolTip(
            "Toggle hiding the hint box in the upper right corner of the " "screen"
        )
        self.menu_bar.addAction(self.tooltips_action)
        self.tooltips_action.setCheckable(True)
        self.tooltips_action.setChecked(shared.HIDE_HINT_BOX)
        self.tooltips_action.triggered.connect(self.set_tooltips_enabled)
        # --- AppImage start menu integration (only inside an AppImage) ---
        if all(x in os.environ for x in ("APPIMAGE", "APPDIR")):
            self.appimage_install_action = QAction(
                "Install AppImage to start menu",
                self.menu_bar,
            )
            self.appimage_install_action.setToolTip(
                "Install a start menu shortcut for your user account. You "
                "must run this again if you move the AppImage, and again "
                "every time you download a new version of the AppImage"
            )
            self.appimage_install_action.triggered.connect(
                self.appimage_install,
            )
            self.appimage_uninstall_action = QAction(
                "Uninstall AppImage from the start menu",
                self.menu_bar,
            )
            self.appimage_uninstall_action.setToolTip(
                "Uninstall Stargate DAW AppImage from the start menu. "
                "This will not uninstall a start menu shortcut installed "
                "by other packages"
            )
            self.appimage_uninstall_action.triggered.connect(
                self.appimage_uninstall,
            )
            # Show install or uninstall depending on current state
            desktop_dir, desktop_path = appimage_start_menu_path()
            if os.path.exists(desktop_path):
                self.menu_bar.addAction(self.appimage_uninstall_action)
            else:
                self.menu_bar.addAction(self.appimage_install_action)
        # Spacebar toggles playback globally
        self.spacebar_action = QAction(self)
        self.addAction(self.spacebar_action)
        self.spacebar_action.triggered.connect(self.on_spacebar)
        self.spacebar_action.setShortcut(
            QKeySequence(QtCore.Qt.Key.Key_Space),
        )
        # --- Engine IPC server and monitor timer ---
        self.subprocess_timer = None
        self.socket_server = None
        self.socket_server = SocketIPCServer(
            daw.MAIN_WINDOW.configure_callback,
            wave_edit.MAIN_WINDOW.configure_callback,
        )
        self.socket_server.start()
        if util.WITH_AUDIO:
            self.subprocess_timer = QtCore.QTimer(self)
            self.subprocess_timer.timeout.connect(self.subprocess_monitor)
            self.subprocess_timer.setSingleShot(False)
            self.subprocess_timer.start(1000)
        self.on_collapse_splitters(a_restore=True)
def appimage_install(self):
appimage_start_menu_install()
self.menu_bar.removeAction(
self.appimage_install_action,
)
self.menu_bar.addAction(
self.appimage_uninstall_action,
)
QMessageBox.information(None, "Success!", "Added start menu shortcut")
def appimage_uninstall(self):
desktop_dir, desktop_path = appimage_start_menu_path()
os.remove(desktop_path)
self.menu_bar.removeAction(
self.appimage_uninstall_action,
)
self.menu_bar.addAction(
self.appimage_install_action,
)
QMessageBox.information(
None,
"Success!",
"Removed start menu shortcut",
)
def set_tooltips_enabled(self):
hidden = self.tooltips_action.isChecked()
if hidden:
sgqt.HINT_BOX.hide()
else:
sgqt.HINT_BOX.show()
util.set_file_setting("hide-hint-box", 1 if hidden else 0)
def _key_event(self, ev, press):
super().keyPressEvent(ev)
if shared.IS_PLAYING or shared.IS_RECORDING:
return
try:
host = shared.TRANSPORT.current_host()
key = str(ev.text())
if host == HOST_INDEX_DAW and key in self.MIDI_NOTES:
rack = daw.shared.PLUGIN_RACK.rack_index()
note_offset = daw.shared.PLUGIN_RACK.octave() * 12
channel = daw.shared.PLUGIN_RACK.midi_channel()
note = self.MIDI_NOTES[key] + note_offset
assert note >= 0 and note <= 120, note
if press:
constants.DAW_IPC.note_on(rack, note, channel)
else:
constants.DAW_IPC.note_off(rack, note, channel)
except Exception as ex:
LOG.exception(ex)
    def keyPressEvent(self, ev):
        # Delegate to the shared handler; True means key press (note on)
        self._key_event(ev, True)
    def keyReleaseEvent(self, ev):
        # Delegate to the shared handler; False means key release (note off)
        self._key_event(ev, False)
def on_custom_font(self):
font = get_font()
font.choose_font()
def on_clear_custom_font(self):
font = get_font()
font.clear_font()
def _copy_to_clipboard(self, text):
cb = QApplication.clipboard()
cb.clear(mode=cb.Mode.Clipboard)
cb.setText(text, mode=cb.Mode.Clipboard)
def copy_gdb_run_cmd(self):
text = "run '{}' '{}' {} 0 30 1 --sleep".format(
util.INSTALL_PREFIX,
constants.PROJECT_DIR,
os.getpid(),
)
self._copy_to_clipboard(text)
def copy_valgrind_cmd(self):
bin_path, ext = os.path.splitext(util.BIN_PATH)
text = (
"valgrind '{}' '{}' '{}' {} 0 1 1 " "--no-hardware --single-thread"
).format(
bin_path + "-dbg" + ext,
util.INSTALL_PREFIX,
constants.PROJECT_DIR,
os.getpid(),
)
self._copy_to_clipboard(text)
def on_samplepack(self):
url = QtCore.QUrl(
"https://github.com/stargatedaw/stargate-sample-pack",
)
QDesktopServices.openUrl(url)
def on_sfzinstruments(self):
url = QtCore.QUrl(
"https://github.com/sfzinstruments",
)
QDesktopServices.openUrl(url)
def engine_lib_callback(self, a_path, a_msg):
f_path = a_path.decode("utf-8")
f_msg = a_msg.decode("utf-8")
self.engine_callback_dict[f_path].emit(f_msg)
def resizeEvent(self, a_event):
if self.suppress_resize_events:
return
super().resizeEvent(a_event)
def open_in_wave_editor(self, a_file):
shared.TRANSPORT.host_combobox.setCurrentIndex(HOST_INDEX_WAVE_EDIT)
self.main_stack.repaint()
self.wave_editor_module.WAVE_EDITOR.open_file(a_file)
# self.wave_editor_module.WAVE_EDITOR.sample_graph.repaint()
def set_host(self, a_index):
self.transport_stack.setCurrentIndex(a_index)
self.main_stack.setCurrentIndex(a_index)
self.current_module = shared.HOST_MODULES[a_index]
self.current_window = self.host_windows[a_index]
shared.CURRENT_HOST = a_index
constants.IPC.set_host(a_index)
self.current_module.TRANSPORT.set_time()
    def show_offline_rendering_wait_window_v2(
        self, a_cmd_list, a_file_name, f_file_name=None
    ):
        """Run an offline render subprocess and show a modal progress dialog.

        Polls the process on a 20ms timer, displays elapsed time, and
        enables OK (reload the rendered file) or allows Cancel (kill the
        process and delete partial output).

        Args:
            a_cmd_list: Command list for the render subprocess.
            a_file_name: Path of the output audio file.
            f_file_name: Sentinel file the render creates on completion;
                defaults to "<a_file_name>.finished".
        """
        if not f_file_name:
            f_file_name = "{}.finished".format(a_file_name)
        def ok_handler():
            # Reload the rendered audio and invalidate its waveform cache
            if os.path.isfile(a_file_name):
                constants.PROJECT.reload_audio_file(a_file_name)
                remove_path_from_painter_path_cache(a_file_name)
            f_window.close()
        def cancel_handler():
            # Kill the render process and clean up any partial output
            f_timer.stop()
            try:
                f_proc.kill()
            except Exception as ex:
                LOG.error(
                    f"Exception while killing render process\n{ex}",
                )
                LOG.exception(ex)
            if os.path.isfile(a_file_name):
                os.remove(a_file_name)
            if os.path.isfile(f_file_name):
                os.remove(f_file_name)
            f_window.close()
        def timeout_handler():
            # Poll the subprocess; when done, enable OK and report errors
            if f_proc.poll() is not None:
                f_timer.stop()
                f_ok.setEnabled(True)
                f_cancel.setEnabled(False)
                f_time_label.setText(
                    _("Finished in:"),
                )
                if os.path.isfile(f_file_name):
                    # Some times this does not exist, not sure why
                    os.remove(f_file_name)
                f_proc.communicate()[0]
                # f_output = f_proc.communicate()[0]
                # LOG.info(f_output)
                exit_code = f_proc.returncode
                if exit_code != 0:
                    f_window.close()
                    QMessageBox.warning(
                        self,
                        _("Error"),
                        _(f"Render exited with code {exit_code}"),
                    )
            else:
                # Still running: update the elapsed-time display
                f_elapsed_time = time.time() - f_start_time
                clock.display(str(round(f_elapsed_time, 1)))
        f_proc = run_process(a_cmd_list)
        f_start_time = time.time()
        f_window = QDialog(
            MAIN_WINDOW,
            (
                QtCore.Qt.WindowType.WindowTitleHint
                | QtCore.Qt.WindowType.FramelessWindowHint
            ),
        )
        f_window.setWindowTitle(_("Rendering to .wav, please wait"))
        f_window.setMinimumSize(360, 180)
        f_layout = QVBoxLayout()
        f_window.setLayout(f_layout)
        f_time_label = QLabel("Elapsed Time:")
        f_time_label.setMinimumWidth(360)
        f_layout.addWidget(f_time_label)
        clock = QLCDNumber()
        clock.setDigitCount(7)
        clock.setMinimumWidth(210)
        clock.display("0:00.0")
        f_layout.addWidget(clock)
        f_timer = QtCore.QTimer()
        f_timer.timeout.connect(timeout_handler)
        f_ok_cancel_layout = QHBoxLayout()
        f_ok_cancel_layout.addItem(
            QSpacerItem(1, 1, QSizePolicy.Policy.Expanding),
        )
        f_layout.addLayout(f_ok_cancel_layout)
        f_ok = QPushButton(_("OK"))
        f_ok.setMinimumWidth(75)
        f_ok.pressed.connect(ok_handler)
        # OK stays disabled until the render finishes
        f_ok.setEnabled(False)
        f_ok_cancel_layout.addWidget(f_ok)
        f_cancel = QPushButton(_("Cancel"))
        f_cancel.setMinimumWidth(75)
        f_cancel.pressed.connect(cancel_handler)
        f_ok_cancel_layout.addWidget(f_cancel)
        f_timer.start(20)
        f_window.exec()
def subprocess_monitor(self):
try:
if engine.ENGINE_PSUTIL:
cpu = round(engine.ENGINE_PSUTIL.cpu_percent(), 1)
mem = round(engine.ENGINE_PSUTIL.memory_percent(), 1)
text = f"CPU: {cpu}% RAM: {mem}%"
self.engine_mon_callback.emit(text)
except:
pass
try:
if engine.ENGINE_SUBPROCESS and engine.ENGINE_SUBPROCESS.poll() is not None:
self.subprocess_timer.stop()
exitCode = engine.ENGINE_SUBPROCESS.returncode
handle_engine_error(exitCode)
except Exception as ex:
LOG.error("subprocess_monitor: {}".format(ex))
LOG.exception(ex)
def on_new(self):
if shared.IS_PLAYING:
return
if new_project(self):
self.prepare_to_quit()
shared.MAIN_STACKED_WIDGET.start()
def on_open(self):
if shared.IS_PLAYING:
return
if open_project(self):
self.prepare_to_quit()
shared.MAIN_STACKED_WIDGET.start()
def on_close(self):
if shared.IS_PLAYING:
return
answer = QMessageBox.question(
shared.MAIN_STACKED_WIDGET,
_("Warning"),
_("Close this project and return to the welcome screen?"),
(QMessageBox.StandardButton.Yes | QMessageBox.StandardButton.Cancel),
QMessageBox.StandardButton.Cancel,
)
if answer == QMessageBox.StandardButton.Cancel:
return
daw.shared.ITEM_EDITOR.clear_solo_loop()
self.prepare_to_quit()
shared.MAIN_STACKED_WIDGET.show_welcome()
    def on_save(self):
        """Flush all plugin state and create a timestamped project backup.

        Projects auto-save on every change; this only snapshots a backup
        the user can revert to later.
        """
        shared.PLUGIN_UI_DICT.save_all_plugin_state()
        constants.PROJECT.create_backup()
    def on_save_as(self):
        """Show a dialog to create a named project backup.

        Unlike a traditional Save As, this does not copy the project; it
        creates a named backup snapshot that can be restored from the
        welcome screen's recovery window.
        """
        if shared.IS_PLAYING:
            return
        def ok_handler():
            # Sanitize the name; '/' would break the backup path
            f_name = str(f_lineedit.text()).strip()
            f_name = f_name.replace("/", "")
            if f_name:
                shared.PLUGIN_UI_DICT.save_all_plugin_state()
                # create_backup() returns falsy if the name is taken
                if constants.PROJECT.create_backup(f_name):
                    f_window.close()
                else:
                    QMessageBox.warning(
                        self,
                        _("Error"),
                        _("This name already exists, please choose " "another name"),
                    )
        f_window = QDialog(parent=MAIN_WINDOW)
        f_window.setWindowTitle(_("Save As..."))
        f_layout = QVBoxLayout(f_window)
        f_lineedit = QLineEdit()
        f_lineedit.setToolTip(
            "A descriptive name for this backup, you will be able to select "
            "it from the project recovery window in the welcome screen"
        )
        f_lineedit.setMinimumWidth(240)
        f_lineedit.setMaxLength(48)
        f_layout.addWidget(f_lineedit)
        f_layout.addItem(
            QSpacerItem(
                1,
                1,
                QSizePolicy.Policy.Minimum,
                QSizePolicy.Policy.Expanding,
            ),
        )
        f_ok_layout = QHBoxLayout()
        f_layout.addLayout(f_ok_layout)
        f_ok_button = QPushButton(_("OK"))
        f_ok_button.pressed.connect(ok_handler)
        f_ok_layout.addWidget(f_ok_button)
        f_cancel_button = QPushButton(_("Cancel"))
        f_ok_layout.addWidget(f_cancel_button)
        f_cancel_button.pressed.connect(f_window.close)
        # NOTE(review): set_focus looks like an sgqt QDialog extension,
        # not stock Qt -- confirm
        f_window.set_focus(f_lineedit)
        f_window.exec()
    def prepare_to_quit(self):
        """Tear down the engine and UI in a safe order before exiting.

        Stops the engine, closes plugin windows, frees the IPC socket
        server, detaches the host windows and transports from their
        stacked widgets, and stops the monitor timer.  On any failure it
        force-exits with code 999 rather than hang on shutdown.
        """
        try:
            # Avoid repaint churn while widgets are being torn down
            self.setUpdatesEnabled(False)
            close_engine()
            shared.PLUGIN_UI_DICT.close_all_plugin_windows()
            if self.socket_server is not None:
                self.socket_server.free()
            # Detach host windows so they are not destroyed with this widget
            for f_host in self.host_windows:
                f_host.prepare_to_quit()
                self.main_stack.removeWidget(f_host)
                f_host.setParent(None)
            for f_module in shared.HOST_MODULES:
                self.transport_stack.removeWidget(
                    f_module.TRANSPORT.group_box,
                )
                f_module.TRANSPORT.group_box.setParent(None)
            shared.IGNORE_CLOSE_EVENT = False
            if self.subprocess_timer:
                self.subprocess_timer.stop()
            shared.prepare_to_quit()
        except Exception as ex:
            LOG.error(
                "Exception thrown while attempting to exit, " "forcing Stargate to exit"
            )
            LOG.exception(ex)
            exit(999)
def on_change_audio_settings(self):
def callback():
shared.MAIN_STACKED_WIDGET.start()
self.prepare_to_quit()
shared.MAIN_STACKED_WIDGET.show_hardware_dialog(callback, callback)
def on_use_default_theme(self):
util.clear_file_setting("default-style")
QMessageBox.warning(
MAIN_WINDOW,
_("Theme Applied..."),
_("Changed theme. Please restart Stargate DAW"),
)
def on_copy_theme(self):
try:
path, _filter = QFileDialog.getSaveFileName(
MAIN_WINDOW,
_("Copy a theme directory"),
util.THEMES_DIR,
options=QFileDialog.Option.DontUseNativeDialog,
)
if path and str(path):
path = str(path)
if os.path.exists(path):
QMessageBox.warning(
MAIN_WINDOW,
_("Error"),
_(f"{path} already exists"),
)
return
theme.copy_theme(path)
except Exception as ex:
LOG.exception(ex)
show_generic_exception(ex)
def on_open_theme(self):
try:
f_file, f_filter = QFileDialog.getOpenFileName(
MAIN_WINDOW,
_("Open a theme file"),
util.THEMES_DIR,
"Stargate Theme (*.sgtheme)",
options=QFileDialog.Option.DontUseNativeDialog,
)
if f_file and str(f_file):
f_file = str(f_file)
scaler = ui_scaler_factory()
font_size, font_unit = get_font().get_font_size()
try:
theme.set_theme(f_file, scaler, font_size, font_unit)
except Exception as ex:
show_generic_exception(
ex,
_("Could not load the theme"),
)
return
QMessageBox.warning(
MAIN_WINDOW,
_("Theme Applied..."),
_("Changed theme. Please restart Stargate DAW"),
)
except Exception as ex:
show_generic_exception(ex)
def on_version(self):
f_window = QDialog(MAIN_WINDOW)
f_window.setWindowTitle(_("Version Info"))
f_window.setFixedSize(420, 150)
f_layout = QVBoxLayout()
f_window.setLayout(f_layout)
f_minor_version = util.META_DOT_JSON["version"]["minor"]
f_version = QLabel(
f"{constants.MAJOR_VERSION}-{f_minor_version}",
)
f_version.setTextInteractionFlags(
QtCore.Qt.TextInteractionFlag.TextSelectableByMouse,
)
f_layout.addWidget(f_version)
f_ok_button = QPushButton(_("OK"))
f_layout.addWidget(f_ok_button)
f_ok_button.pressed.connect(f_window.close)
f_window.exec()
    def on_spacebar(self):
        # Global spacebar shortcut: delegate to the transport widget
        shared.TRANSPORT.on_spacebar()
def on_collapse_splitters(self, a_restore=False):
if a_restore or not self.transport_splitter.sizes()[0]:
self.transport_splitter.setSizes([100, 9999])
else:
self.transport_splitter.setSizes([0, 9999])
def mp3_converter_dialog(self):
if which("avconv"):
f_enc = "avconv"
elif which("ffmpeg"):
f_enc = "ffmpeg"
else:
f_enc = "avconv"
f_lame = "lame"
for f_app in (f_enc, f_lame):
if which(f_app) is None:
QMessageBox.warning(
self, _("Error"), sg_strings.avconv_error.format(f_app)
)
return
self.audio_converter_dialog("lame", f_enc, "mp3")
def ogg_converter_dialog(self):
if which("oggenc") is None or which("oggdec") is None:
QMessageBox.warning(
self,
_("Error"),
_("Error, vorbis-tools are not installed"),
)
return
self.audio_converter_dialog("oggenc", "oggdec", "ogg")
def audio_converter_dialog(self, a_enc, a_dec, a_label):
def get_cmd(f_input_file, f_output_file):
if f_wav_radiobutton.isChecked():
if a_dec == "avconv" or a_dec == "ffmpeg":
f_cmd = [
which(a_dec),
"-i",
f_input_file,
f_output_file,
]
elif a_dec == "oggdec":
f_cmd = [
which(a_dec),
"--output",
f_output_file,
f_input_file,
]
else:
if a_enc == "oggenc":
f_quality = float(str(f_mp3_br_combobox.currentText()))
f_quality = (320.0 / f_quality) * 10.0
f_quality = clip_value(f_quality, 3.0, 10.0)
f_cmd = [
which(a_enc),
"-q",
str(f_quality),
"-o",
f_output_file,
f_input_file,
]
elif a_enc == "lame":
f_cmd = [
which(a_enc),
"-b",
str(f_mp3_br_combobox.currentText()),
f_input_file,
f_output_file,
]
LOG.info(f_cmd)
return f_cmd
def ok_handler():
f_input_file = str(f_name.text())
f_output_file = str(f_output_name.text())
if not f_input_file or not f_output_file:
QMessageBox.warning(
f_window,
_("Error"),
_("File names cannot be empty"),
)
return
if f_batch_checkbox.isChecked():
if f_wav_radiobutton.isChecked():
f_ext = ".{}".format(a_label)
else:
f_ext = ".wav"
f_ext = f_ext.upper()
f_list = [
x for x in os.listdir(f_input_file) if x.upper().endswith(f_ext)
]
if not f_list:
QMessageBox.warning(
f_window,
_("Error"),
_("No {} files in {}".format(f_ext, f_input_file)),
)
return
f_proc_list = []
for f_file in f_list:
f_in = os.path.join(f_input_file, f_file)
f_out = os.path.join(
f_output_file,
"{}{}".format(
f_file.rsplit(".", 1)[0],
self.ac_ext,
),
)
f_cmd = get_cmd(f_in, f_out)
f_proc = subprocess.Popen(f_cmd)
f_proc_list.append((f_proc, f_out))
for f_proc, f_out in f_proc_list:
f_status_label.setText(f_out)
QApplication.processEvents()
f_proc.communicate()
else:
f_cmd = get_cmd(f_input_file, f_output_file)
f_proc = subprocess.Popen(f_cmd)
f_proc.communicate()
if f_close_checkbox.isChecked():
f_window.close()
QMessageBox.warning(
self,
_("Success"),
_("Created file(s)"),
)
def cancel_handler():
f_window.close()
def set_output_file_name():
if not str(f_output_name.text()):
f_file = str(f_name.text())
if f_file:
f_file_name = f_file.rsplit(".")[0] + self.ac_ext
f_output_name.setText(f_file_name)
def file_name_select():
try:
if not os.path.isdir(self.last_ac_dir):
self.last_ac_dir = HOME
if f_batch_checkbox.isChecked():
f_dir = QFileDialog.getExistingDirectory(
MAIN_WINDOW,
_("Open Folder"),
self.last_ac_dir,
options=QFileDialog.Option.DontUseNativeDialog,
)
if f_dir is None:
return
f_dir = str(f_dir)
if not f_dir:
return
f_name.setText(f_dir)
self.last_ac_dir = f_dir
else:
f_file_name, f_filter = QFileDialog.getOpenFileName(
MAIN_WINDOW,
_("Select a file name to save to..."),
self.last_ac_dir,
filter=_("Audio Files {}").format(
"(*.wav *.{})".format(a_label)
),
options=QFileDialog.Option.DontUseNativeDialog,
)
if f_file_name and str(f_file_name):
f_name.setText(str(f_file_name))
self.last_ac_dir = os.path.dirname(f_file_name)
if f_file_name.lower().endswith(".{}".format(a_label)):
f_wav_radiobutton.setChecked(True)
elif f_file_name.lower().endswith(".wav"):
f_mp3_radiobutton.setChecked(True)
set_output_file_name()
self.last_ac_dir = os.path.dirname(f_file_name)
except Exception as ex:
show_generic_exception(ex)
def file_name_select_output():
try:
if not os.path.isdir(self.last_ac_dir):
self.last_ac_dir = HOME
if f_batch_checkbox.isChecked():
f_dir = QFileDialog.getExistingDirectory(
MAIN_WINDOW,
_("Open Folder"),
self.last_ac_dir,
options=QFileDialog.Option.DontUseNativeDialog,
)
if f_dir is None:
return
f_dir = str(f_dir)
if not f_dir:
return
f_output_name.setText(f_dir)
self.last_ac_dir = f_dir
else:
f_file_name, f_filter = QFileDialog.getSaveFileName(
MAIN_WINDOW,
_("Select a file name to save to..."),
self.last_ac_dir,
options=QFileDialog.Option.DontUseNativeDialog,
)
if f_file_name and str(f_file_name):
f_file_name = str(f_file_name)
if not f_file_name.endswith(self.ac_ext):
f_file_name += self.ac_ext
f_output_name.setText(f_file_name)
self.last_ac_dir = os.path.dirname(f_file_name)
except Exception as ex:
LOG.exception(ex)
def format_changed(a_val=None):
if f_wav_radiobutton.isChecked():
self.ac_ext = ".wav"
else:
self.ac_ext = ".{}".format(a_label)
if not f_batch_checkbox.isChecked():
f_str = str(f_output_name.text()).strip()
if f_str and not f_str.endswith(self.ac_ext):
f_arr = f_str.rsplit(".")
f_output_name.setText(f_arr[0] + self.ac_ext)
def batch_changed(a_val=None):
    """Toggle the dialog between batch (folder) and single-file mode.

    Updates the input/output tooltips to match the mode and clears any
    previously selected paths.
    """
    if a_val:
        input_tip = "The folder containing files to be converted"
        output_tip = "The folder to place converted files in"
    else:
        input_tip = "The file to be converted"
        output_tip = "The name of the converted file"
    f_name.setToolTip(input_tip)
    f_output_name.setToolTip(output_tip)
    f_name.setText("")
    f_output_name.setText("")
self.ac_ext = ".wav"
f_window = QDialog(MAIN_WINDOW)
f_window.setWindowTitle(_("{} Converter".format(a_label)))
vlayout = QVBoxLayout()
f_layout = QGridLayout()
vlayout.addLayout(f_layout)
f_window.setLayout(vlayout)
f_name = QLineEdit()
f_name.setReadOnly(True)
f_name.setMinimumWidth(480)
f_layout.addWidget(QLabel(_("Input:")), 0, 0)
f_layout.addWidget(f_name, 0, 1)
f_select_file = QPushButton(_("Select"))
f_select_file.pressed.connect(file_name_select)
f_layout.addWidget(f_select_file, 0, 2)
f_output_name = QLineEdit()
f_output_name.setReadOnly(True)
f_output_name.setMinimumWidth(480)
f_layout.addWidget(QLabel(_("Output:")), 1, 0)
f_layout.addWidget(f_output_name, 1, 1)
f_select_file_output = QPushButton(_("Select"))
f_select_file_output.pressed.connect(file_name_select_output)
f_layout.addWidget(f_select_file_output, 1, 2)
f_layout.addWidget(QLabel(_("Convert to:")), 2, 1)
f_rb_group = QButtonGroup()
f_wav_radiobutton = QRadioButton("wav")
f_wav_radiobutton.setChecked(True)
f_rb_group.addButton(f_wav_radiobutton)
f_wav_layout = QHBoxLayout()
f_wav_layout.addWidget(f_wav_radiobutton)
f_layout.addLayout(f_wav_layout, 3, 1)
f_wav_radiobutton.toggled.connect(format_changed)
f_mp3_radiobutton = QRadioButton(a_label)
f_rb_group.addButton(f_mp3_radiobutton)
f_mp3_layout = QHBoxLayout()
f_mp3_layout.addWidget(f_mp3_radiobutton)
f_mp3_radiobutton.toggled.connect(format_changed)
f_mp3_br_combobox = QComboBox()
f_mp3_br_combobox.addItems(["320", "256", "192", "160", "128"])
f_mp3_layout.addWidget(QLabel(_("Bitrate")))
f_mp3_layout.addWidget(f_mp3_br_combobox)
f_layout.addLayout(f_mp3_layout, 4, 1)
f_batch_checkbox = QCheckBox(_("Batch convert entire folder?"))
f_batch_checkbox.stateChanged.connect(batch_changed)
batch_changed(0)
f_layout.addWidget(f_batch_checkbox, 6, 1)
f_close_checkbox = QCheckBox("Close on finish?")
f_close_checkbox.setChecked(True)
f_layout.addWidget(f_close_checkbox, 9, 1)
f_ok_layout = QHBoxLayout()
f_ok_layout.addItem(
QSpacerItem(
10,
10,
QSizePolicy.Policy.Expanding,
QSizePolicy.Policy.Minimum,
),
)
f_ok = QPushButton(_("OK"))
f_ok.setMinimumWidth(75)
f_ok.pressed.connect(ok_handler)
f_ok_layout.addWidget(f_ok)
vlayout.addLayout(f_ok_layout)
f_cancel = QPushButton(_("Cancel"))
f_cancel.setMinimumWidth(75)
f_cancel.pressed.connect(cancel_handler)
f_ok_layout.addWidget(f_cancel)
f_status_label = QLabel("")
f_layout.addWidget(f_status_label, 15, 1)
f_window.exec()
def on_offline_render(self):
    """Flush all open plugin UI state to the project, then delegate the
    offline render to the currently active host window."""
    shared.PLUGIN_UI_DICT.save_all_plugin_state()
    self.current_window.on_offline_render()
def on_undo(self):
    """Delegate undo to the currently active host window."""
    self.current_window.on_undo()
def on_redo(self):
    """Delegate redo to the currently active host window."""
    self.current_window.on_redo()
def final_gc(a_print=True):
    """Brute-force garbage collect all possible objects to
    prevent the infamous PyQt SEGFAULT-on-exit...

    Collects repeatedly (with short pauses) until a pass reports zero
    unreachable objects, and gives up early when a pass makes no progress.

    @a_print: when True, log the outcome
    """
    LOG.info("Called final_gc")
    previous = gc.collect()
    if not previous:
        if a_print:
            LOG.info("Successfully garbage collected all objects")
        return
    for iteration in range(2, 12):
        time.sleep(0.1)
        remaining = gc.collect()
        if remaining == 0:
            if a_print:
                LOG.info(
                    "Successfully garbage collected all objects "
                    "in {} iterations".format(iteration)
                )
            return
        if remaining >= previous:
            # No progress this pass; further passes are unlikely to help.
            break
        previous = remaining
    if a_print:
        LOG.warning(
            "gc.collect() returned {} unreachable objects "
            "after {} iterations".format(remaining, iteration)
        )
def flush_events():
    """Pump the Qt event queue several times (with short pauses) so any
    queued events are processed before shutdown continues."""
    LOG.info("Called flush_events")
    for _ in range(5):
        shared.APP.processEvents()
        time.sleep(0.1)
def global_close_all():
    """Close all plugin windows, stop the audio engine, then let each
    host module close its own UI state."""
    shared.PLUGIN_UI_DICT.close_all_plugin_windows()
    close_engine()
    for f_module in shared.HOST_MODULES:
        f_module.global_close_all()
def global_ui_refresh_callback(a_restore_all=False):
    """Use this to re-open all existing items/sequences/song in
    their editors when the files have been changed externally

    @a_restore_all: forwarded unchanged to each host module's refresh
    """
    for f_module in shared.HOST_MODULES:
        f_module.global_ui_refresh_callback(a_restore_all)
# Opens or creates a new project
def global_open_project(a_project_file, a_wait=True):
    """Open an existing project: point the engine at it, load the project
    model, build the plugin UI dict, and notify every host module.

    @a_project_file: path to the project file
    @a_wait: unused here; retained for call-site compatibility
    """
    # TODO: SG DEPRECATED
    global PROJECT_FILE
    PROJECT_FILE = a_project_file
    # The engine is started on the project before the UI model loads it
    open_engine(a_project_file, get_fps())
    constants.PROJECT = sg_project.SgProject()
    # Suppress update broadcasts during the initial load
    constants.PROJECT.suppress_updates = True
    constants.PROJECT.open_project(a_project_file, False)
    constants.PROJECT.suppress_updates = False
    try:
        constants.PROJECT.create_backup()
    except Exception as ex:
        # Backup failure is non-fatal; the project still opens
        LOG.error("constants.PROJECT.create_backup() failed")
        LOG.exception(ex)
    shared.PLUGIN_UI_DICT = SgPluginUiDict(
        constants.PROJECT,
        constants.IPC,
    )
    for f_module in shared.HOST_MODULES:
        f_module.global_open_project(a_project_file)
    open_bookmarks()
def global_new_project(a_project_file, a_wait=True):
    """Create a brand-new project, notify every host module, then start
    the engine on it.

    @a_project_file: path for the new project file
    @a_wait: unused here; retained for call-site compatibility
    """
    # TODO: SG DEPRECATED
    global PROJECT_FILE
    PROJECT_FILE = a_project_file
    constants.PROJECT = sg_project.SgProject()
    constants.PROJECT.new_project(a_project_file)
    MAIN_WINDOW.last_offline_dir = constants.PROJECT.user_folder
    shared.PLUGIN_UI_DICT = SgPluginUiDict(
        constants.PROJECT,
        constants.IPC,
    )
    for f_module in shared.HOST_MODULES:
        f_module.global_new_project(a_project_file)
    # NOTE(review): unlike global_open_project, the engine is started
    # after the project is created -- presumably because the project
    # files must exist first; confirm before changing the order
    open_engine(a_project_file, get_fps())
    open_bookmarks()
def splash_screen_opening(project_file):
    """Show an "Opening <project>" message on the splash screen,
    truncating long paths to their trailing 100 characters."""
    message = (
        "Opening ..." + project_file[-100:]
        if len(project_file) > 100
        else "Opening " + project_file
    )
    SPLASH_SCREEN.status_update(message)
def _load_project(project_file):
    """Open an existing project, or create/open the fallback project.

    @project_file:
        Path passed by the caller, or a falsy value to fall back to the
        last-opened project and then to the stock default project path.
    """
    if project_file:
        # Remember an explicitly requested project as the last project
        set_project(project_file)
    else:
        project_file = util.get_file_setting("last-project", str, None)
        if not project_file:
            project_file = os.path.join(
                constants.DEFAULT_PROJECT_DIR,
                "default-project",
                f"{constants.MAJOR_VERSION}.project",
            )
            # NOTE(review): logged unconditionally, even when this default
            # project file does exist -- the message may be misleading
            LOG.info(f"No default project at '{project_file}'")
    # An existing project in a non-writable directory cannot be used
    if os.path.exists(project_file) and not os.access(
        os.path.dirname(project_file), os.W_OK
    ):
        QMessageBox.warning(
            MAIN_WINDOW,
            _("Error"),
            _(
                "You do not have read+write permissions to {}, please correct "
                "this and restart Stargate"
            ).format(
                os.path.dirname(project_file),
            ),
        )
        MAIN_WINDOW.prepare_to_quit()
    splash_screen_opening(project_file)
    if os.path.exists(project_file):
        try:
            # Raises StargateProjectVersionError on incompatible versions
            check_project_version(
                MAIN_WINDOW,
                project_file,
            )
            global_open_project(project_file)
        except StargateProjectVersionError:
            exit(1)
        except Exception as ex:
            LOG.exception(ex)
            QMessageBox.warning(
                MAIN_WINDOW,
                _("Error"),
                _(
                    "Error opening project, check the logs for details. "
                    "If the problem persists, you may need to use the "
                    "project recovery tool on the welcome screen"
                ),
            )
            MAIN_WINDOW.prepare_to_quit()
    else:
        global_new_project(project_file)
def main(
    splash_screen,
    project_file,
):
    """Build the main window, start the engine, load the project, and
    return the fully initialized MAIN_WINDOW.

    @splash_screen: already-visible splash screen used for status updates
    @project_file:  project path from the command line, or None
    """
    global MAIN_WINDOW, SPLASH_SCREEN
    scaler = ui_scaler_factory()
    major_version = util.META_DOT_JSON["version"]["major"]
    minor_version = util.META_DOT_JSON["version"]["minor"]
    LOG.info(f"Starting {major_version}-{minor_version}:{util.COMMIT_HASH}")
    log_screen_info()
    SPLASH_SCREEN = splash_screen
    widgets.knob_setup()
    QPixmapCache.setCacheLimit(1024 * 1024)
    MAIN_WINDOW = SgMainWindow()
    # Ensure that the engine is not running before trying to access
    # audio hardware
    pid = check_engine()
    if pid:
        f_answer = QMessageBox.question(
            None,
            _("Warning"),
            sg_strings.multiple_instances_warning,
            buttons=(QMessageBox.StandardButton.Ok | QMessageBox.StandardButton.Cancel),
        )
        if f_answer == QMessageBox.StandardButton.Cancel:
            sys.exit(1)
        kill_engine(pid)
    MAIN_WINDOW.setup(scaler)
    shared.APP.lastWindowClosed.connect(shared.APP.quit)
    # The home directory must be writable for settings and projects
    if not os.access(HOME, os.W_OK):
        QMessageBox.warning(
            MAIN_WINDOW,
            _("Error"),
            _(
                "You do not have read+write permissions to {}, please correct "
                "this and restart Stargate"
            ).format(HOME),
        )
        MAIN_WINDOW.prepare_to_quit()
    _load_project(project_file)
    shared.set_window_title()
    SPLASH_SCREEN.status_update(_("Loading project..."))
    # Poll (up to ~30s) for the engine to become ready or to exit
    for i in range(600):
        if constants.READY or util.ENGINE_RETCODE is not None:
            break
        time.sleep(0.05)
        QApplication.processEvents()
    if i < 15:
        # The engine came up quickly; keep the splash screen visible a
        # moment longer so it does not just flicker
        SPLASH_SCREEN.status_update(_("Showing the main window"))
        time.sleep((20 - i) * 0.1)
    if util.ENGINE_RETCODE is not None:
        # Engine exited during startup: report the failure.
        # NOTE(review): 1003 appears to be a fatal retcode requiring the
        # UI to quit as well -- confirm its meaning in the engine sources
        handle_engine_error(util.ENGINE_RETCODE)
        if util.ENGINE_RETCODE == 1003:
            shared.IGNORE_CLOSE_EVENT = False
            MAIN_WINDOW.prepare_to_quit()
    # Workaround for weird stuff happening in Windows during initialization
    constants.IPC_ENABLED = True
    return MAIN_WINDOW
|
string | splitters | # Copyright 2004-2009 Joe Wreschnig, Michael Urman, Steven Robertson
# 2011-2018 Nick Boultbee
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
import re
from quodlibet.util import re_escape
# Separators that may delimit multiple values inside one tag string.
DEFAULT_TAG_SPLITTERS = ["/", "&", ","]
# Two-character open/close delimiter pairs that may enclose a trailing
# subtitle, e.g. "()" or the fullwidth parentheses pair.
DEFAULT_SUB_SPLITTERS = ["\u301c\u301c", "\uff08\uff09", "[]", "()", "~~", "--"]
def split_value(s, splitters=DEFAULT_TAG_SPLITTERS):
    """Splits a string. The first match in 'splitters' is used as the
    separator; subsequent matches are intentionally ignored.
    """
    if not splitters:
        return [s.strip()]

    def build_pattern(separator):
        # Anchor the separator to a word boundary (or an adjacent
        # non-word character) on both sides, consuming surrounding
        # whitespace along with it.
        return r"{start}\s*{split}\s*{end}".format(
            start=r"(?:\b|(?<=\W))",
            split=re_escape(separator),
            end=r"(?:\b|(?=\W))",
        )

    lines = s.split("\n")
    for separator in splitters:
        pattern = re.compile(build_pattern(separator), re.UNICODE)
        if any(pattern.search(line) for line in lines):
            return [
                piece.strip() for line in lines for piece in pattern.split(line)
            ]
    return lines
def find_subtitle(title, delimiters=DEFAULT_SUB_SPLITTERS):
    """Split a trailing bracketed subtitle off *title*.

    Each delimiter is a two-character open/close pair.  Returns
    (main_title, subtitle) on a match, otherwise (title, None).
    """
    if isinstance(title, bytes):
        title = title.decode("utf-8", "replace")
    for pair in delimiters:
        if len(pair) != 2:
            continue
        opener, closer = pair
        # The opener must occur before the final character and the
        # title must end with the closer.
        if opener not in title[:-1] or not title.endswith(closer):
            continue
        tail = len(closer)
        start = title[0:-tail].rindex(opener)
        # A match at position 0 would leave an empty main title; skip it.
        if start:
            subtitle = title[start + len(opener) : -tail]
            return title[:start].rstrip(), subtitle
    return title, None
def split_title(
    s, tag_splitters=DEFAULT_TAG_SPLITTERS, sub_splitters=DEFAULT_SUB_SPLITTERS
):
    """Split *s* into (title, [subtitle values]); the subtitle list is
    empty when no bracketed subtitle is present."""
    title, subtitle = find_subtitle(s, sub_splitters)
    if not subtitle:
        return s, []
    return title.strip(), split_value(subtitle, tag_splitters)
# Tokens that introduce a featured-artist credit in a title/subtitle.
__FEATURING = ["feat.", "featuring", "feat", "ft", "ft.", "with", "w/"]
# Tokens that mark an original-artist / cover credit.
__ORIGINALLY = ["originally by ", " cover"]
# Cache case-insensitive regex searches of the above
__FEAT_REGEX = [re.compile(re_escape(s + " "), re.I) for s in __FEATURING]
__ORIG_REGEX = [re.compile(re_escape(s), re.I) for s in __ORIGINALLY]
def split_people(
    s, tag_splitters=DEFAULT_TAG_SPLITTERS, sub_splitters=DEFAULT_SUB_SPLITTERS
):
    """Split *s* into (title, [people]).

    With no bracketed subtitle, scan the title itself for a bare
    "feat."-style token and treat everything after it as people.
    With a subtitle, strip at most one featuring/cover marker from it
    and split the remainder into people.
    """
    title, subtitle = find_subtitle(s, sub_splitters)
    if not subtitle:
        parts = s.split(" ")
        if len(parts) > 2:
            for feat in __FEATURING:
                try:
                    # Find the credit token (case-insensitive) and split
                    # the title around it
                    i = [p.lower() for p in parts].index(feat)
                    orig = " ".join(parts[:i])
                    others = " ".join(parts[i + 1 :])
                    return orig, split_value(others, tag_splitters)
                except (ValueError, IndexError):
                    pass
        return s, []
    else:
        old = subtitle
        # TODO: allow multiple substitutions across types, maybe
        for regex in __FEAT_REGEX + __ORIG_REGEX:
            # Remove at most one credit marker from the subtitle
            subtitle = re.sub(regex, "", subtitle, 1)
            if old != subtitle:
                # Only change once
                break
        values = split_value(subtitle, tag_splitters)
        return title.strip(), values
def split_album(s, sub_splitters=DEFAULT_SUB_SPLITTERS):
    """Split an album title into (name, disc_number_string_or_None).

    Recognizes either a bracketed "(disc 2)"-style subtitle or a bare
    trailing "... Disc 2" in the title itself.
    """
    name, disc = find_subtitle(s, sub_splitters)
    if not disc:
        # No bracketed subtitle: look for a bare "disc N" / "disk N"
        # at the end of the title.
        words = s.split(" ")
        if len(words) > 2:
            marker = words[-2].lower()
            if "disc" in marker or "disk" in marker:
                return " ".join(words[:-2]), words[-1]
        return s, None
    tokens = disc.split()
    disc_markers = ["disc", "disk", "cd", "vol", "vol."]
    if len(tokens) == 2 and tokens[0].lower() in disc_markers:
        try:
            # The except branch is unreachable with len(tokens) == 2;
            # kept to preserve the original behavior exactly.
            return name, tokens[1]
        except IndexError:
            return s, None
    return s, None
|
blocks | qa_tagged_stream_mux | #!/usr/bin/env python
#
# Copyright 2013-2014 Free Software Foundation, Inc.
#
# This file is part of GNU Radio
#
# SPDX-License-Identifier: GPL-3.0-or-later
#
#
import numpy
import pmt
from gnuradio import blocks, gr, gr_unittest
from gnuradio.gr import packet_utils
def make_tag(key, value, offset, srcid=None):
    """Construct a GNU Radio stream tag.

    *key* and *value* are converted to PMTs; *srcid* is attached only
    when provided.
    """
    result = gr.tag_t()
    result.offset = offset
    result.key = pmt.string_to_symbol(key)
    result.value = pmt.to_pmt(value)
    if srcid is not None:
        result.srcid = pmt.to_pmt(srcid)
    return result
class qa_tagged_stream_mux(gr_unittest.TestCase):
    """QA tests for blocks.tagged_stream_mux (tagged-stream packet muxing)."""

    def setUp(self):
        self.tb = gr.top_block()
        self.tsb_key = "tsb_key"

    def tearDown(self):
        self.tb = None

    def setup_data_tags(self, data):
        # Flatten packets (tuples of samples) into one vector plus the
        # tagged-stream-boundary length tags that delimit each packet.
        return packet_utils.packets_to_vectors(data, self.tsb_key)

    def test_1(self):
        """Mux two tagged streams and check data interleaving plus the
        repositioning of extra (non-length) tags."""
        packets0 = (
            (0, 1, 2),
            (5, 6),
            (10,),
            (
                14,
                15,
                16,
            ),
        )
        packets1 = ((3, 4), (7, 8, 9), (11, 12, 13), (17,))
        # One output packet per input packet pair: port 0 data first
        expected = [
            [0, 1, 2, 3, 4],
            [5, 6, 7, 8, 9],
            [10, 11, 12, 13],
            [14, 15, 16, 17],
        ]
        data0, tags0 = self.setup_data_tags(packets0)
        data1, tags1 = self.setup_data_tags(packets1)
        # Extra tags that must be carried through and re-offset by the mux
        tags0.append(make_tag("spam", 42, 4))
        tags1.append(make_tag("eggs", 23, 3))
        src0 = blocks.vector_source_b(data0, tags=tags0)
        src1 = blocks.vector_source_b(data1, tags=tags1)
        tagged_stream_mux = blocks.tagged_stream_mux(gr.sizeof_char, self.tsb_key)
        snk = blocks.tsb_vector_sink_b(tsb_key=self.tsb_key)
        self.tb.connect(src0, (tagged_stream_mux, 0))
        self.tb.connect(src1, (tagged_stream_mux, 1))
        self.tb.connect(tagged_stream_mux, snk)
        self.tb.run()
        # Check
        self.assertEqual(expected, snk.data())
        tags = [gr.tag_to_python(x) for x in snk.tags()]
        tags = sorted([(x.offset, x.key, x.value) for x in tags])
        # Offsets are shifted by the preceding packets' combined lengths
        tags_expected = [
            (6, "spam", 42),
            (8, "eggs", 23),
        ]
        self.assertEqual(tags, tags_expected)

    def test_preserve_tag_head_pos(self):
        """Test the 'preserve head position' function.
        This will add a 'special' tag to item 0 on stream 1.
        It should be on item 0 of the output stream."""
        packet_len_0 = 5
        data0 = list(range(packet_len_0))
        packet_len_1 = 3
        data1 = list(range(packet_len_1))
        mux = blocks.tagged_stream_mux(
            gr.sizeof_float,
            self.tsb_key,
            1,  # Mark port 1 as carrying special tags on the head position
        )
        sink = blocks.tsb_vector_sink_f(tsb_key=self.tsb_key)
        self.tb.connect(
            blocks.vector_source_f(data0),
            blocks.stream_to_tagged_stream(
                gr.sizeof_float, 1, packet_len_0, self.tsb_key
            ),
            (mux, 0),
        )
        self.tb.connect(
            blocks.vector_source_f(
                list(range(packet_len_1)), tags=(make_tag("spam", "eggs", 0),)
            ),
            blocks.stream_to_tagged_stream(
                gr.sizeof_float, 1, packet_len_1, self.tsb_key
            ),
            (mux, 1),
        )
        self.tb.connect(mux, sink)
        self.tb.run()
        self.assertEqual(len(sink.data()), 1)
        self.assertEqual(sink.data()[0], data0 + data1)
        # The special tag must stay at offset 0 despite port 1's data
        # being appended after port 0's
        self.assertEqual(len(sink.tags()), 1)
        tag = gr.tag_to_python(sink.tags()[0])
        tag = (tag.offset, tag.key, tag.value)
        tag_expected = (0, "spam", "eggs")
        self.assertEqual(tag, tag_expected)


if __name__ == "__main__":
    gr_unittest.run(qa_tagged_stream_mux)
|
model | api_token | # encoding: utf-8
from __future__ import annotations
import copy
import datetime
from secrets import token_urlsafe
from typing import Any, Optional
from ckan.common import config
from ckan.model import DomainObject, User, meta
from sqlalchemy import Column, ForeignKey, Table, orm, types
from sqlalchemy.dialects.postgresql import JSONB
from sqlalchemy.ext.mutable import MutableDict
from typing_extensions import Self
__all__ = ["ApiToken", "api_token_table"]
def _make_token() -> str:
    """Generate a random URL-safe token string; its length is driven by
    the ``api_token.nbytes`` config option."""
    nbytes = config.get("api_token.nbytes")
    return token_urlsafe(nbytes)
# SQLAlchemy table backing ApiToken.  The randomly generated token
# string itself serves as the primary key.
api_token_table = Table(
    "api_token",
    meta.metadata,
    Column("id", types.UnicodeText, primary_key=True, default=_make_token),
    Column("name", types.UnicodeText),
    Column("user_id", types.UnicodeText, ForeignKey("user.id")),
    Column("created_at", types.DateTime, default=datetime.datetime.utcnow),
    Column("last_access", types.DateTime, nullable=True),
    Column("plugin_extras", MutableDict.as_mutable(JSONB)),
)
class ApiToken(DomainObject):
    """An API token owned by a user; the token string is the primary key."""

    id: str
    name: str
    user_id: Optional[str]
    created_at: datetime.datetime
    last_access: Optional[datetime.datetime]  # None until first touch()
    plugin_extras: dict[str, Any]
    owner: Optional[User]  # populated by the SQLAlchemy relation mapping

    def __init__(self, user_id: Optional[str] = None, name: str = "Unnamed") -> None:
        self.id = _make_token()
        self.user_id = user_id
        self.name = name

    @classmethod
    def get(cls, id: Optional[str]) -> Optional[Self]:
        """Return the token with this id, or None for a missing/falsy id."""
        if not id:
            return None
        return meta.Session.query(cls).get(id)

    @classmethod
    def revoke(cls, id: Optional[str]) -> bool:
        """Delete the token with this id; return True if it existed."""
        token = cls.get(id)
        if token:
            meta.Session.delete(token)
            meta.Session.commit()
            return True
        return False

    def touch(self, commit: bool = False) -> None:
        """Record the current time (naive UTC) as the last access time."""
        self.last_access = datetime.datetime.utcnow()
        if commit:
            meta.Session.commit()

    def set_extra(self, key: str, value: Any, commit: bool = False) -> None:
        """Set a single key in plugin_extras.

        The dict is deep-copied and reassigned -- presumably so the
        assignment (rather than in-place mutation) is what marks the
        JSONB column dirty; verify before simplifying.
        """
        extras = self.plugin_extras or {}
        extras[key] = value
        self.plugin_extras = copy.deepcopy(extras)
        if commit:
            meta.Session.commit()
# Map ApiToken onto its table; deleting a User cascades to all of that
# user's tokens via the "api_tokens" backref.
meta.mapper(
    ApiToken,
    api_token_table,
    properties={
        "owner": orm.relation(
            User, backref=orm.backref("api_tokens", cascade="all, delete")
        )
    },
)
|
sabnzbd | sabtraylinux | #!/usr/bin/python3 -OO
# Copyright 2007-2023 The SABnzbd-Team (sabnzbd.org)
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
"""
sabnzbd.sabtraylinux - System tray icon for Linux, inspired from the Windows one
"""
import logging
import gi
from gi.repository import GLib, Gtk
try:
    # Prefer XApp.StatusIcon when the typelib is available and new
    # enough to actually provide StatusIcon; otherwise fall back to the
    # (deprecated) Gtk.StatusIcon below.
    gi.require_version("XApp", "1.0")
    from gi.repository import XApp
    if not hasattr(XApp, "StatusIcon"):
        raise ImportError
    HAVE_XAPP = True
    logging.debug("XApp found: %s" % XApp)
except Exception:
    HAVE_XAPP = False
    logging.debug("XApp not available, falling back to Gtk.StatusIcon")
import subprocess
from os.path import abspath
from threading import Thread
from time import sleep
import sabnzbd
import sabnzbd.api as api
import sabnzbd.cfg as cfg
from sabnzbd.misc import to_units
from sabnzbd.panic import launch_a_browser
class StatusIcon(Thread):
    """System tray icon for Linux.

    Uses XApp.StatusIcon when available, falling back to Gtk.StatusIcon,
    and refreshes the icon/tooltip from the queue state once per second.
    """

    # Absolute paths required (XApp expects absolute icon paths)
    sabicons = {
        "default": abspath("icons/logo-arrow.svg"),
        "green": abspath("icons/logo-arrow_green.svg"),
        "pause": abspath("icons/logo-arrow_gray.svg"),
    }

    updatefreq = 1000  # ms

    def __init__(self):
        # NOTE(review): Thread.__init__ is never called; a separate worker
        # thread is spawned instead of running this object as the thread.
        # Confirm whether the Thread base class is still needed.
        self.mythread = Thread(target=self.dowork)
        self.mythread.start()

    def dowork(self):
        """Worker thread body: build the status icon, then run Gtk main."""
        # Wait for translated texts to be loaded
        while not sabnzbd.WEBUI_READY:
            sleep(0.2)
            logging.debug("language file not loaded, waiting")
        self.sabpaused = False
        if HAVE_XAPP:
            self.statusicon = XApp.StatusIcon()
        else:
            self.statusicon = Gtk.StatusIcon()
            self.statusicon.set_name("SABnzbd")
            self.statusicon.set_visible(True)
        self.icon = self.sabicons["default"]
        self.refresh_icon()
        self.tooltip = "SABnzbd %s" % sabnzbd.__version__
        self.refresh_tooltip()
        # XApp delivers the menu request via "activate", Gtk via "popup-menu"
        if HAVE_XAPP:
            self.statusicon.connect("activate", self.right_click_event)
        else:
            self.statusicon.connect("popup-menu", self.right_click_event)
        GLib.timeout_add(self.updatefreq, self.run)
        Gtk.main()

    def refresh_icon(self):
        """Push self.icon to whichever status-icon backend is in use."""
        if HAVE_XAPP:
            # icon path must be absolute in XApp
            self.statusicon.set_icon_name(self.icon)
        else:
            self.statusicon.set_from_file(self.icon)

    def refresh_tooltip(self):
        """Push self.tooltip to the status icon."""
        self.statusicon.set_tooltip_text(self.tooltip)

    # run this every updatefreq ms
    def run(self):
        """Poll queue state and update icon/tooltip.

        Returns 1 (truthy) so the GLib timeout keeps firing.
        """
        self.sabpaused, bytes_left, bpsnow, time_left = api.fast_queue()
        mb_left = to_units(bytes_left)
        speed = to_units(bpsnow)
        if self.sabpaused:
            self.tooltip = T("Paused")
            self.icon = self.sabicons["pause"]
        elif bytes_left > 0:
            self.tooltip = "%sB/s %s: %sB (%s)" % (
                speed,
                T("Remaining"),
                mb_left,
                time_left,
            )
            self.icon = self.sabicons["green"]
        else:
            self.tooltip = T("Idle")
            self.icon = self.sabicons["default"]
        self.refresh_icon()
        self.tooltip = "SABnzbd %s\n%s" % (sabnzbd.__version__, self.tooltip)
        self.refresh_tooltip()
        return 1

    def right_click_event(self, icon, button, time):
        """menu"""
        menu = Gtk.Menu()
        maddnzb = Gtk.MenuItem(label=T("Add NZB"))
        mshowinterface = Gtk.MenuItem(label=T("Show interface"))
        mopencomplete = Gtk.MenuItem(label=T("Open complete folder"))
        mrss = Gtk.MenuItem(label=T("Read all RSS feeds"))
        # Pause/Resume label reflects the state captured by the last poll
        if self.sabpaused:
            mpauseresume = Gtk.MenuItem(label=T("Resume"))
        else:
            mpauseresume = Gtk.MenuItem(label=T("Pause"))
        mrestart = Gtk.MenuItem(label=T("Restart"))
        mshutdown = Gtk.MenuItem(label=T("Shutdown"))
        maddnzb.connect("activate", self.addnzb)
        mshowinterface.connect("activate", self.browse)
        mopencomplete.connect("activate", self.opencomplete)
        mrss.connect("activate", self.rss)
        mpauseresume.connect("activate", self.pauseresume)
        mrestart.connect("activate", self.restart)
        mshutdown.connect("activate", self.shutdown)
        menu.append(maddnzb)
        menu.append(mshowinterface)
        menu.append(mopencomplete)
        menu.append(mrss)
        menu.append(mpauseresume)
        menu.append(mrestart)
        menu.append(mshutdown)
        menu.show_all()
        menu.popup(None, None, None, self.statusicon, button, time)

    def addnzb(self, icon):
        """menu handlers"""
        # File-picker for NZBs and the archive formats SABnzbd accepts
        dialog = Gtk.FileChooserDialog(
            title="SABnzbd - " + T("Add NZB"), action=Gtk.FileChooserAction.OPEN
        )
        dialog.add_buttons(
            Gtk.STOCK_CANCEL,
            Gtk.ResponseType.CANCEL,
            Gtk.STOCK_OPEN,
            Gtk.ResponseType.OK,
        )
        dialog.set_select_multiple(True)
        # `filter` shadows the builtin; left unchanged here
        filter = Gtk.FileFilter()
        filter.set_name("*.nzb,*.gz,*.bz2,*.zip,*.rar,*.7z")
        filter.add_pattern("*.nzb")
        filter.add_pattern("*.gz")
        filter.add_pattern("*.bz2")
        filter.add_pattern("*.zip")
        filter.add_pattern("*.rar")
        filter.add_pattern("*.7z")
        dialog.add_filter(filter)
        response = dialog.run()
        if response == Gtk.ResponseType.OK:
            for filename in dialog.get_filenames():
                sabnzbd.nzbparser.add_nzbfile(filename)
        dialog.destroy()

    def opencomplete(self, icon):
        subprocess.Popen(["xdg-open", cfg.complete_dir.get_path()])

    def browse(self, icon):
        launch_a_browser(sabnzbd.BROWSER_URL, True)

    def pauseresume(self, icon):
        if self.sabpaused:
            self.resume()
        else:
            self.pause()

    def restart(self, icon):
        # NOTE(review): hover_text is never read in this class -- likely a
        # leftover from the Windows tray implementation; confirm
        self.hover_text = T("Restart")
        sabnzbd.trigger_restart()

    def shutdown(self, icon):
        # NOTE(review): hover_text is never read in this class; see restart()
        self.hover_text = T("Shutdown")
        sabnzbd.shutdown_program()

    def pause(self):
        sabnzbd.Scheduler.plan_resume(0)
        sabnzbd.Downloader.pause()

    def resume(self):
        # NOTE(review): pause() uses sabnzbd.Downloader (singleton) but this
        # uses sabnzbd.downloader (module) -- confirm both APIs exist
        sabnzbd.Scheduler.plan_resume(0)
        sabnzbd.downloader.unpause_all()

    def rss(self, icon):
        sabnzbd.Scheduler.force_rss()
|
femsolver | writerbase | # ***************************************************************************
# * Copyright (c) 2016 Bernd Hahnebach <bernd@bimstatik.org> *
# * *
# * This file is part of the FreeCAD CAx development system. *
# * *
# * This program is free software; you can redistribute it and/or modify *
# * it under the terms of the GNU Lesser General Public License (LGPL) *
# * as published by the Free Software Foundation; either version 2 of *
# * the License, or (at your option) any later version. *
# * for detail see the LICENCE text file. *
# * *
# * This program is distributed in the hope that it will be useful, *
# * but WITHOUT ANY WARRANTY; without even the implied warranty of *
# * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *
# * GNU Library General Public License for more details. *
# * *
# * You should have received a copy of the GNU Library General Public *
# * License along with this program; if not, write to the Free Software *
# * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 *
# * USA *
# * *
# ***************************************************************************
__title__ = "FreeCAD FEM solver writer base object"
__author__ = "Bernd Hahnebach"
__url__ = "https://www.freecad.org"
## \addtogroup FEM
# @{
import os
from os.path import join
import FreeCAD
from femmesh import meshsetsgetter
class FemInputWriter:
    """Base class for FEM solver input-file writers.

    Gathers analysis, solver, mesh and member objects, resolves a usable
    working directory, and provides generic helpers to write constraint
    mesh sets and constraint property data in a solver-agnostic way.
    """

    def __init__(
        self,
        analysis_obj,
        solver_obj,
        mesh_obj,
        member,
        dir_name=None,
        mat_geo_sets=None,
    ):
        """Store the analysis setup and resolve the working directory.

        @analysis_obj: the FEM analysis document object
        @solver_obj:   the solver document object (supplies AnalysisType)
        @mesh_obj:     the FEM mesh document object (may be None)
        @member:       container with the analysis member object lists
        @dir_name:     working directory; a temporary one is created when
                       missing or invalid
        @mat_geo_sets: optional precomputed material/geometry sets
        """
        # class attributes from parameter values
        self.analysis = analysis_obj
        self.solver_obj = solver_obj
        self.mesh_object = mesh_obj
        self.member = member
        # more attributes
        self.analysis_type = self.solver_obj.AnalysisType
        self.document = self.analysis.Document
        # working dir
        # if dir_name was not given or does not exist: create a temporary dir
        # Purpose: makes sure the analysis can be run even in weird situations
        make_tmp_dir = False
        if dir_name is None:
            FreeCAD.Console.PrintWarning(
                "Error: The working_dir in base input file writer class was not set. "
                "A temporary directory is used.\n"
            )
            make_tmp_dir = True
        elif not os.path.isdir(dir_name):
            FreeCAD.Console.PrintWarning(
                "Error: The working_dir: '{}' given to "
                "base input file writer class does not exist. "
                "A temporary directory is used.\n".format(dir_name)
            )
            make_tmp_dir = True
        if make_tmp_dir:
            from tempfile import mkdtemp

            dir_name = mkdtemp(prefix="fcfem_")
            FreeCAD.Console.PrintWarning(
                "The working directory '{}' was created and will be used.".format(
                    dir_name
                )
            )
        self.dir_name = dir_name
        # new class attributes
        self.fc_ver = FreeCAD.Version()
        # standard CalculiX set names
        self.ccx_nall = "Nall"
        self.ccx_eall = "Eall"
        self.ccx_evolumes = "Evolumes"
        self.ccx_efaces = "Efaces"
        self.ccx_eedges = "Eedges"
        self.mat_geo_sets = mat_geo_sets
        if self.mesh_object:
            self.femmesh = self.mesh_object.FemMesh
        else:
            # NOTE: self.femmesh stays unset in this case; callers must
            # not touch it without a mesh object
            FreeCAD.Console.PrintWarning(
                "No finite element mesh object was given to the writer class. "
                "In rare cases this might not be an error. "
            )
        # *************************************************
        # deprecated, leave for compatibility reasons
        # if these are calculated here they are calculated twice :-(
        self.femnodes_mesh = {}
        self.femelement_table = {}
        self.constraint_conflict_nodes = []
        self.femnodes_ele_table = {}
        self.femelements_edges_only = []
        self.femelements_faces_only = []
        self.femelement_volumes_table = {}
        self.femelement_faces_table = {}
        self.femelement_edges_table = {}
        self.femelement_count_test = True
        # deprecated, leave for compatibility reasons
        # do not add new objects
        # only the ones which exists on 0.19 release are kept
        # materials
        self.material_objects = member.mats_linear
        self.material_nonlinear_objects = member.mats_nonlinear
        # geometries
        self.beamsection_objects = member.geos_beamsection
        self.beamrotation_objects = member.geos_beamrotation
        self.fluidsection_objects = member.geos_fluidsection
        self.shellthickness_objects = member.geos_shellthickness
        # constraints
        self.contact_objects = member.cons_contact
        self.displacement_objects = member.cons_displacement
        self.fixed_objects = member.cons_fixed
        self.force_objects = member.cons_force
        self.heatflux_objects = member.cons_heatflux
        self.initialtemperature_objects = member.cons_initialtemperature
        self.planerotation_objects = member.cons_planerotation
        self.pressure_objects = member.cons_pressure
        self.selfweight_objects = member.cons_selfweight
        self.temperature_objects = member.cons_temperature
        self.tie_objects = member.cons_tie
        self.transform_objects = member.cons_transform
        # meshdatagetter, for compatibility, same with all getter methods
        self.meshdatagetter = meshsetsgetter.MeshSetsGetter(
            self.analysis,
            self.solver_obj,
            self.mesh_object,
            self.member,
        )

    # ********************************************************************************************
    # ********************************************************************************************
    # generic writer for constraints mesh sets and constraints property data
    # write constraint node sets, constraint face sets, constraint element sets
    def write_constraints_meshsets(self, f, femobjs, con_module):
        """Write the mesh sets for one constraint type.

        @f:          open output file for the main input file
        @femobjs:    list of dicts; each FreeCAD object is femobj["Object"]
        @con_module: constraint writer module providing the set names and
                     per-object meshdata writer

        Skips silently when there are no objects or the constraint does
        not apply to the current analysis type.  When split input files
        are enabled the sets go into an *INCLUDE'd side file.

        NOTE(review): relies on self.split_inpfile and self.mesh_name,
        which subclasses must set -- they are not initialized here.
        """
        if not femobjs:
            return
        analysis_types = con_module.get_analysis_types()
        if analysis_types != "all" and self.analysis_type not in analysis_types:
            return

        def constraint_sets_loop_writing(the_file, femobjs, write_before, write_after):
            # Emit optional prologue, one set block per object, optional epilogue
            if write_before != "":
                the_file.write(write_before)
            for femobj in femobjs:
                # femobj --> dict, FreeCAD document object is femobj["Object"]
                the_obj = femobj["Object"]
                the_file.write("** {}\n".format(the_obj.Label))
                con_module.write_meshdata_constraint(the_file, femobj, the_obj, self)
            if write_after != "":
                the_file.write(write_after)

        write_before = con_module.get_before_write_meshdata_constraint()
        write_after = con_module.get_after_write_meshdata_constraint()
        # write sets to file
        write_name = con_module.get_sets_name()
        f.write("\n{}\n".format(59 * "*"))
        f.write("** {}\n".format(write_name.replace("_", " ")))
        if self.split_inpfile is True:
            file_name_split = "{}_{}.inp".format(self.mesh_name, write_name)
            f.write("*INCLUDE,INPUT={}\n".format(file_name_split))
            # Use a context manager so the include file is closed even if
            # a constraint writer raises (the old open()/close() pair
            # leaked the handle on error).
            with open(join(self.dir_name, file_name_split), "w") as inpfile_split:
                constraint_sets_loop_writing(
                    inpfile_split, femobjs, write_before, write_after
                )
        else:
            constraint_sets_loop_writing(f, femobjs, write_before, write_after)

    # write constraint property data
    def write_constraints_propdata(self, f, femobjs, con_module):
        """Write the property data for one constraint type.

        Same skip rules as write_constraints_meshsets(); always writes
        into the main input file.
        """
        if not femobjs:
            return
        analysis_types = con_module.get_analysis_types()
        if analysis_types != "all" and self.analysis_type not in analysis_types:
            return
        write_before = con_module.get_before_write_constraint()
        write_after = con_module.get_after_write_constraint()
        # write constraint to file
        f.write("\n{}\n".format(59 * "*"))
        f.write("** {}\n".format(con_module.get_constraint_title()))
        if write_before != "":
            f.write(write_before)
        for femobj in femobjs:
            # femobj --> dict, FreeCAD document object is femobj["Object"]
            the_obj = femobj["Object"]
            f.write("** {}\n".format(the_obj.Label))
            con_module.write_constraint(f, femobj, the_obj, self)
        if write_after != "":
            f.write(write_after)

    # ********************************************************************************************
    # deprecated, do not add new constraints
    # only the ones which exists on 0.19 release are kept
    # thin compatibility wrappers around the shared mesh-sets getter
    def get_constraints_fixed_nodes(self):
        self.meshdatagetter.get_constraints_fixed_nodes()

    def get_constraints_displacement_nodes(self):
        self.meshdatagetter.get_constraints_displacement_nodes()

    def get_constraints_planerotation_nodes(self):
        self.meshdatagetter.get_constraints_planerotation_nodes()

    def get_constraints_transform_nodes(self):
        self.meshdatagetter.get_constraints_transform_nodes()

    def get_constraints_temperature_nodes(self):
        self.meshdatagetter.get_constraints_temperature_nodes()

    def get_constraints_fluidsection_nodes(self):
        self.meshdatagetter.get_constraints_fluidsection_nodes()

    def get_constraints_force_nodeloads(self):
        self.meshdatagetter.get_constraints_force_nodeloads()

    def get_constraints_pressure_faces(self):
        self.meshdatagetter.get_constraints_pressure_faces()

    def get_constraints_contact_faces(self):
        self.meshdatagetter.get_constraints_contact_faces()

    def get_constraints_tie_faces(self):
        self.meshdatagetter.get_constraints_tie_faces()

    def get_constraints_heatflux_faces(self):
        self.meshdatagetter.get_constraints_heatflux_faces()
## @}
|
migrations | 0002_squashed_initial | # Generated by Django 3.2.5 on 2022-05-31 14:46
import django.db.models.deletion
import django.db.models.manager
import django_migration_linter as linter
from django.db import migrations, models
class Migration(migrations.Migration):
    """Squashed initial migration wiring up the ``alerts`` app relations.

    Generated by Django (see the header comment): it adds the foreign-key /
    one-to-one / many-to-many fields whose targets live in other apps,
    declares two proxy models, and installs the uniqueness rules.
    Avoid hand-editing the operations; regenerate with ``makemigrations``.
    """

    # Part of the initial migration set for a fresh database.
    initial = True

    # Squashed initial migrations of every app referenced by the fields below.
    dependencies = [
        ("user_management", "0001_squashed_initial"),
        ("alerts", "0001_squashed_initial"),
        ("slack", "0002_squashed_initial"),
        ("telegram", "0001_squashed_initial"),
        ("schedules", "0001_squashed_initial"),
    ]

    operations = [
        # Tell django-migration-linter to skip this (generated) migration.
        linter.IgnoreMigration(),
        migrations.AddField(
            model_name="userhasnotification",
            name="user",
            field=models.ForeignKey(
                on_delete=django.db.models.deletion.CASCADE, to="user_management.user"
            ),
        ),
        # --- ResolutionNoteSlackMessage relations ---
        migrations.AddField(
            model_name="resolutionnoteslackmessage",
            name="added_by_user",
            field=models.ForeignKey(
                null=True,
                on_delete=django.db.models.deletion.SET_NULL,
                related_name="added_resolution_note_slack_messages",
                to="user_management.user",
            ),
        ),
        migrations.AddField(
            model_name="resolutionnoteslackmessage",
            name="alert_group",
            field=models.ForeignKey(
                on_delete=django.db.models.deletion.CASCADE,
                related_name="resolution_note_slack_messages",
                to="alerts.alertgroup",
            ),
        ),
        migrations.AddField(
            model_name="resolutionnoteslackmessage",
            name="user",
            field=models.ForeignKey(
                null=True,
                on_delete=django.db.models.deletion.SET_NULL,
                related_name="authored_resolution_note_slack_messages",
                to="user_management.user",
            ),
        ),
        # --- ResolutionNote relations ---
        migrations.AddField(
            model_name="resolutionnote",
            name="alert_group",
            field=models.ForeignKey(
                on_delete=django.db.models.deletion.CASCADE,
                related_name="resolution_notes",
                to="alerts.alertgroup",
            ),
        ),
        migrations.AddField(
            model_name="resolutionnote",
            name="author",
            field=models.ForeignKey(
                default=None,
                null=True,
                on_delete=django.db.models.deletion.SET_NULL,
                related_name="authored_resolution_notes",
                to="user_management.user",
            ),
        ),
        migrations.AddField(
            model_name="resolutionnote",
            name="resolution_note_slack_message",
            field=models.OneToOneField(
                default=None,
                null=True,
                on_delete=django.db.models.deletion.SET_NULL,
                related_name="resolution_note",
                to="alerts.resolutionnoteslackmessage",
            ),
        ),
        # --- Invitation relations ---
        migrations.AddField(
            model_name="invitation",
            name="alert_group",
            field=models.ForeignKey(
                on_delete=django.db.models.deletion.CASCADE,
                related_name="invitations",
                to="alerts.alertgroup",
            ),
        ),
        migrations.AddField(
            model_name="invitation",
            name="author",
            field=models.ForeignKey(
                null=True,
                on_delete=django.db.models.deletion.SET_NULL,
                related_name="author_of_invitations",
                to="user_management.user",
            ),
        ),
        migrations.AddField(
            model_name="invitation",
            name="invitee",
            field=models.ForeignKey(
                null=True,
                on_delete=django.db.models.deletion.SET_NULL,
                related_name="invitee_in_invitations",
                to="user_management.user",
            ),
        ),
        migrations.AddField(
            model_name="grafanaalertingcontactpoint",
            name="alert_receive_channel",
            field=models.ForeignKey(
                default=None,
                null=True,
                on_delete=django.db.models.deletion.CASCADE,
                related_name="contact_points",
                to="alerts.alertreceivechannel",
            ),
        ),
        # --- EscalationPolicy relations ---
        migrations.AddField(
            model_name="escalationpolicy",
            name="custom_button_trigger",
            field=models.ForeignKey(
                default=None,
                null=True,
                on_delete=django.db.models.deletion.CASCADE,
                related_name="escalation_policies",
                to="alerts.custombutton",
            ),
        ),
        migrations.AddField(
            model_name="escalationpolicy",
            name="escalation_chain",
            field=models.ForeignKey(
                on_delete=django.db.models.deletion.CASCADE,
                related_name="escalation_policies",
                to="alerts.escalationchain",
            ),
        ),
        migrations.AddField(
            model_name="escalationpolicy",
            name="last_notified_user",
            field=models.ForeignKey(
                default=None,
                null=True,
                on_delete=django.db.models.deletion.SET_NULL,
                related_name="last_notified_in_escalation_policies",
                to="user_management.user",
            ),
        ),
        migrations.AddField(
            model_name="escalationpolicy",
            name="notify_schedule",
            field=models.ForeignKey(
                default=None,
                null=True,
                on_delete=django.db.models.deletion.SET_NULL,
                related_name="escalation_policies",
                to="schedules.oncallschedule",
            ),
        ),
        migrations.AddField(
            model_name="escalationpolicy",
            name="notify_to_group",
            field=models.ForeignKey(
                default=None,
                null=True,
                on_delete=django.db.models.deletion.SET_NULL,
                to="slack.slackusergroup",
            ),
        ),
        migrations.AddField(
            model_name="escalationpolicy",
            name="notify_to_users_queue",
            field=models.ManyToManyField(to="user_management.User"),
        ),
        # --- EscalationChain relations ---
        migrations.AddField(
            model_name="escalationchain",
            name="organization",
            field=models.ForeignKey(
                on_delete=django.db.models.deletion.CASCADE,
                related_name="escalation_chains",
                to="user_management.organization",
            ),
        ),
        migrations.AddField(
            model_name="escalationchain",
            name="team",
            field=models.ForeignKey(
                default=None,
                null=True,
                on_delete=django.db.models.deletion.SET_NULL,
                related_name="escalation_chains",
                to="user_management.team",
            ),
        ),
        # --- CustomButton relations ---
        migrations.AddField(
            model_name="custombutton",
            name="organization",
            field=models.ForeignKey(
                on_delete=django.db.models.deletion.CASCADE,
                related_name="custom_buttons",
                to="user_management.organization",
            ),
        ),
        migrations.AddField(
            model_name="custombutton",
            name="team",
            field=models.ForeignKey(
                default=None,
                null=True,
                on_delete=django.db.models.deletion.SET_NULL,
                related_name="custom_buttons",
                to="user_management.team",
            ),
        ),
        # --- ChannelFilter relations ---
        migrations.AddField(
            model_name="channelfilter",
            name="alert_receive_channel",
            field=models.ForeignKey(
                on_delete=django.db.models.deletion.CASCADE,
                related_name="channel_filters",
                to="alerts.alertreceivechannel",
            ),
        ),
        migrations.AddField(
            model_name="channelfilter",
            name="escalation_chain",
            field=models.ForeignKey(
                default=None,
                null=True,
                on_delete=django.db.models.deletion.SET_NULL,
                related_name="channel_filters",
                to="alerts.escalationchain",
            ),
        ),
        migrations.AddField(
            model_name="channelfilter",
            name="telegram_channel",
            field=models.ForeignKey(
                default=None,
                null=True,
                on_delete=django.db.models.deletion.SET_NULL,
                related_name="channel_filter",
                to="telegram.telegramtoorganizationconnector",
            ),
        ),
        # --- AlertReceiveChannel relations ---
        migrations.AddField(
            model_name="alertreceivechannel",
            name="author",
            field=models.ForeignKey(
                blank=True,
                null=True,
                on_delete=django.db.models.deletion.SET_NULL,
                related_name="alert_receive_channels",
                to="user_management.user",
            ),
        ),
        migrations.AddField(
            model_name="alertreceivechannel",
            name="maintenance_author",
            field=models.ForeignKey(
                null=True,
                on_delete=django.db.models.deletion.SET_NULL,
                related_name="alertreceivechannel_maintenances_created",
                to="user_management.user",
            ),
        ),
        migrations.AddField(
            model_name="alertreceivechannel",
            name="organization",
            field=models.ForeignKey(
                on_delete=django.db.models.deletion.CASCADE,
                related_name="alert_receive_channels",
                to="user_management.organization",
            ),
        ),
        migrations.AddField(
            model_name="alertreceivechannel",
            name="team",
            field=models.ForeignKey(
                default=None,
                null=True,
                on_delete=django.db.models.deletion.SET_NULL,
                related_name="alert_receive_channels",
                to="user_management.team",
            ),
        ),
        migrations.AddField(
            model_name="alertgrouppostmortem",
            name="alert_group",
            field=models.ForeignKey(
                on_delete=django.db.models.deletion.CASCADE,
                related_name="postmortem_text",
                to="alerts.alertgroup",
            ),
        ),
        # --- AlertGroupLogRecord relations ---
        migrations.AddField(
            model_name="alertgrouplogrecord",
            name="alert_group",
            field=models.ForeignKey(
                on_delete=django.db.models.deletion.CASCADE,
                related_name="log_records",
                to="alerts.alertgroup",
            ),
        ),
        migrations.AddField(
            model_name="alertgrouplogrecord",
            name="author",
            field=models.ForeignKey(
                default=None,
                null=True,
                on_delete=django.db.models.deletion.SET_NULL,
                related_name="log_records",
                to="user_management.user",
            ),
        ),
        migrations.AddField(
            model_name="alertgrouplogrecord",
            name="custom_button",
            field=models.ForeignKey(
                default=None,
                null=True,
                on_delete=django.db.models.deletion.SET_DEFAULT,
                related_name="log_records",
                to="alerts.custombutton",
            ),
        ),
        migrations.AddField(
            model_name="alertgrouplogrecord",
            name="dependent_alert_group",
            field=models.ForeignKey(
                default=None,
                null=True,
                on_delete=django.db.models.deletion.SET_NULL,
                related_name="dependent_log_records",
                to="alerts.alertgroup",
            ),
        ),
        migrations.AddField(
            model_name="alertgrouplogrecord",
            name="escalation_policy",
            field=models.ForeignKey(
                null=True,
                on_delete=django.db.models.deletion.SET_NULL,
                related_name="log_records",
                to="alerts.escalationpolicy",
            ),
        ),
        migrations.AddField(
            model_name="alertgrouplogrecord",
            name="invitation",
            field=models.ForeignKey(
                default=None,
                null=True,
                on_delete=django.db.models.deletion.SET_NULL,
                related_name="log_records",
                to="alerts.invitation",
            ),
        ),
        migrations.AddField(
            model_name="alertgrouplogrecord",
            name="root_alert_group",
            field=models.ForeignKey(
                default=None,
                null=True,
                on_delete=django.db.models.deletion.SET_NULL,
                related_name="root_log_records",
                to="alerts.alertgroup",
            ),
        ),
        migrations.AddField(
            model_name="alertgroupcounter",
            name="organization",
            field=models.OneToOneField(
                on_delete=django.db.models.deletion.CASCADE,
                to="user_management.organization",
            ),
        ),
        # --- AlertGroup relations ---
        migrations.AddField(
            model_name="alertgroup",
            name="acknowledged_by_user",
            field=models.ForeignKey(
                default=None,
                null=True,
                on_delete=django.db.models.deletion.SET_NULL,
                to="user_management.user",
            ),
        ),
        migrations.AddField(
            model_name="alertgroup",
            name="channel",
            field=models.ForeignKey(
                on_delete=django.db.models.deletion.CASCADE,
                related_name="alert_groups",
                to="alerts.alertreceivechannel",
            ),
        ),
        migrations.AddField(
            model_name="alertgroup",
            name="channel_filter",
            field=models.ForeignKey(
                default=None,
                null=True,
                on_delete=django.db.models.deletion.SET_DEFAULT,
                related_name="alert_groups",
                to="alerts.channelfilter",
            ),
        ),
        migrations.AddField(
            model_name="alertgroup",
            name="resolved_by_alert",
            field=models.ForeignKey(
                default=None,
                null=True,
                on_delete=django.db.models.deletion.SET_NULL,
                related_name="resolved_alert_groups",
                to="alerts.alert",
            ),
        ),
        migrations.AddField(
            model_name="alertgroup",
            name="resolved_by_user",
            field=models.ForeignKey(
                default=None,
                null=True,
                on_delete=django.db.models.deletion.SET_NULL,
                related_name="resolved_alert_groups",
                to="user_management.user",
            ),
        ),
        migrations.AddField(
            model_name="alertgroup",
            name="root_alert_group",
            field=models.ForeignKey(
                default=None,
                null=True,
                on_delete=django.db.models.deletion.SET_NULL,
                related_name="dependent_alert_groups",
                to="alerts.alertgroup",
            ),
        ),
        migrations.AddField(
            model_name="alertgroup",
            name="silenced_by_user",
            field=models.ForeignKey(
                default=None,
                null=True,
                on_delete=django.db.models.deletion.SET_NULL,
                related_name="silenced_alert_groups",
                to="user_management.user",
            ),
        ),
        migrations.AddField(
            model_name="alertgroup",
            name="slack_log_message",
            field=models.OneToOneField(
                default=None,
                null=True,
                on_delete=django.db.models.deletion.SET_NULL,
                to="slack.slackmessage",
            ),
        ),
        migrations.AddField(
            model_name="alertgroup",
            name="slack_message",
            field=models.OneToOneField(
                default=None,
                null=True,
                on_delete=django.db.models.deletion.SET_NULL,
                related_name="_alert_group",
                to="slack.slackmessage",
            ),
        ),
        migrations.AddField(
            model_name="alertgroup",
            name="wiped_by",
            field=models.ForeignKey(
                default=None,
                null=True,
                on_delete=django.db.models.deletion.SET_NULL,
                related_name="wiped_by_user",
                to="user_management.user",
            ),
        ),
        migrations.AddField(
            model_name="alert",
            name="group",
            field=models.ForeignKey(
                default=None,
                null=True,
                on_delete=django.db.models.deletion.CASCADE,
                related_name="alerts",
                to="alerts.alertgroup",
            ),
        ),
        # --- Proxy models (no tables of their own) ---
        migrations.CreateModel(
            name="AlertForAlertManager",
            fields=[],
            options={
                "proxy": True,
                "indexes": [],
                "constraints": [],
            },
            bases=("alerts.alert",),
        ),
        migrations.CreateModel(
            name="AlertGroupForAlertManager",
            fields=[],
            options={
                "proxy": True,
                "indexes": [],
                "constraints": [],
            },
            bases=("alerts.alertgroup",),
            managers=[
                ("all_objects", django.db.models.manager.Manager()),
            ],
        ),
        # --- Uniqueness rules ---
        migrations.AlterUniqueTogether(
            name="userhasnotification",
            unique_together={("user", "alert_group")},
        ),
        migrations.AlterUniqueTogether(
            name="resolutionnoteslackmessage",
            unique_together={("thread_ts", "ts")},
        ),
        migrations.AlterUniqueTogether(
            name="escalationchain",
            unique_together={("organization", "name")},
        ),
        migrations.AlterUniqueTogether(
            name="custombutton",
            unique_together={("name", "organization")},
        ),
        migrations.AddConstraint(
            model_name="alertreceivechannel",
            constraint=models.UniqueConstraint(
                fields=("organization", "verbal_name", "deleted_at"),
                name="unique integration name",
            ),
        ),
        migrations.AlterUniqueTogether(
            name="alertgroup",
            unique_together={
                (
                    "channel_id",
                    "channel_filter_id",
                    "distinction",
                    "is_open_for_grouping",
                )
            },
        ),
    ]
|
views | util | # encoding: utf-8
import ckan.lib.base as base
import ckan.lib.helpers as h
from ckan.common import _, request
from ckan.types import Response
from flask import Blueprint
# Blueprint bundling CKAN's miscellaneous "/util" endpoints.
util = Blueprint("util", __name__)
def internal_redirect() -> Response:
    """Redirect the client to the ``url`` request parameter.

    Only site-local targets are honoured; anything external is rejected
    with a 403 so the endpoint cannot be abused as an open redirect.
    """
    target = request.form.get("url") or request.args.get("url")
    if not target:
        base.abort(400, _("Missing Value") + ": url")
    # Neutralise characters usable for header/log injection.
    for bad_char in ("\r", "\n", "\0"):
        target = target.replace(bad_char, " ")
    if not h.url_is_local(target):
        base.abort(403, _("Redirecting to external site is not allowed."))
    return h.redirect_to(target)
def primer() -> str:
    """Render every HTML component onto one page.

    Useful while developing or styling CKAN.
    """
    template = "development/primer.html"
    return base.render(template)
# Route registrations: the redirect helper accepts GET and POST (forms may
# post a target url); the primer page uses Flask's default methods.
util.add_url_rule(
    "/util/redirect",
    view_func=internal_redirect,
    methods=(
        "GET",
        "POST",
    ),
)
util.add_url_rule("/testing/primer", view_func=primer)
|
builtinContextMenus | additionsExportSelection | import gui.mainFrame
import wx
from gui.contextMenu import ContextMenuSelection
from gui.utils.clipboard import toClipboard
from service.fit import Fit
from service.port.eft import (
exportBoosters,
exportCargo,
exportDrones,
exportFighters,
exportImplants,
)
# Shorthand for wx's translation lookup, used for all UI labels below.
_t = wx.GetTranslation
class AdditionsExportAll(ContextMenuSelection):
    """Context-menu entry copying the selected fit "additions"
    (drones, fighters, cargo, implants, boosters) to the clipboard
    in EFT format.
    """

    visibilitySetting = "additionsCopyPaste"

    def __init__(self):
        self.mainFrame = gui.mainFrame.MainFrame.getInstance()
        # Maps the selection's source context to (display label, exporter).
        self.viewSpecMap = {
            "droneItemMisc": (_t("Drones"), exportDrones),
            "fighterItemMisc": (_t("Fighters"), exportFighters),
            "cargoItemMisc": (_t("Cargo Items"), exportCargo),
            "implantItemMisc": (_t("Implants"), exportImplants),
            "implantItemMiscChar": (_t("Implants"), exportImplants),
            "boosterItemMisc": (_t("Boosters"), exportBoosters),
        }

    def display(self, callingWindow, srcContext, selection):
        """Show the entry only for known contexts with a non-empty
        selection on an existing active fit."""
        if srcContext not in self.viewSpecMap or not selection:
            return False
        activeFitID = self.mainFrame.getActiveFit()
        if Fit.getInstance().getFit(activeFitID) is None:
            return False
        # Remember the context so getText/activate know what to export.
        self.srcContext = srcContext
        return True

    def getText(self, callingWindow, itmContext, selection):
        """Menu label, e.g. "Copy Selected Drones"."""
        label, _exporter = self.viewSpecMap[self.srcContext]
        return _t("Copy Selected {}").format(label)

    def activate(self, callingWindow, fullContext, selection, i):
        """Run the exporter for the remembered context; copy non-empty output."""
        _label, exporter = self.viewSpecMap[self.srcContext]
        text = exporter(selection)
        if text:
            toClipboard(text)
# Make the menu item available to pyfa's context-menu framework.
AdditionsExportAll.register()
|
cura | BuildVolume | # Copyright (c) 2021 Ultimaker B.V.
# Cura is released under the terms of the LGPLv3 or higher.
import math
from typing import TYPE_CHECKING, Any, Dict, Iterable, List, Optional, Set, cast
import numpy
from cura.Scene.CuraSceneNode import CuraSceneNode
from cura.Settings.ExtruderManager import ExtruderManager
from cura.Settings.GlobalStack import GlobalStack
from PyQt6.QtCore import QTimer
from UM.Application import Application # To modify the maximum zoom level.
from UM.i18n import i18nCatalog
from UM.Logger import Logger
from UM.Math.AxisAlignedBox import AxisAlignedBox
from UM.Math.Color import Color
from UM.Math.Matrix import Matrix
from UM.Math.Polygon import Polygon
from UM.Math.Vector import Vector
from UM.Mesh.MeshBuilder import MeshBuilder
from UM.Mesh.MeshData import MeshData
from UM.Message import Message
from UM.Resources import Resources
from UM.Scene.Iterator.BreadthFirstIterator import BreadthFirstIterator
from UM.Scene.Platform import Platform
from UM.Scene.SceneNode import SceneNode
from UM.Signal import Signal
from UM.View.GL.OpenGL import OpenGL
from UM.View.RenderBatch import RenderBatch
if TYPE_CHECKING:
from cura.CuraApplication import CuraApplication
from cura.Settings.ExtruderStack import ExtruderStack
from UM.Settings.ContainerStack import ContainerStack
# Translation catalog for all user-visible strings in this module.
catalog = i18nCatalog("cura")
# Radius of disallowed area in mm around prime. I.e. how much distance to keep from prime position.
PRIME_CLEARANCE = 6.5
class BuildVolume(SceneNode):
    """Build volume is a special kind of node that is responsible for rendering the printable area & disallowed areas."""

    # Emitted when the computed raft thickness changes (see users of this signal).
    raftThicknessChanged = Signal()
def __init__(
    self, application: "CuraApplication", parent: Optional[SceneNode] = None
) -> None:
    """Set up the build-volume scene node.

    Initialises lazily-resolved render colors and meshes, creates
    debounce timers for stack/scene/setting changes, and subscribes to
    the relevant application and machine-manager signals.

    :param application: The running CuraApplication instance.
    :param parent: Optional parent node in the scene graph.
    """
    super().__init__(parent)
    self._application = application
    self._machine_manager = self._application.getMachineManager()
    # Render colors; resolved from the theme later (None until then).
    self._volume_outline_color = None  # type: Optional[Color]
    self._x_axis_color = None  # type: Optional[Color]
    self._y_axis_color = None  # type: Optional[Color]
    self._z_axis_color = None  # type: Optional[Color]
    self._disallowed_area_color = None  # type: Optional[Color]
    self._error_area_color = None  # type: Optional[Color]
    # Printable dimensions and build-plate shape (empty until configured).
    self._width = 0  # type: float
    self._height = 0  # type: float
    self._depth = 0  # type: float
    self._shape = ""  # type: str
    self._scale_vector = Vector(1.0, 1.0, 1.0)
    self._shader = None
    self._origin_mesh = None  # type: Optional[MeshData]
    self._origin_line_length = 20
    self._origin_line_width = 1
    self._enabled = False
    self._grid_mesh = None  # type: Optional[MeshData]
    self._grid_shader = None
    self._disallowed_areas = []  # type: List[Polygon]
    self._disallowed_areas_no_brim = []  # type: List[Polygon]
    self._disallowed_area_mesh = None  # type: Optional[MeshData]
    self._disallowed_area_size = 0.0
    self._error_areas = []  # type: List[Polygon]
    self._error_mesh = None  # type: Optional[MeshData]
    self.setCalculateBoundingBox(False)
    self._volume_aabb = None  # type: Optional[AxisAlignedBox]
    self._raft_thickness = 0.0
    self._extra_z_clearance = 0.0
    self._adhesion_type = None  # type: Any
    self._platform = Platform(self)
    self._edge_disallowed_size = None
    self._build_volume_message = Message(
        catalog.i18nc(
            "@info:status",
            "The build volume height has been reduced due to the value of the"
            ' "Print Sequence" setting to prevent the gantry from colliding'
            " with printed models.",
        ),
        title=catalog.i18nc("@info:title", "Build Volume"),
        message_type=Message.MessageType.WARNING,
    )
    self._global_container_stack = None  # type: Optional[GlobalStack]
    # Debounce global-stack changes so rapid switches rebuild only once.
    self._stack_change_timer = QTimer()
    self._stack_change_timer.setInterval(100)
    self._stack_change_timer.setSingleShot(True)
    self._stack_change_timer.timeout.connect(self._onStackChangeTimerFinished)
    self._application.globalContainerStackChanged.connect(self._onStackChanged)
    self._engine_ready = False
    self._application.engineCreatedSignal.connect(self._onEngineCreated)
    self._has_errors = False
    self._application.getController().getScene().sceneChanged.connect(
        self._onSceneChanged
    )
    # Objects loaded at the moment. We are connected to the property changed events of these objects.
    self._scene_objects = set()  # type: Set[SceneNode]
    # Debounce scene changes (200 ms) before re-scanning sliceable nodes.
    self._scene_change_timer = QTimer()
    self._scene_change_timer.setInterval(200)
    self._scene_change_timer.setSingleShot(True)
    self._scene_change_timer.timeout.connect(self._onSceneChangeTimerFinished)
    # Debounce individual setting changes (150 ms) before rebuilding.
    self._setting_change_timer = QTimer()
    self._setting_change_timer.setInterval(150)
    self._setting_change_timer.setSingleShot(True)
    self._setting_change_timer.timeout.connect(self._onSettingChangeTimerFinished)
    # Must be after setting _build_volume_message, apparently that is used in getMachineManager.
    # activeQualityChanged is always emitted after setActiveVariant, setActiveMaterial and setActiveQuality.
    # Therefore this works.
    self._machine_manager.activeQualityChanged.connect(self._onStackChanged)
    # Enable and disable extruder
    self._machine_manager.extruderChanged.connect(self.updateNodeBoundaryCheck)
    # List of settings which were updated
    self._changed_settings_since_last_rebuild = []  # type: List[str]
def _onSceneChanged(self, source):
    """(Re)start the scene-change debounce timer for sliceable nodes.

    Events arriving before a machine stack exists, or from nodes that
    cannot be sliced, are ignored.
    """
    if self._global_container_stack and source.callDecoration("isSliceable"):
        self._scene_change_timer.start()
def _onSceneChangeTimerFinished(self):
    """Re-scan the scene for sliceable nodes and resync listeners.

    Connects listeners for nodes added since the last scan, disconnects
    listeners of removed nodes, rebuilds the volume, and re-triggers the
    print_sequence setting handler with a synthetic event.
    """
    root = self._application.getController().getScene().getRoot()
    new_scene_objects = set(
        node
        for node in BreadthFirstIterator(root)
        if node.callDecoration("isSliceable")
    )
    if new_scene_objects != self._scene_objects:
        for node in (
            new_scene_objects - self._scene_objects
        ):  # Nodes that were added to the scene.
            self._updateNodeListeners(node)
            node.decoratorsChanged.connect(
                self._updateNodeListeners
            )  # Make sure that decoration changes afterwards also receive the same treatment
        for node in (
            self._scene_objects - new_scene_objects
        ):  # Nodes that were removed from the scene.
            per_mesh_stack = node.callDecoration("getStack")
            if per_mesh_stack:
                per_mesh_stack.propertyChanged.disconnect(
                    self._onSettingPropertyChanged
                )
            active_extruder_changed = node.callDecoration(
                "getActiveExtruderChangedSignal"
            )
            if active_extruder_changed is not None:
                node.callDecoration("getActiveExtruderChangedSignal").disconnect(
                    self._updateDisallowedAreasAndRebuild
                )
            node.decoratorsChanged.disconnect(self._updateNodeListeners)
        self.rebuild()
        self._scene_objects = new_scene_objects
        self._onSettingPropertyChanged(
            "print_sequence", "value"
        )  # Create fake event, so right settings are triggered.
def _updateNodeListeners(self, node: SceneNode):
    """Updates the listeners that listen for changes in per-mesh stacks.

    :param node: The node for which the decorators changed.
    """
    stack = node.callDecoration("getStack")
    if stack:
        stack.propertyChanged.connect(self._onSettingPropertyChanged)
    extruder_signal = node.callDecoration("getActiveExtruderChangedSignal")
    if extruder_signal is not None:
        extruder_signal.connect(self._updateDisallowedAreasAndRebuild)
def setWidth(self, width: float) -> None:
    """Set the printable width (X) of the build volume."""
    self._width = width
def getWidth(self) -> float:
    """Return the printable width (X) of the build volume."""
    return self._width
def setHeight(self, height: float) -> None:
    """Set the printable height (Y, up) of the build volume."""
    self._height = height
def getHeight(self) -> float:
    """Return the printable height (Y, up) of the build volume."""
    return self._height
def setDepth(self, depth: float) -> None:
    """Set the printable depth (Z) of the build volume."""
    self._depth = depth
def getDepth(self) -> float:
    """Return the printable depth (Z) of the build volume."""
    return self._depth
def setShape(self, shape: str) -> None:
    """Set the build-plate shape; an empty string is ignored (keeps the old value)."""
    if shape:
        self._shape = shape
def getShape(self) -> str:
    """Return the build-plate shape string ("" until one has been set)."""
    return self._shape
def getDiagonalSize(self) -> float:
    """Get the length of the 3D diagonal through the build volume.

    This gives a sense of the scale of the build volume in general.

    :return: length of the 3D diagonal through the build volume
    """
    w, h, d = self._width, self._height, self._depth
    return math.sqrt(w * w + h * h + d * d)
def getDisallowedAreas(self) -> List[Polygon]:
    """Return the disallowed-area polygons (the internal list, not a copy)."""
    return self._disallowed_areas
def getDisallowedAreasNoBrim(self) -> List[Polygon]:
    """Return the disallowed areas excluding brim (the internal list, not a copy)."""
    return self._disallowed_areas_no_brim
def setDisallowedAreas(self, areas: List[Polygon]):
    """Replace the disallowed-area polygons (stores the list as given)."""
    self._disallowed_areas = areas
def render(self, renderer):
    """Queue the volume's meshes for rendering.

    Lazily compiles the shaders on first use and bakes theme colors into
    the grid-shader uniforms. Always returns True, signalling that this
    node handles its own rendering.
    """
    if not self.getMeshData() or not self.isVisible():
        return True
    if not self._shader:
        # First render: compile the shader programs once.
        self._shader = OpenGL.getInstance().createShaderProgram(
            Resources.getPath(Resources.Shaders, "default.shader")
        )
        self._grid_shader = OpenGL.getInstance().createShaderProgram(
            Resources.getPath(Resources.Shaders, "grid.shader")
        )
        theme = self._application.getTheme()
        self._grid_shader.setUniformValue(
            "u_plateColor", Color(*theme.getColor("buildplate").getRgb())
        )
        self._grid_shader.setUniformValue(
            "u_gridColor0", Color(*theme.getColor("buildplate_grid").getRgb())
        )
        self._grid_shader.setUniformValue(
            "u_gridColor1", Color(*theme.getColor("buildplate_grid_minor").getRgb())
        )
    renderer.queueNode(self, mode=RenderBatch.RenderMode.Lines)
    renderer.queueNode(self, mesh=self._origin_mesh, backface_cull=True)
    renderer.queueNode(
        self, mesh=self._grid_mesh, shader=self._grid_shader, backface_cull=True
    )
    # Transparent overlay meshes are queued with negative sort keys.
    if self._disallowed_area_mesh:
        renderer.queueNode(
            self,
            mesh=self._disallowed_area_mesh,
            shader=self._shader,
            transparent=True,
            backface_cull=True,
            sort=-9,
        )
    if self._error_mesh:
        renderer.queueNode(
            self,
            mesh=self._error_mesh,
            shader=self._shader,
            transparent=True,
            backface_cull=True,
            sort=-8,
        )
    return True
def updateNodeBoundaryCheck(self):
    """For every sliceable node, update node._outside_buildarea

    Walks the whole scene: a node is flagged outside when it collides
    with the volume bounds or a disallowed area, lies entirely below the
    build plate, or is assigned to a disabled extruder. A group's flag is
    then propagated to all of its children.
    """
    if not self._global_container_stack:
        return
    root = self._application.getController().getScene().getRoot()
    nodes = cast(List[SceneNode], list(cast(Iterable, BreadthFirstIterator(root))))
    group_nodes = []  # type: List[SceneNode]
    build_volume_bounding_box = self.getBoundingBox()
    if build_volume_bounding_box:
        # It's over 9000!
        # We set this to a very low number, as we do allow models to intersect the build plate.
        # This means the model gets cut off at the build plate.
        build_volume_bounding_box = build_volume_bounding_box.set(bottom=-9001)
    else:
        # No bounding box. This is triggered when running Cura from command line with a model for the first time
        # In that situation there is a model, but no machine (and therefore no build volume.
        return
    for node in nodes:
        # Need to check group nodes later
        if node.callDecoration("isGroup"):
            group_nodes.append(node)  # Keep list of affected group_nodes
        if node.callDecoration("isSliceable") or node.callDecoration("isGroup"):
            if not isinstance(node, CuraSceneNode):
                continue
            if node.collidesWithBbox(build_volume_bounding_box):
                node.setOutsideBuildArea(True)
                continue
            if node.collidesWithAreas(self.getDisallowedAreas()):
                node.setOutsideBuildArea(True)
                continue
            # If the entire node is below the build plate, still mark it as outside.
            node_bounding_box = node.getBoundingBox()
            if (
                node_bounding_box
                and node_bounding_box.top < 0
                and not node.getParent().callDecoration("isGroup")
            ):
                node.setOutsideBuildArea(True)
                continue
            # Mark the node as outside build volume if the set extruder is disabled
            extruder_position = node.callDecoration("getActiveExtruderPosition")
            try:
                if not self._global_container_stack.extruderList[
                    int(extruder_position)
                ].isEnabled and not node.callDecoration("isGroup"):
                    node.setOutsideBuildArea(True)
                    continue
            except IndexError:  # Happens when the extruder list is too short. We're not done building the printer in memory yet.
                continue
            except TypeError:  # Happens when extruder_position is None. This object has no extruder decoration.
                continue
            node.setOutsideBuildArea(False)
    # Group nodes should override the _outside_buildarea property of their children.
    for group_node in group_nodes:
        children = group_node.getAllChildren()
        # Check if one or more children are non-printable and if so, set the parent as non-printable:
        for child_node in children:
            if child_node.isOutsideBuildArea():
                group_node.setOutsideBuildArea(True)
                break
        # Apply results of the check to all children of the group:
        for child_node in children:
            child_node.setOutsideBuildArea(group_node.isOutsideBuildArea())
def checkBoundsAndUpdate(
    self, node: CuraSceneNode, bounds: Optional[AxisAlignedBox] = None
) -> None:
    """Update the outsideBuildArea of a single node, given bounds or current build volume

    Single-node variant of updateNodeBoundaryCheck: checks bounding box,
    disallowed areas and the assigned extruder's enabled state.

    :param node: single node
    :param bounds: bounds or current build volume
    """
    if not isinstance(node, CuraSceneNode) or self._global_container_stack is None:
        return
    if bounds is None:
        build_volume_bounding_box = self.getBoundingBox()
        if build_volume_bounding_box:
            # It's over 9000!
            build_volume_bounding_box = build_volume_bounding_box.set(bottom=-9001)
        else:
            # No bounding box. This is triggered when running Cura from command line with a model for the first time
            # In that situation there is a model, but no machine (and therefore no build volume.
            return
    else:
        build_volume_bounding_box = bounds
    if node.callDecoration("isSliceable") or node.callDecoration("isGroup"):
        if node.collidesWithBbox(build_volume_bounding_box):
            node.setOutsideBuildArea(True)
            return
        if node.collidesWithAreas(self.getDisallowedAreas()):
            node.setOutsideBuildArea(True)
            return
        # Mark the node as outside build volume if the set extruder is disabled
        extruder_position = node.callDecoration("getActiveExtruderPosition")
        try:
            if not self._global_container_stack.extruderList[
                int(extruder_position)
            ].isEnabled:
                node.setOutsideBuildArea(True)
                return
        except IndexError:
            # If the extruder doesn't exist, also mark it as unprintable.
            node.setOutsideBuildArea(True)
            return
        node.setOutsideBuildArea(False)
def _buildGridMesh(
    self,
    min_w: float,
    max_w: float,
    min_h: float,
    max_h: float,
    min_d: float,
    max_d: float,
    z_fight_distance: float,
) -> MeshData:
    """Build the build-plate grid mesh: a quad, or a triangle fan disc
    for elliptic plates.

    ``z_fight_distance`` drops the grid slightly below the volume floor
    to avoid z-fighting with other geometry at the same height.
    """
    mb = MeshBuilder()
    if self._shape != "elliptic":
        # Build plate grid mesh
        mb.addQuad(
            Vector(min_w, min_h - z_fight_distance, min_d),
            Vector(max_w, min_h - z_fight_distance, min_d),
            Vector(max_w, min_h - z_fight_distance, max_d),
            Vector(min_w, min_h - z_fight_distance, max_d),
        )
        # The quad is two triangles -> six vertices that need UVs.
        for n in range(0, 6):
            v = mb.getVertex(n)
            mb.setVertexUVCoordinates(n, v[0], v[2])
        return mb.build()
    else:
        aspect = 1.0
        scale_matrix = Matrix()
        if self._width != 0:
            # Scale circular meshes by aspect ratio if width != height
            aspect = self._depth / self._width
            scale_matrix.compose(scale=Vector(1, 1, aspect))
        # Triangle fan: a center vertex followed by the arc vertices.
        mb.addVertex(0, min_h - z_fight_distance, 0)
        mb.addArc(
            max_w, Vector.Unit_Y, center=Vector(0, min_h - z_fight_distance, 0)
        )
        sections = mb.getVertexCount() - 1  # Center point is not an arc section
        indices = []
        for n in range(0, sections - 1):
            indices.append([0, n + 2, n + 1])
        mb.addIndices(numpy.asarray(indices, dtype=numpy.int32))
        mb.calculateNormals()
        for n in range(0, mb.getVertexCount()):
            v = mb.getVertex(n)
            mb.setVertexUVCoordinates(n, v[0], v[2] * aspect)
        return mb.build().getTransformed(scale_matrix)
def _buildMesh(
    self,
    min_w: float,
    max_w: float,
    min_h: float,
    max_h: float,
    min_d: float,
    max_d: float,
    z_fight_distance: float,
) -> MeshData:
    """Build the build-volume outline: the 12 cube edges, or for elliptic
    plates a bottom and top circle (scaled by depth/width aspect)."""
    if self._shape != "elliptic":
        # Outline 'cube' of the build volume, expressed as an edge table
        # (start corner, end corner) rather than twelve explicit calls.
        edges = (
            ((min_w, min_h, min_d), (max_w, min_h, min_d)),
            ((min_w, min_h, min_d), (min_w, max_h, min_d)),
            ((min_w, max_h, min_d), (max_w, max_h, min_d)),
            ((max_w, min_h, min_d), (max_w, max_h, min_d)),
            ((min_w, min_h, max_d), (max_w, min_h, max_d)),
            ((min_w, min_h, max_d), (min_w, max_h, max_d)),
            ((min_w, max_h, max_d), (max_w, max_h, max_d)),
            ((max_w, min_h, max_d), (max_w, max_h, max_d)),
            ((min_w, min_h, min_d), (min_w, min_h, max_d)),
            ((max_w, min_h, min_d), (max_w, min_h, max_d)),
            ((min_w, max_h, min_d), (min_w, max_h, max_d)),
            ((max_w, max_h, min_d), (max_w, max_h, max_d)),
        )
        mb = MeshBuilder()
        outline = self._volume_outline_color
        for start, end in edges:
            mb.addLine(Vector(*start), Vector(*end), color=outline)
        return mb.build()
    else:
        # Bottom and top 'ellipse' of the build volume
        scale_matrix = Matrix()
        if self._width != 0:
            # Scale circular meshes by aspect ratio if width != height
            aspect = self._depth / self._width
            scale_matrix.compose(scale=Vector(1, 1, aspect))
        mb = MeshBuilder()
        # Bottom circle sits below the floor by z_fight_distance; top at max_h.
        for circle_height in (min_h - z_fight_distance, max_h):
            mb.addArc(
                max_w,
                Vector.Unit_Y,
                center=(0, circle_height, 0),
                color=self._volume_outline_color,
            )
        return mb.build().getTransformed(scale_matrix)
def _buildOriginMesh(self, origin: Vector) -> MeshData:
    """Build the colored axis indicator (three thin bars) at the origin."""
    length = self._origin_line_length
    thickness = self._origin_line_width
    mb = MeshBuilder()
    # X axis: a long thin bar extending in +X from the origin.
    mb.addCube(
        width=length,
        height=thickness,
        depth=thickness,
        center=origin + Vector(length / 2, 0, 0),
        color=self._x_axis_color,
    )
    # Y axis: extends upwards from the origin.
    mb.addCube(
        width=thickness,
        height=length,
        depth=thickness,
        center=origin + Vector(0, length / 2, 0),
        color=self._y_axis_color,
    )
    # Z axis: note the subtraction — it extends in -Z from the origin.
    mb.addCube(
        width=thickness,
        height=thickness,
        depth=length,
        center=origin - Vector(0, 0, length / 2),
        color=self._z_axis_color,
    )
    return mb.build()
def _updateColors(self):
theme = self._application.getTheme()
if theme is None:
return
self._volume_outline_color = Color(*theme.getColor("volume_outline").getRgb())
self._x_axis_color = Color(*theme.getColor("x_axis").getRgb())
self._y_axis_color = Color(*theme.getColor("y_axis").getRgb())
self._z_axis_color = Color(*theme.getColor("z_axis").getRgb())
self._disallowed_area_color = Color(*theme.getColor("disallowed_area").getRgb())
self._error_area_color = Color(*theme.getColor("error_area").getRgb())
def _buildErrorMesh(
self,
min_w: float,
max_w: float,
min_h: float,
max_h: float,
min_d: float,
max_d: float,
disallowed_area_height: float,
) -> Optional[MeshData]:
if not self._error_areas:
return None
mb = MeshBuilder()
for error_area in self._error_areas:
color = self._error_area_color
points = error_area.getPoints()
first = Vector(
self._clamp(points[0][0], min_w, max_w),
disallowed_area_height,
self._clamp(points[0][1], min_d, max_d),
)
previous_point = Vector(
self._clamp(points[0][0], min_w, max_w),
disallowed_area_height,
self._clamp(points[0][1], min_d, max_d),
)
for point in points:
new_point = Vector(
self._clamp(point[0], min_w, max_w),
disallowed_area_height,
self._clamp(point[1], min_d, max_d),
)
mb.addFace(first, previous_point, new_point, color=color)
previous_point = new_point
return mb.build()
    def _buildDisallowedAreaMesh(
        self,
        min_w: float,
        max_w: float,
        min_h: float,
        max_h: float,
        min_d: float,
        max_d: float,
        disallowed_area_height: float,
    ) -> Optional[MeshData]:
        """Build a single flat mesh for all disallowed areas.

        Each polygon is fan-triangulated around its first point at height
        *disallowed_area_height*, with every vertex clamped into the
        horizontal build volume bounds. As a side effect this updates
        ``self._disallowed_area_size`` with the largest qualifying area's
        depth extent. Returns ``None`` when there are no disallowed areas.
        """
        if not self._disallowed_areas:
            return None
        mb = MeshBuilder()
        color = self._disallowed_area_color
        for polygon in self._disallowed_areas:
            points = polygon.getPoints()
            if len(points) == 0:
                continue
            # Fan anchor: the polygon's first point, clamped into the volume.
            first = Vector(
                self._clamp(points[0][0], min_w, max_w),
                disallowed_area_height,
                self._clamp(points[0][1], min_d, max_d),
            )
            previous_point = Vector(
                self._clamp(points[0][0], min_w, max_w),
                disallowed_area_height,
                self._clamp(points[0][1], min_d, max_d),
            )
            for point in points:
                new_point = Vector(
                    self._clamp(point[0], min_w, max_w),
                    disallowed_area_height,
                    self._clamp(point[1], min_d, max_d),
                )
                mb.addFace(first, previous_point, new_point, color=color)
                previous_point = new_point
            # Find the largest disallowed area to exclude it from the maximum scale bounds.
            # This is a very nasty hack. This pretty much only works for UM machines.
            # This disallowed area_size needs a -lot- of rework at some point in the future: TODO
            if (
                numpy.min(points[:, 1]) >= 0
            ): # This filters out all areas that have points to the left of the centre. This is done to filter the skirt area.
                size = abs(numpy.max(points[:, 1]) - numpy.min(points[:, 1]))
            else:
                size = 0
            self._disallowed_area_size = max(size, self._disallowed_area_size)
        return mb.build()
def _updateScaleFactor(self) -> None:
if not self._global_container_stack:
return
scale_xy = 100.0 / max(
100.0,
self._global_container_stack.getProperty(
"material_shrinkage_percentage_xy", "value"
),
)
scale_z = 100.0 / max(
100.0,
self._global_container_stack.getProperty(
"material_shrinkage_percentage_z", "value"
),
)
self._scale_vector = Vector(scale_xy, scale_xy, scale_z)
    def rebuild(self) -> None:
        """Recalculates the build volume & disallowed areas.

        Rebuilds the grid mesh, volume outline mesh, origin indicator,
        disallowed-area and error meshes, then recomputes the volume's
        bounding box, updates the scene's maximum scale bounds, and
        re-runs the node boundary check. Does nothing until the machine
        dimensions, the engine and the global stack are all available.
        """
        if not self._width or not self._height or not self._depth:
            return
        if not self._engine_ready:
            return
        if not self._global_container_stack:
            return
        # Lazily initialise the theme colors if they have not been set yet.
        if not self._volume_outline_color:
            self._updateColors()
        # Volume bounds, centred on X/Z with the plate at height 0.
        min_w = -self._width / 2
        max_w = self._width / 2
        min_h = 0.0
        max_h = self._height
        min_d = -self._depth / 2
        max_d = self._depth / 2
        z_fight_distance = 0.2 # Distance between buildplate and disallowed area meshes to prevent z-fighting
        self._grid_mesh = self._buildGridMesh(
            min_w, max_w, min_h, max_h, min_d, max_d, z_fight_distance
        )
        self.setMeshData(
            self._buildMesh(min_w, max_w, min_h, max_h, min_d, max_d, z_fight_distance)
        )
        # Indication of the machine origin
        if self._global_container_stack.getProperty("machine_center_is_zero", "value"):
            origin = (Vector(min_w, min_h, min_d) + Vector(max_w, min_h, max_d)) / 2
        else:
            origin = Vector(min_w, min_h, max_d)
        self._origin_mesh = self._buildOriginMesh(origin)
        disallowed_area_height = 0.1
        # _disallowed_area_size is accumulated by _buildDisallowedAreaMesh.
        self._disallowed_area_size = 0.0
        self._disallowed_area_mesh = self._buildDisallowedAreaMesh(
            min_w, max_w, min_h, max_h, min_d, max_d, disallowed_area_height
        )
        self._error_mesh = self._buildErrorMesh(
            min_w, max_w, min_h, max_h, min_d, max_d, disallowed_area_height
        )
        self._updateScaleFactor()
        # The usable height is reduced by the raft and the extra Z clearance.
        self._volume_aabb = AxisAlignedBox(
            minimum=Vector(min_w, min_h - 1.0, min_d),
            maximum=Vector(
                max_w, max_h - self._raft_thickness - self._extra_z_clearance, max_d
            ),
        )
        bed_adhesion_size = self.getEdgeDisallowedSize()
        # As this works better for UM machines, we only add the disallowed_area_size for the z direction.
        # This is probably wrong in all other cases. TODO!
        # The +1 and -1 is added as there is always a bit of extra room required to work properly.
        scale_to_max_bounds = AxisAlignedBox(
            minimum=Vector(
                min_w + bed_adhesion_size + 1,
                min_h,
                min_d + self._disallowed_area_size - bed_adhesion_size + 1,
            ),
            maximum=Vector(
                max_w - bed_adhesion_size - 1,
                max_h - self._raft_thickness - self._extra_z_clearance,
                max_d - self._disallowed_area_size + bed_adhesion_size - 1,
            ),
        )
        # Reaches into a private attribute of the scene to publish the bounds.
        self._application.getController().getScene()._maximum_bounds = (
            scale_to_max_bounds  # type: ignore
        )
        self.updateNodeBoundaryCheck()
    def getBoundingBox(self) -> Optional[AxisAlignedBox]:
        """Return the cached build volume bounding box (None until rebuilt)."""
        return self._volume_aabb
    def getRaftThickness(self) -> float:
        """Return the last computed raft thickness (0.0 when adhesion is not raft)."""
        return self._raft_thickness
    def _updateRaftThickness(self) -> None:
        """Recompute the raft thickness from the adhesion settings.

        When the value changed, moves this node down by the raft thickness
        and emits ``raftThicknessChanged``.
        """
        if not self._global_container_stack:
            return
        old_raft_thickness = self._raft_thickness
        if self._global_container_stack.extruderList:
            # This might be called before the extruder stacks have initialised, in which case getting the adhesion_type fails
            self._adhesion_type = self._global_container_stack.getProperty(
                "adhesion_type", "value"
            )
        self._raft_thickness = 0.0
        if self._adhesion_type == "raft":
            # Thickness = base + interface layers + surface layers + air gap,
            # minus the overlap of the first model layer into the raft.
            self._raft_thickness = (
                self._global_container_stack.getProperty("raft_base_thickness", "value")
                + self._global_container_stack.getProperty(
                    "raft_interface_layers", "value"
                )
                * self._global_container_stack.getProperty(
                    "raft_interface_thickness", "value"
                )
                + self._global_container_stack.getProperty(
                    "raft_surface_layers", "value"
                )
                * self._global_container_stack.getProperty(
                    "raft_surface_thickness", "value"
                )
                + self._global_container_stack.getProperty("raft_airgap", "value")
                - self._global_container_stack.getProperty("layer_0_z_overlap", "value")
            )
        # Rounding errors do not matter, we check if raft_thickness has changed at all
        if old_raft_thickness != self._raft_thickness:
            self.setPosition(
                Vector(0, -self._raft_thickness, 0), SceneNode.TransformSpace.World
            )
            self.raftThicknessChanged.emit()
def _calculateExtraZClearance(self, extruders: List["ContainerStack"]) -> float:
if not self._global_container_stack:
return 0
extra_z = 0.0
for extruder in extruders:
if extruder.getProperty("retraction_hop_enabled", "value"):
retraction_hop = extruder.getProperty("retraction_hop", "value")
if extra_z is None or retraction_hop > extra_z:
extra_z = retraction_hop
return extra_z
    def _onStackChanged(self):
        """Debounce global-stack changes by (re)starting the stack change timer."""
        self._stack_change_timer.start()
    def _onStackChangeTimerFinished(self) -> None:
        """Update the build volume visualization

        Re-wires the property-change signals from the previous global stack
        (and its extruders) to the new one, re-reads the machine dimensions
        and shape, recomputes disallowed areas / raft thickness / Z
        clearance, rebuilds if the engine is ready, and widens the camera
        zoom range to fit the new volume.
        """
        # Disconnect from the old stack and its extruders, if any.
        if self._global_container_stack:
            self._global_container_stack.propertyChanged.disconnect(
                self._onSettingPropertyChanged
            )
            extruders = ExtruderManager.getInstance().getActiveExtruderStacks()
            for extruder in extruders:
                extruder.propertyChanged.disconnect(self._onSettingPropertyChanged)
        self._global_container_stack = self._application.getGlobalContainerStack()
        if self._global_container_stack:
            self._global_container_stack.propertyChanged.connect(
                self._onSettingPropertyChanged
            )
            extruders = ExtruderManager.getInstance().getActiveExtruderStacks()
            for extruder in extruders:
                extruder.propertyChanged.connect(self._onSettingPropertyChanged)
            self._width = self._global_container_stack.getProperty(
                "machine_width", "value"
            )
            machine_height = self._global_container_stack.getProperty(
                "machine_height", "value"
            )
            # In one-at-a-time mode with multiple objects the printable
            # height is limited by the gantry rather than the machine height.
            if (
                self._global_container_stack.getProperty("print_sequence", "value")
                == "one_at_a_time"
                and len(self._scene_objects) > 1
            ):
                new_height = min(
                    self._global_container_stack.getProperty("gantry_height", "value")
                    * self._scale_vector.z,
                    machine_height,
                )
                # Warn the user only when the height shrinks.
                if self._height > new_height:
                    self._build_volume_message.show()
                elif self._height < new_height:
                    self._build_volume_message.hide()
                self._height = new_height
            else:
                self._height = self._global_container_stack.getProperty(
                    "machine_height", "value"
                )
                self._build_volume_message.hide()
            self._depth = self._global_container_stack.getProperty(
                "machine_depth", "value"
            )
            self._shape = self._global_container_stack.getProperty(
                "machine_shape", "value"
            )
            self._updateDisallowedAreas()
            self._updateRaftThickness()
            self._extra_z_clearance = self._calculateExtraZClearance(
                ExtruderManager.getInstance().getUsedExtruderStacks()
            )
            if self._engine_ready:
                self.rebuild()
            camera = Application.getInstance().getController().getCameraTool()
            if camera:
                diagonal = self.getDiagonalSize()
                if diagonal > 1:
                    # You can zoom out up to 5 times the diagonal. This gives some space around the volume.
                    camera.setZoomRange(min=0.1, max=diagonal * 5) # type: ignore
    def _onEngineCreated(self) -> None:
        """Mark the engine as ready and perform the initial rebuild."""
        self._engine_ready = True
        self.rebuild()
    def _onSettingChangeTimerFinished(self) -> None:
        """Process the setting changes batched since the last rebuild.

        Inspects each recorded setting key and flags which recalculations
        (disallowed areas, raft thickness, extra Z clearance) are needed,
        performs each at most once, and rebuilds when anything changed.
        """
        if not self._global_container_stack:
            return
        rebuild_me = False
        update_disallowed_areas = False
        # NOTE(review): update_extra_z_clearance starts as True, so the Z
        # clearance is recomputed whenever any setting changed — confirm
        # whether this is intended or should start as False like the others.
        update_raft_thickness = False
        update_extra_z_clearance = True
        for setting_key in self._changed_settings_since_last_rebuild:
            # Print sequence changes can alter the effective build height.
            if setting_key == "print_sequence":
                machine_height = self._global_container_stack.getProperty(
                    "machine_height", "value"
                )
                if (
                    self._application.getGlobalContainerStack().getProperty(
                        "print_sequence", "value"
                    )
                    == "one_at_a_time"
                    and len(self._scene_objects) > 1
                ):
                    # One-at-a-time with multiple objects: height is limited
                    # by the gantry.
                    new_height = min(
                        self._global_container_stack.getProperty(
                            "gantry_height", "value"
                        )
                        * self._scale_vector.z,
                        machine_height,
                    )
                    if self._height > new_height:
                        self._build_volume_message.show()
                    elif self._height < new_height:
                        self._build_volume_message.hide()
                    self._height = new_height
                else:
                    self._height = (
                        self._global_container_stack.getProperty(
                            "machine_height", "value"
                        )
                        * self._scale_vector.z
                    )
                    self._build_volume_message.hide()
                update_disallowed_areas = True
            # sometimes the machine size or shape settings are adjusted on the active machine, we should reflect this
            if (
                setting_key in self._machine_settings
                or setting_key in self._material_size_settings
            ):
                self._updateMachineSizeProperties()
                update_extra_z_clearance = True
                update_disallowed_areas = True
            if setting_key in self._disallowed_area_settings:
                update_disallowed_areas = True
            if setting_key in self._raft_settings:
                update_raft_thickness = True
            if setting_key in self._extra_z_settings:
                update_extra_z_clearance = True
            if setting_key in self._limit_to_extruder_settings:
                update_disallowed_areas = True
            rebuild_me = (
                update_extra_z_clearance
                or update_disallowed_areas
                or update_raft_thickness
            )
        # We only want to update all of them once.
        if update_disallowed_areas:
            self._updateDisallowedAreas()
        if update_raft_thickness:
            self._updateRaftThickness()
        if update_extra_z_clearance:
            self._extra_z_clearance = self._calculateExtraZClearance(
                ExtruderManager.getInstance().getUsedExtruderStacks()
            )
        if rebuild_me:
            self.rebuild()
        # We just did a rebuild, reset the list.
        self._changed_settings_since_last_rebuild = []
def _onSettingPropertyChanged(self, setting_key: str, property_name: str) -> None:
if property_name != "value":
return
if setting_key not in self._changed_settings_since_last_rebuild:
self._changed_settings_since_last_rebuild.append(setting_key)
self._setting_change_timer.start()
    def hasErrors(self) -> bool:
        """Return True when the last disallowed-area update produced error areas."""
        return self._has_errors
def _updateMachineSizeProperties(self) -> None:
if not self._global_container_stack:
return
self._updateScaleFactor()
self._height = (
self._global_container_stack.getProperty("machine_height", "value")
* self._scale_vector.z
)
self._width = self._global_container_stack.getProperty("machine_width", "value")
self._depth = self._global_container_stack.getProperty("machine_depth", "value")
self._shape = self._global_container_stack.getProperty("machine_shape", "value")
def _updateDisallowedAreasAndRebuild(self):
"""Calls :py:meth:`cura.BuildVolume._updateDisallowedAreas` and makes sure the changes appear in the scene.
This is required for a signal to trigger the update in one go. The
:py:meth:`cura.BuildVolume._updateDisallowedAreas` method itself shouldn't call
:py:meth:`cura.BuildVolume.rebuild`, since there may be other changes before it needs to be rebuilt,
which would hit performance.
"""
self._updateDisallowedAreas()
self._updateRaftThickness()
self._extra_z_clearance = self._calculateExtraZClearance(
ExtruderManager.getInstance().getUsedExtruderStacks()
)
self.rebuild()
    def _updateDisallowedAreas(self) -> None:
        """Recompute all disallowed areas for the current stack.

        Combines static machine areas, prime blob areas, per-nozzle
        disallowed areas and (for multi-extruder setups) the prime tower
        area. Populates ``self._disallowed_areas``,
        ``self._disallowed_areas_no_brim``, ``self._error_areas`` and
        ``self._has_errors``.
        """
        if not self._global_container_stack:
            return
        self._error_areas = []
        used_extruders = ExtruderManager.getInstance().getUsedExtruderStacks()
        self._edge_disallowed_size = None # Force a recalculation
        disallowed_border_size = self.getEdgeDisallowedSize()
        result_areas = self._computeDisallowedAreasStatic(
            disallowed_border_size, used_extruders
        ) # Normal machine disallowed areas can always be added.
        prime_areas = self._computeDisallowedAreasPrimeBlob(
            disallowed_border_size, used_extruders
        )
        result_areas_no_brim = self._computeDisallowedAreasStatic(
            0, used_extruders
        ) # Where the priming is not allowed to happen. This is not added to the result, just for collision checking.
        # Check if prime positions intersect with disallowed areas.
        for extruder in used_extruders:
            extruder_id = extruder.getId()
            result_areas[extruder_id].extend(prime_areas[extruder_id])
            result_areas_no_brim[extruder_id].extend(prime_areas[extruder_id])
            nozzle_disallowed_areas = extruder.getProperty(
                "nozzle_disallowed_areas", "value"
            )
            for area in nozzle_disallowed_areas:
                polygon = Polygon(numpy.array(area, numpy.float32))
                polygon_disallowed_border = polygon.getMinkowskiHull(
                    Polygon.approximatedCircle(disallowed_border_size)
                )
                result_areas[extruder_id].append(
                    polygon_disallowed_border
                ) # Don't perform the offset on these.
                result_areas_no_brim[extruder_id].append(polygon) # No brim
        # Add prime tower location as disallowed area.
        if (
            len([x for x in used_extruders if x.isEnabled]) > 1
        ): # No prime tower if only one extruder is enabled
            prime_tower_collision = False
            prime_tower_areas = self._computeDisallowedAreasPrinted(used_extruders)
            for extruder_id in prime_tower_areas:
                for area_index, prime_tower_area in enumerate(
                    prime_tower_areas[extruder_id]
                ):
                    for area in result_areas[extruder_id]:
                        if prime_tower_area.intersectsPolygon(area) is not None:
                            prime_tower_collision = True
                            break
                    if prime_tower_collision: # Already found a collision.
                        break
                # A colliding prime tower is reported as an error area instead
                # of being added to the disallowed areas.
                if not prime_tower_collision:
                    result_areas[extruder_id].extend(prime_tower_areas[extruder_id])
                    result_areas_no_brim[extruder_id].extend(
                        prime_tower_areas[extruder_id]
                    )
                else:
                    self._error_areas.extend(prime_tower_areas[extruder_id])
        self._has_errors = len(self._error_areas) > 0
        # Flatten the per-extruder dictionaries into the cached lists.
        self._disallowed_areas = []
        for extruder_id in result_areas:
            self._disallowed_areas.extend(result_areas[extruder_id])
        self._disallowed_areas_no_brim = []
        for extruder_id in result_areas_no_brim:
            self._disallowed_areas_no_brim.extend(result_areas_no_brim[extruder_id])
    def _computeDisallowedAreasPrinted(self, used_extruders):
        """Computes the disallowed areas for objects that are printed with print features.

        This means that the brim, travel avoidance and such will be applied to these features.

        :param used_extruders: The extruder stacks to generate areas for.
        :return: A dictionary with for each used extruder ID the disallowed areas where that extruder may not print.
        """
        result = {}
        # NOTE(review): skirt_brim_extruder is resolved below but never read
        # in this method — looks like a leftover; confirm before removing.
        skirt_brim_extruder: ExtruderStack = None
        skirt_brim_extruder_nr = self._global_container_stack.getProperty(
            "skirt_brim_extruder_nr", "value"
        )
        for extruder in used_extruders:
            if skirt_brim_extruder_nr == -1:
                skirt_brim_extruder = used_extruders[
                    0
                ] # The prime tower brim is always printed with the first extruder
            elif int(extruder.getProperty("extruder_nr", "value")) == int(
                skirt_brim_extruder_nr
            ):
                skirt_brim_extruder = extruder
            result[extruder.getId()] = []
        # Currently, the only normally printed object is the prime tower.
        if self._global_container_stack.getProperty("prime_tower_enable", "value"):
            prime_tower_size = self._global_container_stack.getProperty(
                "prime_tower_size", "value"
            )
            machine_width = self._global_container_stack.getProperty(
                "machine_width", "value"
            )
            machine_depth = self._global_container_stack.getProperty(
                "machine_depth", "value"
            )
            prime_tower_x = self._global_container_stack.getProperty(
                "prime_tower_position_x", "value"
            )
            # Y is negated: g-code Y is the inverse of scene-space Y.
            prime_tower_y = -self._global_container_stack.getProperty(
                "prime_tower_position_y", "value"
            )
            if not self._global_container_stack.getProperty(
                "machine_center_is_zero", "value"
            ):
                prime_tower_x = (
                    prime_tower_x - machine_width / 2
                ) # Offset by half machine_width and _depth to put the origin in the front-left.
                prime_tower_y = prime_tower_y + machine_depth / 2
            radius = prime_tower_size / 2
            prime_tower_area = Polygon.approximatedCircle(radius, num_segments=24)
            prime_tower_area = prime_tower_area.translate(
                prime_tower_x - radius, prime_tower_y - radius
            )
            prime_tower_area = prime_tower_area.getMinkowskiHull(
                Polygon.approximatedCircle(0)
            )
            for extruder in used_extruders:
                result[extruder.getId()].append(
                    prime_tower_area
                ) # The prime tower location is the same for each extruder, regardless of offset.
        return result
def _computeDisallowedAreasPrimeBlob(
self, border_size: float, used_extruders: List["ExtruderStack"]
) -> Dict[str, List[Polygon]]:
"""Computes the disallowed areas for the prime blobs.
These are special because they are not subject to things like brim or travel avoidance. They do get a dilute
with the border size though because they may not intersect with brims and such of other objects.
:param border_size: The size with which to offset the disallowed areas due to skirt, brim, travel avoid distance
, etc.
:param used_extruders: The extruder stacks to generate disallowed areas for.
:return: A dictionary with for each used extruder ID the prime areas.
"""
result = {} # type: Dict[str, List[Polygon]]
if not self._global_container_stack:
return result
machine_width = self._global_container_stack.getProperty(
"machine_width", "value"
)
machine_depth = self._global_container_stack.getProperty(
"machine_depth", "value"
)
for extruder in used_extruders:
prime_blob_enabled = extruder.getProperty("prime_blob_enable", "value")
prime_x = extruder.getProperty("extruder_prime_pos_x", "value")
prime_y = -extruder.getProperty("extruder_prime_pos_y", "value")
# Ignore extruder prime position if it is not set or if blob is disabled
if (prime_x == 0 and prime_y == 0) or not prime_blob_enabled:
result[extruder.getId()] = []
continue
if not self._global_container_stack.getProperty(
"machine_center_is_zero", "value"
):
prime_x = (
prime_x - machine_width / 2
) # Offset by half machine_width and _depth to put the origin in the front-left.
prime_y = prime_y + machine_depth / 2
prime_polygon = Polygon.approximatedCircle(PRIME_CLEARANCE)
prime_polygon = prime_polygon.getMinkowskiHull(
Polygon.approximatedCircle(border_size)
)
prime_polygon = prime_polygon.translate(prime_x, prime_y)
result[extruder.getId()] = [prime_polygon]
return result
    def _computeDisallowedAreasStatic(
        self, border_size: float, used_extruders: List["ExtruderStack"]
    ) -> Dict[str, List[Polygon]]:
        """Computes the disallowed areas that are statically placed in the machine.

        It computes different disallowed areas depending on the offset of the extruder. The resulting dictionary will
        therefore have an entry for each extruder that is used.

        :param border_size: The size with which to offset the disallowed areas due to skirt, brim, travel avoid distance
        , etc.
        :param used_extruders: The extruder stacks to generate disallowed areas for.
        :return: A dictionary with for each used extruder ID the disallowed areas where that extruder may not print.
        """
        # Convert disallowed areas to polygons and dilate them.
        machine_disallowed_polygons = []
        if self._global_container_stack is None:
            return {}
        for area in self._global_container_stack.getProperty(
            "machine_disallowed_areas", "value"
        ):
            if len(area) == 0:
                continue # Numpy doesn't deal well with 0-length arrays, since it can't determine the dimensionality of them.
            polygon = Polygon(numpy.array(area, numpy.float32))
            polygon = polygon.getMinkowskiHull(Polygon.approximatedCircle(border_size))
            machine_disallowed_polygons.append(polygon)
        # For certain machines we don't need to compute disallowed areas for each nozzle.
        # So we check here and only do the nozzle offsetting if needed.
        nozzle_offsetting_for_disallowed_areas = (
            self._global_container_stack.getMetaDataEntry(
                "nozzle_offsetting_for_disallowed_areas", True
            )
        )
        result = {}  # type: Dict[str, List[Polygon]]
        for extruder in used_extruders:
            extruder_id = extruder.getId()
            offset_x = extruder.getProperty("machine_nozzle_offset_x", "value")
            if offset_x is None:
                offset_x = 0
            offset_y = extruder.getProperty("machine_nozzle_offset_y", "value")
            if offset_y is None:
                offset_y = 0
            offset_y = -offset_y # Y direction of g-code is the inverse of Y direction of Cura's scene space.
            result[extruder_id] = []
            for polygon in machine_disallowed_polygons:
                result[extruder_id].append(
                    polygon.translate(offset_x, offset_y)
                ) # Compensate for the nozzle offset of this extruder.
            # Add the border around the edge of the build volume.
            left_unreachable_border = 0
            right_unreachable_border = 0
            top_unreachable_border = 0
            bottom_unreachable_border = 0
            # Only do nozzle offsetting if needed
            if nozzle_offsetting_for_disallowed_areas:
                # The build volume is defined as the union of the area that all extruders can reach, so we need to know
                # the relative offset to all extruders.
                for (
                    other_extruder
                ) in ExtruderManager.getInstance().getActiveExtruderStacks():
                    other_offset_x = other_extruder.getProperty(
                        "machine_nozzle_offset_x", "value"
                    )
                    if other_offset_x is None:
                        other_offset_x = 0
                    other_offset_y = other_extruder.getProperty(
                        "machine_nozzle_offset_y", "value"
                    )
                    if other_offset_y is None:
                        other_offset_y = 0
                    other_offset_y = -other_offset_y
                    left_unreachable_border = min(
                        left_unreachable_border, other_offset_x - offset_x
                    )
                    right_unreachable_border = max(
                        right_unreachable_border, other_offset_x - offset_x
                    )
                    top_unreachable_border = min(
                        top_unreachable_border, other_offset_y - offset_y
                    )
                    bottom_unreachable_border = max(
                        bottom_unreachable_border, other_offset_y - offset_y
                    )
            half_machine_width = (
                self._global_container_stack.getProperty("machine_width", "value") / 2
            )
            half_machine_depth = (
                self._global_container_stack.getProperty("machine_depth", "value") / 2
            )
            # We need at a minimum a very small border around the edge so that models can't go off the build plate
            border_size = max(border_size, 0.1)
            # Rectangular build plates: one trapezoid border polygon per edge
            # (left, right, bottom, top), each shrunk by the unreachable
            # borders of the other extruders.
            if self._shape != "elliptic":
                if border_size - left_unreachable_border > 0:
                    result[extruder_id].append(
                        Polygon(
                            numpy.array(
                                [
                                    [-half_machine_width, -half_machine_depth],
                                    [-half_machine_width, half_machine_depth],
                                    [
                                        -half_machine_width
                                        + border_size
                                        - left_unreachable_border,
                                        half_machine_depth
                                        - border_size
                                        - bottom_unreachable_border,
                                    ],
                                    [
                                        -half_machine_width
                                        + border_size
                                        - left_unreachable_border,
                                        -half_machine_depth
                                        + border_size
                                        - top_unreachable_border,
                                    ],
                                ],
                                numpy.float32,
                            )
                        )
                    )
                if border_size + right_unreachable_border > 0:
                    result[extruder_id].append(
                        Polygon(
                            numpy.array(
                                [
                                    [half_machine_width, half_machine_depth],
                                    [half_machine_width, -half_machine_depth],
                                    [
                                        half_machine_width
                                        - border_size
                                        - right_unreachable_border,
                                        -half_machine_depth
                                        + border_size
                                        - top_unreachable_border,
                                    ],
                                    [
                                        half_machine_width
                                        - border_size
                                        - right_unreachable_border,
                                        half_machine_depth
                                        - border_size
                                        - bottom_unreachable_border,
                                    ],
                                ],
                                numpy.float32,
                            )
                        )
                    )
                if border_size + bottom_unreachable_border > 0:
                    result[extruder_id].append(
                        Polygon(
                            numpy.array(
                                [
                                    [-half_machine_width, half_machine_depth],
                                    [half_machine_width, half_machine_depth],
                                    [
                                        half_machine_width
                                        - border_size
                                        - right_unreachable_border,
                                        half_machine_depth
                                        - border_size
                                        - bottom_unreachable_border,
                                    ],
                                    [
                                        -half_machine_width
                                        + border_size
                                        - left_unreachable_border,
                                        half_machine_depth
                                        - border_size
                                        - bottom_unreachable_border,
                                    ],
                                ],
                                numpy.float32,
                            )
                        )
                    )
                if border_size - top_unreachable_border > 0:
                    result[extruder_id].append(
                        Polygon(
                            numpy.array(
                                [
                                    [half_machine_width, -half_machine_depth],
                                    [-half_machine_width, -half_machine_depth],
                                    [
                                        -half_machine_width
                                        + border_size
                                        - left_unreachable_border,
                                        -half_machine_depth
                                        + border_size
                                        - top_unreachable_border,
                                    ],
                                    [
                                        half_machine_width
                                        - border_size
                                        - right_unreachable_border,
                                        -half_machine_depth
                                        + border_size
                                        - top_unreachable_border,
                                    ],
                                ],
                                numpy.float32,
                            )
                        )
                    )
            else:
                # Elliptic build plates: approximate the ring between the
                # bounding rectangle and the inset ellipse with 32 wedge
                # polygons, one per arc segment.
                sections = 32
                arc_vertex = [0, half_machine_depth - border_size]
                for i in range(0, sections):
                    quadrant = math.floor(4 * i / sections)
                    vertices = []
                    if quadrant == 0:
                        vertices.append([-half_machine_width, half_machine_depth])
                    elif quadrant == 1:
                        vertices.append([-half_machine_width, -half_machine_depth])
                    elif quadrant == 2:
                        vertices.append([half_machine_width, -half_machine_depth])
                    elif quadrant == 3:
                        vertices.append([half_machine_width, half_machine_depth])
                    vertices.append(arc_vertex)
                    angle = 2 * math.pi * (i + 1) / sections
                    arc_vertex = [
                        -(half_machine_width - border_size) * math.sin(angle),
                        (half_machine_depth - border_size) * math.cos(angle),
                    ]
                    vertices.append(arc_vertex)
                    result[extruder_id].append(
                        Polygon(numpy.array(vertices, numpy.float32))
                    )
                # Four extra triangles cover the midpoints of each edge.
                if border_size > 0:
                    result[extruder_id].append(
                        Polygon(
                            numpy.array(
                                [
                                    [-half_machine_width, -half_machine_depth],
                                    [-half_machine_width, half_machine_depth],
                                    [-half_machine_width + border_size, 0],
                                ],
                                numpy.float32,
                            )
                        )
                    )
                    result[extruder_id].append(
                        Polygon(
                            numpy.array(
                                [
                                    [-half_machine_width, half_machine_depth],
                                    [half_machine_width, half_machine_depth],
                                    [0, half_machine_depth - border_size],
                                ],
                                numpy.float32,
                            )
                        )
                    )
                    result[extruder_id].append(
                        Polygon(
                            numpy.array(
                                [
                                    [half_machine_width, half_machine_depth],
                                    [half_machine_width, -half_machine_depth],
                                    [half_machine_width - border_size, 0],
                                ],
                                numpy.float32,
                            )
                        )
                    )
                    result[extruder_id].append(
                        Polygon(
                            numpy.array(
                                [
                                    [half_machine_width, -half_machine_depth],
                                    [-half_machine_width, -half_machine_depth],
                                    [0, -half_machine_depth + border_size],
                                ],
                                numpy.float32,
                            )
                        )
                    )
        return result
def _getSettingFromAllExtruders(self, setting_key: str) -> List[Any]:
"""Private convenience function to get a setting from every extruder.
For single extrusion machines, this gets the setting from the global stack.
:return: A sequence of setting values, one for each extruder.
"""
all_values = ExtruderManager.getInstance().getAllExtruderSettings(
setting_key, "value"
)
all_types = ExtruderManager.getInstance().getAllExtruderSettings(
setting_key, "type"
)
for i, (setting_value, setting_type) in enumerate(zip(all_values, all_types)):
if not setting_value and setting_type in ["int", "float"]:
all_values[i] = 0
return all_values
def _calculateBedAdhesionSize(self, used_extruders):
"""Get the bed adhesion size for the global container stack and used extruders
:param adhesion_override: override adhesion type.
Use None to use the global stack default, "none" for no adhesion, "brim" for brim etc.
"""
if self._global_container_stack is None:
return None
container_stack = self._global_container_stack
adhesion_type = container_stack.getProperty("adhesion_type", "value")
if adhesion_type == "raft":
bed_adhesion_size = self._global_container_stack.getProperty(
"raft_margin", "value"
) # Should refer to the raft extruder if set.
else: # raft, brim or skirt. Those last two are handled by CuraEngine.
bed_adhesion_size = 0
max_length_available = 0.5 * min(
self._global_container_stack.getProperty("machine_width", "value"),
self._global_container_stack.getProperty("machine_depth", "value"),
)
bed_adhesion_size = min(bed_adhesion_size, max_length_available)
return bed_adhesion_size
def _calculateFarthestShieldDistance(self, container_stack):
farthest_shield_distance = 0
if container_stack.getProperty("draft_shield_enabled", "value"):
farthest_shield_distance = max(
farthest_shield_distance,
container_stack.getProperty("draft_shield_dist", "value"),
)
if container_stack.getProperty("ooze_shield_enabled", "value"):
farthest_shield_distance = max(
farthest_shield_distance,
container_stack.getProperty("ooze_shield_dist", "value"),
)
return farthest_shield_distance
    def _calculateSupportExpansion(self, container_stack):
        """Return the support offset when support is enabled, else 0.

        :param container_stack: Unused. NOTE(review): the values are read
            from ``self._global_container_stack`` instead of this parameter —
            confirm whether callers ever pass a different stack before
            changing this.
        """
        support_expansion = 0
        support_enabled = self._global_container_stack.getProperty(
            "support_enable", "value"
        )
        support_offset = self._global_container_stack.getProperty(
            "support_offset", "value"
        )
        if support_enabled and support_offset:
            support_expansion += support_offset
        return support_expansion
def _calculateMoveFromWallRadius(self, used_extruders):
move_from_wall_radius = 0 # Moves that start from outer wall.
for stack in used_extruders:
if stack.getProperty("travel_avoid_other_parts", "value"):
move_from_wall_radius = max(
move_from_wall_radius,
stack.getProperty("travel_avoid_distance", "value"),
)
infill_wipe_distance = stack.getProperty("infill_wipe_dist", "value")
num_walls = stack.getProperty("wall_line_count", "value")
if (
num_walls >= 1
): # Infill wipes start from the infill, so subtract the total wall thickness from this.
infill_wipe_distance -= stack.getProperty("wall_line_width_0", "value")
if num_walls >= 2:
infill_wipe_distance -= stack.getProperty(
"wall_line_width_x", "value"
) * (num_walls - 1)
move_from_wall_radius = max(move_from_wall_radius, infill_wipe_distance)
return move_from_wall_radius
def getEdgeDisallowedSize(self):
"""Calculate the disallowed radius around the edge.
This disallowed radius is to allow for space around the models that is not part of the collision radius,
such as bed adhesion (skirt/brim/raft) and travel avoid distance.
"""
if (
not self._global_container_stack
or not self._global_container_stack.extruderList
):
return 0
if self._edge_disallowed_size is not None:
return self._edge_disallowed_size
container_stack = self._global_container_stack
used_extruders = ExtruderManager.getInstance().getUsedExtruderStacks()
# If we are printing one at a time, we need to add the bed adhesion size to the disallowed areas of the objects
if container_stack.getProperty("print_sequence", "value") == "one_at_a_time":
return 0.1
bed_adhesion_size = self._calculateBedAdhesionSize(used_extruders)
support_expansion = self._calculateSupportExpansion(
self._global_container_stack
)
farthest_shield_distance = self._calculateFarthestShieldDistance(
self._global_container_stack
)
move_from_wall_radius = self._calculateMoveFromWallRadius(used_extruders)
# Now combine our different pieces of data to get the final border size.
# Support expansion is added to the bed adhesion, since the bed adhesion goes around support.
# Support expansion is added to farthest shield distance, since the shields go around support.
self._edge_disallowed_size = max(
move_from_wall_radius,
support_expansion + farthest_shield_distance,
support_expansion + bed_adhesion_size,
)
return self._edge_disallowed_size
def _clamp(self, value, min_value, max_value):
    """Limit *value* to the range [min_value, max_value] (same tie-breaking
    as max(min(value, max_value), min_value))."""
    upper_bounded = value if value < max_value else max_value
    return upper_bounded if upper_bounded > min_value else min_value
# Setting keys grouped by feature area. Each list below names the stack
# properties whose values feed into a related computation; the final
# _disallowed_area_settings is the union of most of them.

# Machine geometry and coordinate-origin settings.
_machine_settings = [
    "machine_width",
    "machine_depth",
    "machine_height",
    "machine_shape",
    "machine_center_is_zero",
]
# Bed-adhesion (skirt/brim) and draft-shield related settings.
_skirt_settings = [
    "adhesion_type",
    "skirt_gap",
    "skirt_line_count",
    "skirt_brim_line_width",
    "brim_gap",
    "brim_width",
    "brim_line_count",
    "raft_margin",
    "draft_shield_enabled",
    "draft_shield_dist",
    "initial_layer_line_width_factor",
]
# Raft layer geometry settings.
_raft_settings = [
    "adhesion_type",
    "raft_base_thickness",
    "raft_interface_layers",
    "raft_interface_thickness",
    "raft_surface_layers",
    "raft_surface_thickness",
    "raft_airgap",
    "layer_0_z_overlap",
]
# Z-hop settings that add extra height headroom.
_extra_z_settings = ["retraction_hop_enabled", "retraction_hop"]
# Extruder priming position/behaviour settings.
_prime_settings = [
    "extruder_prime_pos_x",
    "extruder_prime_pos_y",
    "prime_blob_enable",
]
# Prime tower placement and size settings.
_tower_settings = [
    "prime_tower_enable",
    "prime_tower_size",
    "prime_tower_position_x",
    "prime_tower_position_y",
    "prime_tower_brim_enable",
]
# Ooze shield settings.
_ooze_shield_settings = ["ooze_shield_enabled", "ooze_shield_dist"]
# Travel/wipe distances and wall widths (see _calculateMoveFromWallRadius).
_distance_settings = [
    "infill_wipe_dist",
    "travel_avoid_distance",
    "support_offset",
    "support_enable",
    "travel_avoid_other_parts",
    "travel_avoid_supports",
    "wall_line_count",
    "wall_line_width_0",
    "wall_line_width_x",
]
_extruder_settings = [
    "support_enable",
    "support_bottom_enable",
    "support_roof_enable",
    "support_infill_extruder_nr",
    "support_extruder_nr_layer_0",
    "support_bottom_extruder_nr",
    "support_roof_extruder_nr",
    "brim_line_count",
    "skirt_brim_extruder_nr",
    "raft_base_extruder_nr",
    "raft_interface_extruder_nr",
    "raft_surface_extruder_nr",
    "adhesion_type",
]  # Settings that can affect which extruders are used.
# Per-feature extruder-number overrides.
_limit_to_extruder_settings = [
    "wall_extruder_nr",
    "wall_0_extruder_nr",
    "wall_x_extruder_nr",
    "top_bottom_extruder_nr",
    "infill_extruder_nr",
    "support_infill_extruder_nr",
    "support_extruder_nr_layer_0",
    "support_bottom_extruder_nr",
    "support_roof_extruder_nr",
    "skirt_brim_extruder_nr",
    "raft_base_extruder_nr",
    "raft_interface_extruder_nr",
    "raft_surface_extruder_nr",
]
# Material shrinkage compensation factors.
_material_size_settings = [
    "material_shrinkage_percentage",
    "material_shrinkage_percentage_xy",
    "material_shrinkage_percentage_z",
]
# Union of all settings that influence the disallowed areas around models.
_disallowed_area_settings = (
    _skirt_settings
    + _prime_settings
    + _tower_settings
    + _ooze_shield_settings
    + _distance_settings
    + _extruder_settings
    + _material_size_settings
)
|
draftguitools | gui_fillets | # ***************************************************************************
# * (c) 2020 Eliud Cabrera Castillo <e.cabrera-castillo@tum.de> *
# * *
# * This file is part of the FreeCAD CAx development system. *
# * *
# * This program is free software; you can redistribute it and/or modify *
# * it under the terms of the GNU Lesser General Public License (LGPL) *
# * as published by the Free Software Foundation; either version 2 of *
# * the License, or (at your option) any later version. *
# * for detail see the LICENCE text file. *
# * *
# * FreeCAD is distributed in the hope that it will be useful, *
# * but WITHOUT ANY WARRANTY; without even the implied warranty of *
# * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *
# * GNU Library General Public License for more details. *
# * *
# * You should have received a copy of the GNU Library General Public *
# * License along with FreeCAD; if not, write to the Free Software *
# * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 *
# * USA *
# * *
# ***************************************************************************
"""Provides GUI tools to create Fillet objects between two lines.
TODO: Currently this tool uses the DraftGui widgets. We want to avoid using
this big module because it creates manually the interface.
Instead we should provide its own .ui file and task panel,
similar to the OrthoArray tool.
"""
## @package gui_fillet
# \ingroup draftguitools
# \brief Provides GUI tools to create Fillet objects between two lines.
import Draft
import Draft_rc
import draftguitools.gui_base_original as gui_base_original
import draftguitools.gui_tool_utils as gui_tool_utils
import draftutils.utils as utils
import FreeCADGui as Gui
## \addtogroup draftguitools
# @{
import PySide.QtCore as QtCore
from draftutils.messages import _err, _msg
from draftutils.translate import translate
from PySide.QtCore import QT_TRANSLATE_NOOP
# The module is used to prevent complaints from code checkers (flake8)
True if Draft_rc.__name__ else False
class Fillet(gui_base_original.Creator):
    """Gui command for the Fillet tool.

    Creates a fillet (or optionally a chamfer) between two selected wires
    or edges, collecting the radius via a DraftGui task panel.
    """

    def __init__(self):
        super(Fillet, self).__init__()
        # Name handed to the Creator base class for this command.
        self.featureName = "Fillet"

    def GetResources(self):
        """Set icon, menu and tooltip."""
        return {
            "Pixmap": "Draft_Fillet",
            "Accel": "F,I",
            "MenuText": QT_TRANSLATE_NOOP("Draft_Fillet", "Fillet"),
            "ToolTip": QT_TRANSLATE_NOOP(
                "Draft_Fillet", "Creates a fillet between two selected wires or edges."
            ),
        }

    def Activated(self, name="Fillet"):
        """Execute when the command is called.

        Initializes default options (radius=100, no chamfer, keep
        originals) and builds the radius task panel with two extra
        checkboxes for the chamfer and delete options.
        """
        super(Fillet, self).Activated(name=name)
        if self.ui:
            # Defaults: radius in internal units; arc, not chamfer;
            # keep the original objects.
            self.rad = 100
            self.chamfer = False
            self.delete = False
            label = translate("draft", "Fillet radius")
            tooltip = translate("draft", "Radius of fillet")
            # Call the task panel defined in DraftGui to enter a radius.
            self.ui.taskUi(title=translate("Draft", "Fillet"), icon="Draft_Fillet")
            self.ui.radiusUi()
            self.ui.sourceCmd = self
            self.ui.labelRadius.setText(label)
            self.ui.radiusValue.setToolTip(tooltip)
            self.ui.setRadiusValue(self.rad, "Length")
            # Checkbox: delete the two source objects after filleting.
            self.ui.check_delete = self.ui._checkbox(
                "isdelete", self.ui.layout, checked=self.delete
            )
            self.ui.check_delete.setText(translate("Draft", "Delete original objects"))
            self.ui.check_delete.show()
            # Checkbox: create a straight chamfer instead of an arc.
            self.ui.check_chamfer = self.ui._checkbox(
                "ischamfer", self.ui.layout, checked=self.chamfer
            )
            self.ui.check_chamfer.setText(translate("Draft", "Create chamfer"))
            self.ui.check_chamfer.show()
            self.ui.check_delete.stateChanged.connect(self.set_delete)
            self.ui.check_chamfer.stateChanged.connect(self.set_chamfer)
            # TODO: somehow we need to set up the trackers
            # to show a preview of the fillet.
            # self.linetrack = trackers.lineTracker(dotted=True)
            # self.arctrack = trackers.arcTracker()
            # self.call = self.view.addEventCallback("SoEvent", self.action)
            _msg(translate("draft", "Enter radius."))

    def action(self, arg):
        """Scene event handler. CURRENTLY NOT USED.

        Here the displaying of the trackers (previews)
        should be implemented by considering the current value of the
        `ui.radiusValue`.
        """
        if arg["Type"] == "SoKeyboardEvent":
            if arg["Key"] == "ESCAPE":
                self.finish()
        elif arg["Type"] == "SoLocation2Event":
            # Track the mouse position in the 3D view.
            self.point, ctrlPoint, info = gui_tool_utils.getPoint(self, arg)
            gui_tool_utils.redraw3DView()

    def set_delete(self):
        """Execute as a callback when the delete checkbox changes."""
        self.delete = self.ui.check_delete.isChecked()
        _msg(translate("draft", "Delete original objects:") + " " + str(self.delete))

    def set_chamfer(self):
        """Execute as a callback when the chamfer checkbox changes."""
        self.chamfer = self.ui.check_chamfer.isChecked()
        _msg(translate("draft", "Chamfer mode:") + " " + str(self.chamfer))

    def numericRadius(self, rad):
        """Validate the entry radius in the user interface.

        This function is called by the toolbar or taskpanel interface
        when a valid radius has been entered in the input field.
        """
        self.rad = rad
        self.draw_arc(rad, self.chamfer, self.delete)
        self.finish()

    def draw_arc(self, rad, chamfer, delete):
        """Process the selection and draw the actual object.

        First creates a throwaway fillet to verify one can be built for
        the current selection, removes it, then commits the real creation
        as a recorded command string (so it appears in the undo stack and
        the Python console).
        """
        wires = Gui.Selection.getSelection()
        if not wires or len(wires) != 2:
            _err(translate("draft", "Two elements needed."))
            return
        for o in wires:
            _msg(utils.get_type(o))
        _test = translate("draft", "Test object")
        _test_off = translate("draft", "Test object removed")
        _cant = translate("draft", "Fillet cannot be created")
        # Dry run: build a test fillet to validate the selection, then
        # remove it before committing the scripted version.
        _msg(4 * "=" + _test)
        arc = Draft.make_fillet(wires, rad)
        if not arc:
            _err(_cant)
            return
        self.doc.removeObject(arc.Name)
        _msg(4 * "=" + _test_off)
        # Build the Python command string that recreates the fillet.
        _doc = "FreeCAD.ActiveDocument."
        _wires = "["
        _wires += _doc + wires[0].Name + ", "
        _wires += _doc + wires[1].Name
        _wires += "]"
        Gui.addModule("Draft")
        _cmd = "Draft.make_fillet"
        _cmd += "("
        _cmd += _wires + ", "
        _cmd += "radius=" + str(rad)
        if chamfer:
            _cmd += ", chamfer=" + str(chamfer)
        if delete:
            _cmd += ", delete=" + str(delete)
        _cmd += ")"
        _cmd_list = [
            "arc = " + _cmd,
            "Draft.autogroup(arc)",
            "FreeCAD.ActiveDocument.recompute()",
        ]
        self.commit(translate("draft", "Create fillet"), _cmd_list)

    def finish(self, cont=False):
        """Terminate the operation."""
        super(Fillet, self).finish()
        if self.ui:
            # self.linetrack.finalize()
            # self.arctrack.finalize()
            self.doc.recompute()
# Register the command with the FreeCAD GUI so it appears in menus/toolbars.
Gui.addCommand("Draft_Fillet", Fillet())
## @}
|
bitmessageqt | utils | import hashlib
import os
import state
from addresses import addBMIfNotPresent
from bmconfigparser import config
from PyQt4 import QtGui
str_broadcast_subscribers = "[Broadcast subscribers]"
str_chan = "[chan]"
def identiconize(address):
    """Return a QIcon identicon generated from *address*.

    Returns an empty QIcon when identicons are disabled in the settings.
    Fix: an unrecognized 'identiconlib' value previously fell off the end
    of the function and implicitly returned None, while callers (e.g.
    avatarize) expect a QIcon — it now also returns an empty QIcon.
    """
    size = 48
    if not config.getboolean("bitmessagesettings", "useidenticons"):
        return QtGui.QIcon()
    # If you include another identicon library, please generate an
    # example identicon with the following md5 hash:
    # 3fd4bf901b9d4ea1394f0fb358725b28
    identicon_lib = config.safeGet(
        "bitmessagesettings", "identiconlib", "qidenticon_two_x"
    )
    # As an 'identiconsuffix' you could put "@bitmessge.ch" or "@bm.addr"
    # to make it compatible with other identicon generators. (Note however,
    # that E-Mail programs might convert the BM-address to lowercase first.)
    # It can be used as a pseudo-password to salt the generation of
    # the identicons to decrease the risk of attacks where someone creates
    # an address to mimic someone else's identicon.
    identiconsuffix = config.get("bitmessagesettings", "identiconsuffix")
    if identicon_lib[: len("qidenticon")] == "qidenticon":
        # originally by:
        # :Author:Shin Adachi <shn@glucose.jp>
        # Licesensed under FreeBSD License.
        # stripped from PIL and uses QT instead (by sendiulo, same license)
        import qidenticon

        # NOTE: Python 2-era code — md5 of a str; Python 3 would need .encode().
        icon_hash = hashlib.md5(
            addBMIfNotPresent(address) + identiconsuffix
        ).hexdigest()
        # "_two" variants use two colors; "_x"/"_b" variants are opaque.
        use_two_colors = identicon_lib[: len("qidenticon_two")] == "qidenticon_two"
        opacity = (
            int(
                identicon_lib
                not in (
                    "qidenticon_x",
                    "qidenticon_two_x",
                    "qidenticon_b",
                    "qidenticon_two_b",
                )
            )
            * 255
        )
        penwidth = 0
        image = qidenticon.render_identicon(
            int(icon_hash, 16), size, use_two_colors, opacity, penwidth
        )
        # filename = './images/identicons/'+hash+'.png'
        # image.save(filename)
        idcon = QtGui.QIcon()
        idcon.addPixmap(image, QtGui.QIcon.Normal, QtGui.QIcon.Off)
        return idcon
    elif identicon_lib == "pydenticon":
        # Here you could load pydenticon.py
        # (just put it in the "src" folder of your Bitmessage source)
        from pydenticon import Pydenticon

        # It is not included in the source, because it is licensed under GPLv3
        # GPLv3 is a copyleft license that would influence our licensing
        # Find the source here:
        # https://github.com/azaghal/pydenticon
        # note that it requires pillow (or PIL) to be installed:
        # https://python-pillow.org/
        idcon_render = Pydenticon(
            addBMIfNotPresent(address) + identiconsuffix, size * 3
        )
        rendering = idcon_render._render()
        data = rendering.convert("RGBA").tostring("raw", "RGBA")
        qim = QtGui.QImage(data, size, size, QtGui.QImage.Format_ARGB32)
        pix = QtGui.QPixmap.fromImage(qim)
        idcon = QtGui.QIcon()
        idcon.addPixmap(pix, QtGui.QIcon.Normal, QtGui.QIcon.Off)
        return idcon
    # BUGFIX: unknown identicon library configured — return an empty icon
    # instead of implicitly returning None.
    return QtGui.QIcon()
def avatarize(address):
    """
    Loads a supported image for the given address' hash from the 'avatars'
    folder, falls back to a 'default.*' file if one exists, and finally
    falls back to identiconize(address).

    Fix: removed dead `default = ...` local assignments that were never read.
    """
    idcon = QtGui.QIcon()
    # NOTE: Python 2-era code — md5 of a str; Python 3 would need .encode().
    icon_hash = hashlib.md5(addBMIfNotPresent(address)).hexdigest()
    if address == str_broadcast_subscribers:
        # don't hash [Broadcast subscribers]
        icon_hash = address
    # https://www.riverbankcomputing.com/static/Docs/PyQt4/qimagereader.html#supportedImageFormats
    # QImageReader.supportedImageFormats ()
    extensions = [
        "PNG",
        "GIF",
        "JPG",
        "JPEG",
        "SVG",
        "BMP",
        "MNG",
        "PBM",
        "PGM",
        "PPM",
        "TIFF",
        "XBM",
        "XPM",
        "TGA",
    ]
    # Try a specific avatar for this address first, then the shared
    # "default" avatar, checking both lower- and upper-case extensions.
    for base_name in (icon_hash, "default"):
        for ext in extensions:
            for suffix in (ext.lower(), ext.upper()):
                candidate = state.appdata + "avatars/" + base_name + "." + suffix
                if os.path.isfile(candidate):
                    idcon.addFile(candidate)
                    return idcon
    # If no avatar is found, generate an identicon instead.
    return identiconize(address)
|
tools | update_options | """Update the parts of the documentation that are auto-generated.
For example, the options documentation is a Google Doc. It can be generated from
the source code and updated automatically using this script.
"""
__copyright__ = "Copyright (C) 2015-2016 Martin Blais"
__license__ = "GNU GPLv2"
import argparse
import logging
import os
import re
from os import path
import httplib2
from apiclient import discovery
from apiclient.http import MediaInMemoryUpload # pylint: disable=import-error
from beancount.parser import options
from beancount.utils import test_utils
from oauth2client import service_account
def replace_gdocs_document(http, docid, title, contents):
    """Upload new contents for a Google Doc for a plain/text file.

    Args:
      http: An http connection object with drive credentials.
      docid: A string, the ID of the document.
      title: A string, the title of the document.
      contents: A string, the body of the document.
    """
    drive = discovery.build("drive", "v3", http=http)
    # Upload the body from memory as resumable plain text.
    media = MediaInMemoryUpload(
        contents.encode("utf8"), mimetype="text/plain", resumable=True
    )
    request = drive.files().update(
        fileId=docid, body={"name": title}, media_body=media
    )
    return request.execute()
def get_options_docid():
    """Find the options doc id from the redirect file.

    Returns:
      The id of the doc to fix up.
    """
    conf_path = path.join(test_utils.find_repository_root(__file__), ".nginx.conf")
    pattern = re.compile(r".*/doc/options.*(https?://docs.google.com/.*);")
    with open(conf_path) as conf_file:
        matches = [m for m in map(pattern.match, conf_file.readlines()) if m]
    # Exactly one redirect line for /doc/options is expected.
    assert len(matches) == 1
    url = matches[0].group(1)
    # The document id is the last non-empty path component of the URL.
    return [part for part in url.split("/") if part][-1]
SERVICE_ACCOUNT_FILE = os.path.expanduser("~/.google-apis-service-account.json")
def get_auth_via_service_account(scopes):
    """Get an authenticated http object via a service account.

    Args:
      scopes: A string or a list of strings, the scopes to get credentials for.
    Returns:
      A pair or (credentials, http) objects, where 'http' is an authenticated
      http client object, from which you can use the Google APIs.
    """
    creds = service_account.ServiceAccountCredentials.from_json_keyfile_name(
        SERVICE_ACCOUNT_FILE, scopes
    )
    authorized_http = httplib2.Http()
    creds.authorize(authorized_http)
    return creds, authorized_http
def main():
    """Regenerate the options reference Google Doc from the source code."""
    logging.basicConfig(level=logging.INFO, format="%(levelname)-8s: %(message)s")
    # No flags are accepted; parsing still provides --help handling.
    parser = argparse.ArgumentParser(description=__doc__.strip())
    parser.parse_args()

    # Find the document id.
    docid = get_options_docid()

    # Connect to the service.
    drive_scopes = [
        "https://www.googleapis.com/auth/drive",
        "https://www.googleapis.com/auth/drive.scripts",
    ]
    _, http = get_auth_via_service_account(drive_scopes)

    # Replace the document.
    replace_gdocs_document(
        http, docid, "Beancount - Options Reference", options.list_options()
    )
if __name__ == "__main__":
main()
|
s3 | engine | # coding:utf-8
from __future__ import print_function
import logging
from boto import s3
from boto.exception import S3CreateError, S3ResponseError
from boto.route53.exception import DNSServerError
from cactus.deployment.engine import BaseDeploymentEngine
from cactus.deployment.s3.auth import AWSCredentialsManager
from cactus.deployment.s3.domain import AWSBucket, AWSDomain
from cactus.deployment.s3.file import S3File
from cactus.exceptions import InvalidCredentials
from cactus.utils import ipc
logger = logging.getLogger(__name__)
class S3DeploymentEngine(BaseDeploymentEngine):
    """Deployment engine that publishes a site to an Amazon S3 bucket.

    Fix: removed a duplicated `bucket_name = self.site.config.get(...)`
    line in domain_setup().
    """

    FileClass = S3File
    CredentialsManagerClass = AWSCredentialsManager

    # Site-config keys used by this engine.
    config_bucket_name = "aws-bucket-name"
    config_bucket_website = "aws-bucket-website"
    config_bucket_region = "aws-bucket-region"

    _s3_default_region = "us-east-1"
    _s3_port = 443
    _s3_is_secure = True
    _s3_https_connection_factory = None

    def _get_buckets(self):
        """
        :returns: The list of buckets found for this account
        :raises: InvalidCredentials when AWS rejects the access key
        """
        try:
            return self.get_connection().get_all_buckets()
        except S3ResponseError as e:
            if e.error_code == "InvalidAccessKeyId":
                logger.info("Received an Error from AWS:\n %s", e.body)
                raise InvalidCredentials()
            raise

    def _get_bucket_region(self):
        """Return the configured bucket region, defaulting to us-east-1."""
        return self.site.config.get(self.config_bucket_region, self._s3_default_region)

    def _create_connection(self):
        """
        Create a new S3 Connection
        """
        aws_access_key, aws_secret_key = self.credentials_manager.get_credentials()
        return s3.connect_to_region(
            self._get_bucket_region(),
            aws_access_key_id=aws_access_key.strip(),
            aws_secret_access_key=aws_secret_key.strip(),
            is_secure=self._s3_is_secure,
            port=self._s3_port,
            https_connection_factory=self._s3_https_connection_factory,
            calling_format="boto.s3.connection.OrdinaryCallingFormat",
        )

    def get_bucket(self):
        """
        :returns: The Bucket if found, None otherwise.
        :raises: InvalidCredentials if we can't connect to AWS
        """
        buckets = self._get_buckets()
        buckets = dict((bucket.name, bucket) for bucket in buckets)
        return buckets.get(self.bucket_name)

    def create_bucket(self):
        """
        :returns: The newly created bucket, or None if the name is taken.
        """
        try:
            # When creating a bucket, the region cannot be "us-east-1" but needs
            # to be an empty string, so we do that for now.
            # https://github.com/boto/boto3/issues/125#issuecomment-109408790
            if self._get_bucket_region() == "us-east-1":
                region = ""
            else:
                region = self._get_bucket_region()
            bucket = self.get_connection().create_bucket(
                self.bucket_name, policy="public-read", location=region
            )
        except S3CreateError:
            logger.info(
                "Bucket with name %s already is used by someone else, "
                "please try again with another name",
                self.bucket_name,
            )
            return  # TODO: These should be exceptions
        # Configure S3 to use the index.html and error.html files for indexes and 404/500s.
        bucket.configure_website(self._index_page, self._error_page)
        return bucket

    def get_website_endpoint(self):
        """Return the S3 website endpoint for the deployed bucket."""
        return self.bucket.get_website_endpoint()

    def domain_setup(self):
        """Create Route 53 records pointing the configured domain at the bucket."""
        bucket_name = self.site.config.get(self.config_bucket_name)
        if not bucket_name:
            logger.warning("No bucket name")
            return
        aws_access_key, aws_secret_key = self.credentials_manager.get_credentials()
        domain = AWSDomain(aws_access_key, aws_secret_key, bucket_name)
        try:
            domain.setup()
        except DNSServerError as e:
            logger.debug(e)
            ipc.signal("domain.setup.error", {"errorKey": "AccountDisabled"})
            logger.error("Account cannot use route 53")
            logger.error(e)

    def domain_list(self):
        """Log (and signal over IPC) the Route 53 name servers for the domain."""
        bucket_name = self.site.config.get(self.config_bucket_name)
        if not bucket_name:
            logger.warning("No bucket name")
            return
        aws_access_key, aws_secret_key = self.credentials_manager.get_credentials()
        domain = AWSDomain(aws_access_key, aws_secret_key, bucket_name)
        try:
            domain_list = domain.nameServers()
        except DNSServerError as e:
            print(e)
            ipc.signal("domain.list.error", {"errorKey": "AccountDisabled"})
            logger.error("Account cannot use route 53")
            logger.error(e)
            return
        if domain_list:
            ipc.signal("domain.list.result", {"nameservers": domain_list})
            for domain in domain_list:
                logger.info(domain)
        else:
            logger.error("No name servers configured")

    def domain_remove(self):
        # Not implemented for S3 deployments.
        pass
|
models | early_access_feature | from django.db import models
from posthog.models.utils import UUIDModel, sane_repr
class EarlyAccessFeature(UUIDModel):
    """A feature offered to a team's users for early access, optionally
    gated behind a feature flag."""

    class Stage(models.TextChoices):
        # Lifecycle stages an early-access feature can be in.
        DRAFT = "draft", "draft"
        CONCEPT = "concept", "concept"
        ALPHA = "alpha", "alpha"
        BETA = "beta", "beta"
        GENERAL_AVAILABILITY = "general-availability", "general availability"
        ARCHIVED = "archived", "archived"

    # Owning team; features are deleted together with the team.
    team: models.ForeignKey = models.ForeignKey(
        "posthog.Team",
        on_delete=models.CASCADE,
        related_name="features",
        related_query_name="feature",
    )
    # Optional gating flag; PROTECT prevents deleting a flag still in use.
    feature_flag: models.ForeignKey = models.ForeignKey(
        "posthog.FeatureFlag",
        null=True,
        blank=True,
        on_delete=models.PROTECT,
        related_name="features",
        related_query_name="feature",
    )
    name: models.CharField = models.CharField(max_length=200)
    description: models.TextField = models.TextField(blank=True)
    # One of the Stage choices above.
    stage: models.CharField = models.CharField(max_length=40, choices=Stage.choices)
    documentation_url: models.URLField = models.URLField(max_length=800, blank=True)
    created_at: models.DateTimeField = models.DateTimeField(auto_now_add=True)

    def __str__(self) -> str:
        return self.name

    __repr__ = sane_repr("id", "name", "team_id", "stage")
|
clientScripts | create_transfer_metadata | #!/usr/bin/env python
from argparse import ArgumentParser
import django
from lxml import etree
django.setup()
import metrics
# dashboard
from main.models import Transfer
def fetch_set(sip_uuid):
    """Return the transfer metadata set row for the transfer with *sip_uuid*."""
    transfer_record = Transfer.objects.get(uuid=sip_uuid)
    return transfer_record.transfermetadatasetrow
def fetch_fields_and_values(sip_uuid):
    """Return (fieldname, value) pairs of non-empty metadata for the transfer,
    or an empty list when the transfer has no metadata set."""
    metadata_set = fetch_set(sip_uuid)
    if metadata_set is None:
        return []
    non_empty = metadata_set.transfermetadatafieldvalue_set.exclude(fieldvalue="")
    return non_empty.values_list("field__fieldname", "fieldvalue")
def build_element(label, value, root):
    """Append a child element named *label* with text *value* under *root*
    and return it."""
    child = etree.SubElement(root, label)
    child.text = value
    return child
def call(jobs):
    """Job entry point: write transfer metadata to a generic XML document.

    For each job, parses -S/--sipUUID and -x/--xmlFile from the job's args,
    gathers non-empty metadata (fieldname, value) pairs for that transfer,
    and writes them as children of a <transfer_metadata> root element.
    """
    parser = ArgumentParser(
        description="Create a generic XML document from transfer metadata"
    )
    parser.add_argument("-S", "--sipUUID", action="store", dest="sip_uuid")
    parser.add_argument("-x", "--xmlFile", action="store", dest="xml_file")
    for job in jobs:
        with job.JobContext():
            # job.args[0] is the script name; parse the remainder.
            opts = parser.parse_args(job.args[1:])
            root = etree.Element("transfer_metadata")
            values = fetch_fields_and_values(opts.sip_uuid)
            elements = [build_element(label, value, root) for (label, value) in values]
            # If there is no transfer metadata, skip writing the XML
            if elements:
                tree = etree.ElementTree(root)
                tree.write(
                    opts.xml_file,
                    pretty_print=True,
                    xml_declaration=True,
                    encoding="utf-8",
                )
                job.pyprint(etree.tostring(tree, encoding="utf8"))
            # This is an odd point to mark the transfer as "completed", but it's the
            # last step in the "Complete Transfer" microservice group before the folder
            # move, so it seems like the best option we have for now.
            metrics.transfer_completed(opts.sip_uuid)
|
bitmessagekivy | mpybit | # pylint: disable=too-many-public-methods, unused-variable, too-many-ancestors
# pylint: disable=no-name-in-module, too-few-public-methods, unused-argument
# pylint: disable=attribute-defined-outside-init, too-many-instance-attributes
"""
Bitmessage android(mobile) interface
"""
import logging
import os
import sys
from functools import partial
from kivy.clock import Clock
from kivy.core.window import Window
from kivy.lang import Builder
from kivy.uix.boxlayout import BoxLayout
from kivymd.app import MDApp
from kivymd.uix.bottomsheet import MDCustomBottomSheet
from kivymd.uix.button import MDRaisedButton
from kivymd.uix.dialog import MDDialog
from kivymd.uix.filemanager import MDFileManager
from kivymd.uix.label import MDLabel
from kivymd.uix.list import IRightBodyTouch
from PIL import Image as PilImage
from pybitmessage.bitmessagekivy import identiconGeneration
from pybitmessage.bitmessagekivy.base_navigation import (
BaseContentNavigationDrawer,
BaseIdentitySpinner,
BaseLanguage,
BaseNavigationDrawerDivider,
BaseNavigationDrawerSubheader,
BaseNavigationItem,
)
from pybitmessage.bitmessagekivy.baseclass.common import (
get_identity_list,
load_image_path,
toast,
)
from pybitmessage.bitmessagekivy.baseclass.login import * # noqa: F401, F403
from pybitmessage.bitmessagekivy.baseclass.popup import (
AddAddressPopup,
AddressChangingLoader,
AppClosingPopup,
)
from pybitmessage.bitmessagekivy.get_platform import platform
from pybitmessage.bitmessagekivy.kivy_state import KivyStateVariables
from pybitmessage.bitmessagekivy.load_kivy_screens_data import load_screen_json
from pybitmessage.bitmessagekivy.uikivysignaler import UIkivySignaler
from pybitmessage.bmconfigparser import config # noqa: F401
from pybitmessage.mock.helper_startup import (
loadConfig,
total_encrypted_messages_per_month,
)
logger = logging.getLogger("default")
class Lang(BaseLanguage):
    """UI language wrapper (see BaseLanguage)."""


class NavigationItem(BaseNavigationItem):
    """A single entry in the navigation drawer."""


class NavigationDrawerDivider(BaseNavigationDrawerDivider):
    """
    A small full-width divider that can be placed
    in the :class:`MDNavigationDrawer`
    """


class NavigationDrawerSubheader(BaseNavigationDrawerSubheader):
    """
    A subheader for separating content in :class:`MDNavigationDrawer`

    Works well alongside :class:`NavigationDrawerDivider`
    """


class ContentNavigationDrawer(BaseContentNavigationDrawer):
    """Content container for the kivy navigation drawer."""


class BadgeText(IRightBodyTouch, MDLabel):
    """Right-aligned badge label (e.g. unread counts) for list items."""


class IdentitySpinner(BaseIdentitySpinner):
    """Identity Dropdown in Side Navigation bar"""
class NavigateApp(MDApp):
    """Main KivyMD application: navigation layout and screen management."""

    # Shared UI state container (an instance-level copy is also created
    # in __init__ as self.kivy_state_obj).
    kivy_state = KivyStateVariables()
    title = "PyBitmessage"
    # Identities and static resources resolved once at class-definition time.
    identity_list = get_identity_list()
    image_path = load_image_path()
    app_platform = platform
    encrypted_messages_per_month = total_encrypted_messages_per_month()
    tr = Lang("en")  # to switch the UI to French, replace "en" with "fr"
def __init__(self):
    super(NavigateApp, self).__init__()
    # workaround for relative imports
    sys.path.append(os.path.join(os.path.dirname(os.path.abspath(__file__)), ".."))
    # Screen metadata loaded from the bundled JSON description of screens.
    (
        self.data_screens,
        self.all_data,
        self.data_screen_dict,
        response,
    ) = load_screen_json()
    self.kivy_state_obj = KivyStateVariables()
    self.image_dir = load_image_path()
    # Window.size is stored as "screen density" and reused as the window size.
    self.kivy_state_obj.screen_density = Window.size
    self.window_size = self.kivy_state_obj.screen_density

def build(self):
    """Method builds the widget"""
    # Load one .kv layout file per screen listed in the JSON data.
    for kv in self.data_screens:
        Builder.load_file(
            os.path.join(
                os.path.dirname(__file__),
                "kv",
                "{0}.kv".format(self.all_data[kv]["kv_string"]),
            )
        )
    # Intercept window-close requests (see on_request_close).
    Window.bind(on_request_close=self.on_request_close)
    return Builder.load_file(os.path.join(os.path.dirname(__file__), "main.kv"))

def set_screen(self, screen_name):
    """Set the screen name when navigate to other screens"""
    self.root.ids.scr_mngr.current = screen_name

def run(self):
    """Running the widgets"""
    loadConfig()
    # Background daemon thread forwarding core signals to the UI.
    kivysignalthread = UIkivySignaler()
    kivysignalthread.daemon = True
    kivysignalthread.start()
    # Tell the rest of the app that the Kivy UI is up.
    self.kivy_state_obj.kivyui_ready.set()
    super(NavigateApp, self).run()
def addingtoaddressbook(self):
    """Dialog for saving address"""
    # Android gets a slightly wider dialog.
    width = 0.85 if platform == "android" else 0.8
    self.add_popup = MDDialog(
        title="Add contact",
        type="custom",
        size_hint=(width, 0.23),
        content_cls=AddAddressPopup(),
        buttons=[
            MDRaisedButton(
                text="Save",
                on_release=self.savecontact,
            ),
            MDRaisedButton(
                text="Cancel",
                on_release=self.close_pop,
            ),
            MDRaisedButton(
                text="Scan QR code",
                on_release=self.scan_qr_code,
            ),
        ],
    )
    # Force the user to choose a button; tapping outside won't dismiss.
    self.add_popup.auto_dismiss = False
    self.add_popup.open()
def scan_qr_code(self, instance):
    """Open the QR-code scanner screen if a camera is available.

    Fix: corrected the user-facing typo "avaialbe" -> "available" in the
    Android alert message.
    """
    if self.is_camara_attached():
        self.add_popup.dismiss()
        # Hand the current screen and popup to the scan screen so it can
        # return the scanned address.
        self.root.ids.id_scanscreen.get_screen(
            self.root.ids.scr_mngr.current, self.add_popup
        )
        self.root.ids.scr_mngr.current = "scanscreen"
    else:
        alert_text = (
            "Currently this feature is not available!"
            if platform == "android"
            else "Camera is not available!"
        )
        self.add_popup.dismiss()
        toast(alert_text)
def is_camara_attached(self):
    """Return True when the scan screen detects a usable camera."""
    scan_screen = self.root.ids.id_scanscreen
    scan_screen.check_camera()
    return scan_screen.camera_available
def savecontact(self, instance):
    """Method is used for saving contacts"""
    popup_obj = self.add_popup.content_cls
    label = popup_obj.ids.label.text.strip()
    address = popup_obj.ids.address.text.strip()
    # Focus the label field when it is empty, prompting the user.
    popup_obj.ids.label.focus = not label
    # default focus on address field
    popup_obj.ids.address.focus = label or not address

def close_pop(self, instance):
    """Close the popup"""
    self.add_popup.dismiss()
    toast("Canceled")

def loadMyAddressScreen(self, action):
    """loadMyAddressScreen method spin the loader"""
    # The spinner's position in the widget tree depends on how many
    # children the screen currently has.
    if len(self.root.ids.id_myaddress.children) <= 2:
        self.root.ids.id_myaddress.children[0].active = action
    else:
        self.root.ids.id_myaddress.children[1].active = action

def load_screen(self, instance):
    """This method is used for loading screen on every click"""
    if instance.text == "Inbox":
        self.root.ids.scr_mngr.current = "inbox"
        self.root.ids.id_inbox.children[1].active = True
    elif instance.text == "Trash":
        self.root.ids.scr_mngr.current = "trash"
        # NOTE(review): the broad except covers two possible widget-tree
        # layouts for the trash screen's spinner — confirm which applies.
        try:
            self.root.ids.id_trash.children[1].active = True
        except Exception as e:
            self.root.ids.id_trash.children[0].children[1].active = True
    # Defer the heavy list (re)load by one second (see load_screen_callback).
    Clock.schedule_once(partial(self.load_screen_callback, instance), 1)
def load_screen_callback(self, instance, dt=0):
    """This method is rotating loader for few seconds"""
    if instance.text == "Inbox":
        # Rebuild the inbox message list for the selected address,
        # then stop the loading spinner.
        self.root.ids.id_inbox.ids.ml.clear_widgets()
        self.root.ids.id_inbox.loadMessagelist(self.kivy_state_obj.selected_address)
        self.root.ids.id_inbox.children[1].active = False
    elif instance.text == "Trash":
        # Recreate the Trash screen widget from scratch.
        self.root.ids.id_trash.clear_widgets()
        self.root.ids.id_trash.add_widget(self.data_screen_dict["Trash"].Trash())
        # NOTE(review): broad except mirrors load_screen's handling of the
        # two possible spinner positions in the widget tree.
        try:
            self.root.ids.id_trash.children[1].active = False
        except Exception as e:
            self.root.ids.id_trash.children[0].children[1].active = False
@staticmethod
def get_enabled_addresses():
    """Return every configured address whose 'enabled' flag is set."""
    enabled = []
    for addr in config.addresses():
        if config.getboolean(str(addr), "enabled"):
            enabled.append(addr)
    return enabled
@staticmethod
def format_address(address):
"""Formatting address"""
return " ({0})".format(address)
@staticmethod
def format_label(label):
"""Formatting label"""
if label:
f_name = label.split()
truncate_string = "..."
f_name_max_length = 15
formatted_label = (
f_name[0][:14].capitalize() + truncate_string
if len(f_name[0]) > f_name_max_length
else f_name[0].capitalize()
)
return formatted_label
return ""
@staticmethod
def format_address_and_label(address=None):
    """Return the formatted label followed by the address in parentheses.
    Defaults to the first enabled address; returns '' when none exist."""
    if not address:
        enabled_addresses = NavigateApp.get_enabled_addresses()
        if not enabled_addresses:
            return ""
        address = enabled_addresses[0]
    return "{0}{1}".format(
        NavigateApp.format_label(config.get(address, "label")),
        NavigateApp.format_address(address),
    )
def getDefaultAccData(self, instance):
    """Select and return the first known identity, or the placeholder text
    'Select Address' when no identities exist."""
    if not self.identity_list:
        return "Select Address"
    first_addr = self.identity_list[0]
    # Remember the selection in the shared UI state.
    self.kivy_state_obj.selected_address = first_addr
    return first_addr
def getCurrentAccountData(self, text):
"""Get Current Address Account Data"""
if text != "":
if os.path.exists(
os.path.join(self.image_dir, "default_identicon", "{}.png".format(text))
):
self.load_selected_Image(text)
else:
self.set_identicon(text)
self.root.ids.content_drawer.ids.reset_image.opacity = 0
self.root.ids.content_drawer.ids.reset_image.disabled = True
address_label = self.format_address_and_label(text)
self.root_window.children[1].ids.toolbar.title = address_label
self.kivy_state_obj.selected_address = text
AddressChangingLoader().open()
for nav_obj in (
self.root.ids.content_drawer.children[0]
.children[0]
.children[0]
.children
):
nav_obj.active = True if nav_obj.text == "Inbox" else False
self.fileManagerSetting()
Clock.schedule_once(self.setCurrentAccountData, 0.5)
def setCurrentAccountData(self, dt=0):
"""This method set the current accout data on all the screens"""
self.root.ids.id_inbox.ids.ml.clear_widgets()
self.root.ids.id_inbox.loadMessagelist(self.kivy_state_obj.selected_address)
self.root.ids.id_sent.ids.ml.clear_widgets()
self.root.ids.id_sent.children[2].children[2].ids.search_field.text = ""
self.root.ids.id_sent.loadSent(self.kivy_state_obj.selected_address)
def fileManagerSetting(self):
"""This method is for file manager setting"""
if (
not self.root.ids.content_drawer.ids.file_manager.opacity
and self.root.ids.content_drawer.ids.file_manager.disabled
):
self.root.ids.content_drawer.ids.file_manager.opacity = 1
self.root.ids.content_drawer.ids.file_manager.disabled = False
def on_request_close(self, *args):  # pylint: disable=no-self-use
    """Intercept the window-close request with a confirmation popup.

    Returning True tells Kivy not to close the window itself; the popup
    decides whether the app actually exits.
    """
    AppClosingPopup().open()
    return True
def clear_composer(self):
    """Reset the message-composer screen to a blank state.

    Clears every input field, switches the toolbar into composer mode,
    and records that the UI is now in the composer.
    """
    self.set_navbar_for_composer()
    composer_obj = self.root.ids.id_create.children[1].ids
    composer_obj.ti.text = ""
    composer_obj.composer_dropdown.text = "Select"
    composer_obj.txt_input.text = ""
    composer_obj.subject.text = ""
    composer_obj.body.text = ""
    # BUG FIX: the original ended with ``self.kivy_state_obj = False``,
    # which replaced the shared state object with a bool and broke every
    # subsequent ``self.kivy_state_obj.<attr>`` access. Only the flag is
    # set now.
    self.kivy_state_obj.in_composer = True
def set_navbar_for_composer(self):
    """Replace the toolbar actions while the composer screen is open.

    Left: back arrow. Right: reset and send, both forwarded to the
    composer widget (children[1] of the create screen).
    """
    self.root.ids.toolbar.left_action_items = [
        ["arrow-left", lambda x: self.back_press()]
    ]
    self.root.ids.toolbar.right_action_items = [
        ["refresh", lambda x: self.root.ids.id_create.children[1].reset_composer()],
        ["send", lambda x: self.root.ids.id_create.children[1].send(self)],
    ]
def set_identicon(self, text):
    """Show identicon in address spinner.

    Renders a generated identicon for ``text`` into the avatar slot of
    the navigation drawer.
    """
    img = identiconGeneration.generate(text)
    self.root.ids.content_drawer.ids.top_box.children[0].texture = img.texture
# pylint: disable=import-outside-toplevel
def file_manager_open(self):
    """Open the platform file manager so the user can pick an avatar image.

    On Android this first ensures the external-storage permissions are
    granted (requesting them if not); on desktop it opens at $HOME.
    """
    # NOTE(review): the guard checks ``self.kivy_state_obj.file_manager``
    # but the manager instance is stored on ``self.file_manager`` — confirm
    # the state flag is set elsewhere, otherwise a new manager is created
    # on every call.
    if not self.kivy_state_obj.file_manager:
        self.file_manager = MDFileManager(
            exit_manager=self.exit_manager,
            select_path=self.select_path,
            ext=[".png", ".jpg"],
        )
        self.file_manager.previous = False
        self.file_manager.current_path = "/"
    if platform == "android":
        # pylint: disable=import-error
        from android.permissions import (
            Permission,
            check_permission,
            request_permissions,
        )
        if check_permission(Permission.WRITE_EXTERNAL_STORAGE) and check_permission(
            Permission.READ_EXTERNAL_STORAGE
        ):
            self.file_manager.show(os.getenv("EXTERNAL_STORAGE"))
            self.kivy_state_obj.manager_open = True
        else:
            # Ask for permissions; the user must reopen the manager after
            # granting them.
            request_permissions(
                [
                    Permission.WRITE_EXTERNAL_STORAGE,
                    Permission.READ_EXTERNAL_STORAGE,
                ]
            )
    else:
        self.file_manager.show(os.environ["HOME"])
        self.kivy_state_obj.manager_open = True
def select_path(self, path):
    """Save the picked image as the avatar for the selected address.

    The image is resized to 300x300 and stored as
    ``<base>/default_identicon/<address>.png`` where ``<base>`` is an
    Android-private directory on Android and ``self.image_dir`` elsewhere.
    """
    try:
        newImg = PilImage.open(path).resize((300, 300))
        # FIX: the two platform branches previously duplicated the
        # mkdir/save logic, and the Android branch wrapped the path in a
        # redundant nested os.path.join() call.
        if platform == "android":
            base_dir = os.path.join(
                os.environ["ANDROID_PRIVATE"], "app", "images", "kivy"
            )
        else:
            base_dir = self.image_dir
        identicon_dir = os.path.join(base_dir, "default_identicon")
        if not os.path.exists(identicon_dir):
            os.makedirs(identicon_dir)
        newImg.save(
            os.path.join(
                identicon_dir,
                "{0}.png".format(self.kivy_state_obj.selected_address),
            )
        )
        self.load_selected_Image(self.kivy_state_obj.selected_address)
        toast("Image changed")
    except Exception:
        # Best-effort: any failure (unreadable image, I/O error) aborts
        # the picker with a toast, matching the original behaviour.
        toast("Exit")
    self.exit_manager()
def exit_manager(self, *args):
    """Close the file-manager view and record that it is no longer open."""
    self.file_manager.close()
    self.kivy_state_obj.manager_open = False
def load_selected_Image(self, curerentAddr):
    """Load the saved avatar image for the given address onto the drawer.

    Also re-enables the "reset image" action since a custom avatar is now
    shown.  (The parameter name ``curerentAddr`` is a historical typo kept
    for call compatibility.)
    """
    top_box_obj = self.root.ids.content_drawer.ids.top_box.children[0]
    top_box_obj.source = os.path.join(
        self.image_dir, "default_identicon", "{0}.png".format(curerentAddr)
    )
    self.root.ids.content_drawer.ids.reset_image.opacity = 1
    self.root.ids.content_drawer.ids.reset_image.disabled = False
    # Force Kivy to re-read the image file from disk.
    top_box_obj.reload()
def rest_default_avatar_img(self):
    """Restore the auto-generated identicon as the avatar.

    Regenerates the identicon, removes any custom image saved for the
    selected address, and hides the "reset image" action again.
    """
    self.set_identicon(self.kivy_state_obj.selected_address)
    img_path = os.path.join(
        self.image_dir,
        "default_identicon",
        "{}.png".format(self.kivy_state_obj.selected_address),
    )
    try:
        if os.path.exists(img_path):
            os.remove(img_path)
            self.root.ids.content_drawer.ids.reset_image.opacity = 0
            self.root.ids.content_drawer.ids.reset_image.disabled = True
    except OSError:
        # FIX: narrowed from a silent ``except Exception as e: pass`` with
        # an unused binding. Deletion stays best-effort — a filesystem
        # error must not break the UI — but other bugs now surface.
        pass
    toast("Avatar reset")
def get_default_logo(self, instance):
    """Return the image path to use as the drawer logo.

    Prefers the custom avatar of the first enabled identity; falls back
    to a generated identicon (set directly on ``instance``) or to the
    bundled default logo when no identity exists.
    """
    if self.identity_list:
        first_addr = self.identity_list[0]
        if config.getboolean(str(first_addr), "enabled"):
            if os.path.exists(
                os.path.join(
                    self.image_dir, "default_identicon", "{}.png".format(first_addr)
                )
            ):
                return os.path.join(
                    self.image_dir, "default_identicon", "{}.png".format(first_addr)
                )
            else:
                # No saved avatar: paint the identicon straight onto the
                # widget texture and return None (no source path needed).
                img = identiconGeneration.generate(first_addr)
                instance.texture = img.texture
                return
    return os.path.join(self.image_dir, "drawer_logo1.png")
@staticmethod
def have_any_address():
    """Return True when at least one address exists in the config."""
    return bool(config.addresses())
def reset_login_screen(self):
    """Remove any widgets left on the new-identity (random address) screen."""
    random_box = self.root.ids.id_newidentity.ids.add_random_bx
    if random_box.children:
        random_box.clear_widgets()
def reset(self, *args):
    """Restore the default screen transition and detach this handler.

    Bound once (by back_press) via ``transition.bind(on_complete=...)``;
    it unbinds itself after firing.
    """
    self.root.ids.scr_mngr.transition.direction = "left"
    self.root.ids.scr_mngr.transition.unbind(on_complete=self.reset)
def back_press(self):
    """Leave the composer/QR screen and return to the previous page."""
    if self.root.ids.scr_mngr.current == "showqrcode":
        self.set_common_header()
    self.root.ids.scr_mngr.current = "myaddress"
    # Bind a one-shot handler that restores the transition direction.
    self.root.ids.scr_mngr.transition.bind(on_complete=self.reset)
    # NOTE(review): this uses ``self.kivy_state`` while the rest of the
    # class uses ``self.kivy_state_obj`` — confirm both names point at the
    # same state object.
    self.kivy_state.in_composer = False
def set_toolbar_for_QrCode(self):
    """Configure the toolbar for the QR-code screen: back arrow only."""
    self.root.ids.toolbar.left_action_items = [
        ["arrow-left", lambda x: self.back_press()]
    ]
    self.root.ids.toolbar.right_action_items = []
def set_common_header(self):
    """Restore the default toolbar actions shared by most screens."""
    toolbar = self.root.ids.toolbar
    toolbar.left_action_items = [
        ["menu", lambda x: self.root.ids.nav_drawer.set_state("toggle")]
    ]
    toolbar.right_action_items = [
        ["account-plus", lambda x: self.addingtoaddressbook()]
    ]
def open_payment_layout(self, sku):
    """Open the bottom-sheet payment layout for the given product SKU."""
    pml = PaymentMethodLayout()
    # Remember which product the user is buying for initiate_purchase().
    self.product_id = sku
    self.custom_sheet = MDCustomBottomSheet(screen=pml)
    self.custom_sheet.open()
def initiate_purchase(self, method_name):
    """Start a purchase of the previously selected product.

    Currently only logs the request; no payment backend is wired up.
    """
    logger.debug("Purchasing %s through %s", self.product_id, method_name)
class PaymentMethodLayout(BoxLayout):
    """Bottom-sheet layout listing the available payment methods."""
# Script entry point: start the Kivy application event loop.
if __name__ == "__main__":
    NavigateApp().run()
|
community | discovery | import time
from random import choice
from ipv8.messaging.anonymization.tunnel import PEER_FLAG_EXIT_BT
from ipv8.peerdiscovery.discovery import DiscoveryStrategy
class GoldenRatioStrategy(DiscoveryStrategy):
    """
    Strategy for removing peers once we have too many in the TunnelCommunity.

    This strategy will remove a "normal" peer if the current ratio of "normal" peers to exit node peers is larger
    than the set golden ratio.
    This strategy will remove an exit peer if the current ratio of "normal" peers to exit node peers is smaller than
    the set golden ratio.
    """

    def __init__(self, overlay, golden_ratio=9 / 16, target_peers=23):
        """
        Initialize the GoldenRatioStrategy.

        :param overlay: the overlay instance to walk over
        :type overlay: TriblerTunnelCommunity
        :param golden_ratio: the ratio of normal/exit node peers to pursue (between 0.0 and 1.0)
        :type golden_ratio: float
        :param target_peers: the amount of peers at which to start removing (>0)
        :type target_peers: int
        :raises ValueError: if ``target_peers`` or ``golden_ratio`` is out of range
        :returns: None
        """
        # FIX: validate before any state is set, and raise instead of
        # ``assert`` (asserts are stripped under ``python -O``).
        if target_peers <= 0:
            raise ValueError("target_peers must be > 0")
        if not 0.0 <= golden_ratio <= 1.0:
            raise ValueError("golden_ratio must be between 0.0 and 1.0")
        super().__init__(overlay)
        self.golden_ratio = golden_ratio
        self.target_peers = target_peers
        # Per-peer timestamp of the last manual introduction request.
        self.intro_sent = {}

    def take_step(self):
        """
        We are asked to update, see if we have enough peers to start culling them.
        If we do have enough peers, select a suitable peer to remove.

        :returns: None
        """
        with self.walk_lock:
            peers = self.overlay.get_peers()

            # Drop bookkeeping for peers that have left the community.
            for peer in list(self.intro_sent.keys()):
                if peer not in peers:
                    self.intro_sent.pop(peer, None)

            # Some of the peers in the community could have been discovered using the DiscoveryCommunity. If this
            # happens we have no knowledge of their peer_flags. In order to still get the flags we send them an
            # introduction request manually (at most once every 300 seconds).
            now = time.time()
            for peer in peers:
                if (
                    peer not in self.overlay.candidates
                    and now > self.intro_sent.get(peer, 0) + 300
                ):
                    self.overlay.send_introduction_request(peer)
                    self.intro_sent[peer] = now

            peer_count = len(peers)
            if peer_count > self.target_peers:
                exit_peers = set(self.overlay.get_candidates(PEER_FLAG_EXIT_BT))
                exit_count = len(exit_peers)
                ratio = (
                    1.0 - exit_count / peer_count
                )  # Peer count is > 0 per definition
                if ratio < self.golden_ratio:
                    # Too many exit peers relative to normal peers.
                    self.overlay.network.remove_peer(choice(list(exit_peers)))
                elif ratio > self.golden_ratio:
                    # Too many normal peers relative to exit peers.
                    self.overlay.network.remove_peer(
                        choice(list(set(self.overlay.get_peers()) - exit_peers))
                    )
|
forms | users | from CTFd.constants.config import Configs
from CTFd.constants.languages import SELECT_LANGUAGE_LIST
from CTFd.forms import BaseForm
from CTFd.forms.fields import SubmitField
from CTFd.models import UserFieldEntries, UserFields
from CTFd.utils.countries import SELECT_COUNTRIES_LIST
from flask_babel import lazy_gettext as _l
from wtforms import BooleanField, PasswordField, SelectField, StringField
from wtforms.fields.html5 import EmailField
from wtforms.validators import InputRequired
def build_custom_user_fields(
    form_cls,
    include_entries=False,
    fields_kwargs=None,
    field_entries_kwargs=None,
    blacklisted_items=(),
):
    """Collect the custom user form fields for rendering by themes.

    Reinjects any stored entry values back into the form fields (only
    when ``include_entries`` is True) and tags each field with its
    ``field_type`` so templates know how to render it.
    """
    if fields_kwargs is None:
        fields_kwargs = {}
    if field_entries_kwargs is None:
        field_entries_kwargs = {}

    # Map of field_id -> previously stored value, loaded only on request.
    stored_values = {}
    if include_entries is True:
        stored_values = {
            entry.field_id: entry.value
            for entry in UserFieldEntries.query.filter_by(
                **field_entries_kwargs
            ).all()
        }

    collected = []
    for field in UserFields.query.filter_by(**fields_kwargs).all():
        if field.name.lower() in blacklisted_items:
            continue
        form_field = getattr(form_cls, f"fields[{field.id}]")
        # Record the type so Jinja knows how to render the control.
        form_field.field_type = field.field_type
        if include_entries is True:
            initial = stored_values.get(field.id, "")
            form_field.data = initial
            if form_field.render_kw:
                form_field.render_kw["data-initial"] = initial
            else:
                form_field.render_kw = {"data-initial": initial}
        collected.append(form_field)
    return collected
def attach_custom_user_fields(form_cls, **kwargs):
    """
    Function used to attach form fields to wtforms.
    Not really a great solution but is approved by wtforms.

    https://wtforms.readthedocs.io/en/2.3.x/specific_problems/#dynamic-form-composition
    """
    new_fields = UserFields.query.filter_by(**kwargs).all()
    for field in new_fields:
        validators = []
        if field.required:
            validators.append(InputRequired())

        if field.field_type == "text":
            input_field = StringField(
                field.name, description=field.description, validators=validators
            )
        elif field.field_type == "boolean":
            input_field = BooleanField(
                field.name, description=field.description, validators=validators
            )
        else:
            # BUG FIX: an unknown field_type previously left ``input_field``
            # unbound (NameError on the first iteration) or silently reused
            # the previous iteration's field. Skip such fields instead.
            continue

        setattr(form_cls, f"fields[{field.id}]", input_field)
def build_registration_code_field(form_cls):
    """Return the registration-code field (if enabled) for template rendering.

    Tags the field with ``field_type`` so Jinja knows how to render it;
    returns an empty list when no registration code is configured.
    """
    if not Configs.registration_code:
        return []
    field = getattr(form_cls, "registration_code")  # noqa B009
    field.field_type = "text"
    return [field]
def attach_registration_code_field(form_cls):
    """Attach a required registration-code field when the CTF demands one.

    Mirrors attach_custom_user_fields: the field is set directly on the
    form class so wtforms picks it up.
    """
    if not Configs.registration_code:
        return
    registration_field = StringField(
        "Registration Code",
        description="Registration code required to create account",
        validators=[InputRequired()],
    )
    setattr(form_cls, "registration_code", registration_field)  # noqa B010
class UserSearchForm(BaseForm):
    """Admin-side user search: pick an attribute and a query string."""

    field = SelectField(
        "Search Field",
        choices=[
            ("name", "Name"),
            ("id", "ID"),
            ("email", "Email"),
            ("affiliation", "Affiliation"),
            ("website", "Website"),
            ("ip", "IP Address"),
        ],
        default="name",
        validators=[InputRequired()],
    )
    q = StringField("Parameter", validators=[InputRequired()])
    submit = SubmitField("Search")
class PublicUserSearchForm(BaseForm):
    """Public user search with a reduced, localized set of fields."""

    field = SelectField(
        _l("Search Field"),
        choices=[
            ("name", _l("Name")),
            ("affiliation", _l("Affiliation")),
            ("website", _l("Website")),
        ],
        default="name",
        validators=[InputRequired()],
    )
    q = StringField(
        _l("Parameter"),
        description=_l("Search for matching users"),
        validators=[InputRequired()],
    )
    submit = SubmitField(_l("Search"))
class UserBaseForm(BaseForm):
    """Shared fields for the admin create/edit user forms.

    UserEditForm and UserCreateForm subclass this and dynamically attach
    any custom fields on top.
    """

    name = StringField("User Name", validators=[InputRequired()])
    email = EmailField("Email", validators=[InputRequired()])
    language = SelectField(_l("Language"), choices=SELECT_LANGUAGE_LIST)
    # Optional: leave blank on edit to keep the existing password.
    password = PasswordField("Password")
    website = StringField("Website")
    affiliation = StringField("Affiliation")
    country = SelectField("Country", choices=SELECT_COUNTRIES_LIST)
    type = SelectField("Type", choices=[("user", "User"), ("admin", "Admin")])
    verified = BooleanField("Verified")
    hidden = BooleanField("Hidden")
    banned = BooleanField("Banned")
    submit = SubmitField("Submit")
def UserEditForm(*args, **kwargs):
    """Factory returning a fresh edit-form instance for an existing user.

    A new subclass is created per call so dynamically attached custom
    fields do not leak between requests.
    """

    class _UserEditForm(UserBaseForm):
        pass

        @property
        def extra(self):
            # Custom fields with this user's stored values injected, for
            # rendering by themes.
            return build_custom_user_fields(
                self,
                include_entries=True,
                fields_kwargs=None,
                field_entries_kwargs={"user_id": self.obj.id},
            )

        def __init__(self, *args, **kwargs):
            """
            Custom init to persist the obj parameter to the rest of the form
            """
            super().__init__(*args, **kwargs)
            obj = kwargs.get("obj")
            if obj:
                self.obj = obj

    attach_custom_user_fields(_UserEditForm)

    return _UserEditForm(*args, **kwargs)
def UserCreateForm(*args, **kwargs):
    """Factory returning a fresh create-form instance.

    Like UserEditForm, a new subclass is built per call so custom fields
    stay request-local; no stored entry values are injected here.
    """

    class _UserCreateForm(UserBaseForm):
        notify = BooleanField("Email account credentials to user", default=True)

        @property
        def extra(self):
            return build_custom_user_fields(self, include_entries=False)

    attach_custom_user_fields(_UserCreateForm)

    return _UserCreateForm(*args, **kwargs)
|
plotting | grid | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (C) 2015 Timothée Lecomte
# This file is part of Friture.
#
# Friture is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 3 as published by
# the Free Software Foundation.
#
# Friture is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Friture. If not, see <http://www.gnu.org/licenses/>.
import numpy as np
from friture.plotting.canvasBackground import CanvasBackground
from PyQt5 import QtCore, QtGui
class Grid:
    """Cached grid overlay for a plot canvas.

    Tick positions are mapped to screen coordinates and drawn once into a
    pixmap; the pixmap is then blitted on every paint until the canvas
    size or the tick data changes.
    """

    def __init__(self, *args):
        self.canvas_width = 2
        self.canvas_height = 2

        # True when the cached pixmap must be rebuilt.
        self.need_transform = False

        self.cache_pixmap = QtGui.QPixmap()

        self.xMajorTick = np.array([])
        self.xMinorTick = np.array([])
        self.yMajorTick = np.array([])
        self.yMinorTick = np.array([])

        self.background = CanvasBackground()

    def setGrid(self, xMajorTick, xMinorTick, yMajorTick, yMinorTick):
        """Store new tick positions and invalidate the cached pixmap."""
        self.xMajorTick = xMajorTick
        self.xMinorTick = xMinorTick
        self.yMajorTick = yMajorTick
        self.yMinorTick = yMinorTick
        self.need_transform = True

    def drawToCache(self, painter, xMap, yMap, rect):
        """Rebuild the cached grid pixmap for the given canvas rect.

        ``painter`` is accepted for signature symmetry with draw(); the
        grid itself is rendered with a dedicated painter on the pixmap.
        """
        w = rect.width()
        h = rect.height()

        xMajorTick = xMap.toScreen(self.xMajorTick)
        xMinorTick = xMap.toScreen(self.xMinorTick)
        # Screen y grows downward, so flip the mapped y coordinates.
        yMajorTick = h - yMap.toScreen(self.yMajorTick)
        yMinorTick = h - yMap.toScreen(self.yMinorTick)

        self.cache_pixmap = QtGui.QPixmap(w, h)
        self.cache_pixmap.fill(QtCore.Qt.transparent)

        # FIX: the original rebound the ``painter`` parameter here, silently
        # shadowing the caller's painter; use a distinct local instead.
        cache_painter = QtGui.QPainter(self.cache_pixmap)

        self.background.directDraw(cache_painter, rect)

        cache_painter.setPen(QtGui.QPen(QtGui.QColor(QtCore.Qt.gray)))
        for x in xMajorTick:
            cache_painter.drawLine(x, 0, x, h)

        cache_painter.setPen(QtGui.QPen(QtGui.QColor(QtCore.Qt.lightGray)))
        for x in xMinorTick:
            cache_painter.drawLine(x, 0, x, h)

        cache_painter.setPen(QtGui.QPen(QtGui.QColor(QtCore.Qt.gray)))
        for y in yMajorTick:
            cache_painter.drawLine(0, y, w, y)

        # given the usual aspect ratio of the canvas, the vertical minor
        # ticks would make it look crowded
        # cache_painter.setPen(QtGui.QPen(QtGui.QColor(QtCore.Qt.lightGray)))
        # for y in yMinorTick:
        #     cache_painter.drawLine(0, y, w, y)

    def draw(self, painter, xMap, yMap, rect):
        """Blit the cached grid, rebuilding it first if it is stale."""
        # update the cache according to possibly new canvas dimensions
        h = rect.height()
        w = rect.width()
        if w != self.canvas_width:
            self.canvas_width = w
            self.need_transform = True
        if h != self.canvas_height:
            self.canvas_height = h
            self.need_transform = True

        if self.need_transform:
            self.drawToCache(painter, xMap, yMap, rect)
            self.need_transform = False

        painter.drawPixmap(0, 0, self.cache_pixmap)
|
plugins | reuters | """
$description Global business, financial, national and international news.
$url reuters.com
$url reuters.tv
$type live, vod
"""
import logging
import re
from streamlink.plugin import Plugin, PluginError, pluginmatcher
from streamlink.plugin.api import validate
from streamlink.stream.hls import HLSStream
log = logging.getLogger(__name__)
@pluginmatcher(
re.compile(
r"https?://([\w-]+\.)*reuters\.(com|tv)",
)
)
class Reuters(Plugin):
    """Resolve Reuters live/VOD pages to an HLS playlist URL."""

    def _get_data(self):
        """Find the stream URL by probing several page structures in turn.

        Order: the ``og:video`` meta tag, the ld+json "next-head" script,
        then two known shapes of the Fusion metadata blob.  Returns the
        URL string, or implicitly None when nothing matched.
        """
        root = self.session.http.get(
            self.url,
            schema=validate.Schema(
                validate.parse_html(),
            ),
        )

        try:
            log.debug("Trying to find source via meta tag")
            schema = validate.Schema(
                validate.xml_xpath_string(".//meta[@property='og:video'][1]/@content"),
                validate.url(),
            )
            return schema.validate(root)
        except PluginError:
            pass

        try:
            log.debug("Trying to find source via next-head")
            schema = validate.Schema(
                validate.xml_findtext(".//script[@type='application/ld+json'][@class='next-head']"),
                validate.parse_json(),
                {"contentUrl": validate.url()},
                validate.get("contentUrl"),
            )
            return schema.validate(root)
        except PluginError:
            pass

        # Sub-schemas shared by the two Fusion-metadata probes below.
        schema_fusion = validate.xml_findtext(".//script[@type='application/javascript'][@id='fusion-metadata']")
        schema_video = validate.all(
            {"source": {"hls": validate.url()}},
            validate.get(("source", "hls")),
        )

        try:
            log.debug("Trying to find source via fusion-metadata globalContent")
            schema = validate.Schema(
                schema_fusion,
                validate.regex(re.compile(r"Fusion\s*\.\s*globalContent\s*=\s*(?P<json>{.+?})\s*;\s*Fusion\s*\.", re.DOTALL)),
                validate.get("json"),
                validate.parse_json(),
                {"result": {"related_content": {"videos": list}}},
                validate.get(("result", "related_content", "videos", 0)),
                schema_video,
            )
            return schema.validate(root)
        except PluginError:
            pass

        try:
            log.debug("Trying to find source via fusion-metadata contentCache")
            schema = validate.Schema(
                schema_fusion,
                validate.regex(re.compile(r"Fusion\s*\.\s*contentCache\s*=\s*(?P<json>{.+?})\s*;\s*Fusion\s*\.", re.DOTALL)),
                validate.get("json"),
                validate.parse_json(),
                {"videohub-by-guid-v1": {str: {"data": {"result": {"videos": list}}}}},
                validate.get("videohub-by-guid-v1"),
                # The GUID key is not known in advance; take the first entry.
                validate.transform(lambda obj: obj[list(obj.keys())[0]]),
                validate.get(("data", "result", "videos", 0)),
                schema_video,
            )
            return schema.validate(root)
        except PluginError:
            pass

    def _get_streams(self):
        """Return the variant HLS streams when a source URL was found."""
        hls_url = self._get_data()
        if hls_url:
            return HLSStream.parse_variant_playlist(self.session, hls_url)
__plugin__ = Reuters
|
dev | ssltest | import os
import select
import socket
import ssl
import sys
import traceback
HOST = "127.0.0.1"
PORT = 8912
def sslProtocolVersion():
    """Pick the best SSL/TLS protocol constant this interpreter supports."""
    if sys.version_info >= (2, 7, 13):
        # TLSv1 or higher; in the future switch to ssl.PROTOCOL_TLS1.2
        return ssl.PROTOCOL_TLS
    if sys.version_info >= (2, 7, 9):
        # Any SSL/TLS; SSLv2 and SSLv3 get excluded with an option after
        # the context is created.
        return ssl.PROTOCOL_SSLv23
    # Before 2.7.9 there is no way to request "TLSv1 or higher" or
    # "TLSv1.2"; plain TLSv1 is all we can ask for.
    return ssl.PROTOCOL_TLSv1
def sslProtocolCiphers():
    """Return the anonymous-ECDH cipher string for the local OpenSSL build."""
    # OpenSSL >= 1.1.0 demotes anonymous ciphers; SECLEVEL=0 re-enables them.
    if ssl.OPENSSL_VERSION_NUMBER < 0x10100000:
        return "AECDH-AES256-SHA"
    return "AECDH-AES256-SHA@SECLEVEL=0"
def connect():
    """Open a TCP client connection to HOST:PORT and return the socket."""
    return socket.create_connection((HOST, PORT))
def listen():
    """Create and return a TCP server socket listening on HOST:PORT."""
    server_sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    # Allow quick restarts without waiting for TIME_WAIT to expire.
    server_sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
    server_sock.bind((HOST, PORT))
    server_sock.listen(0)
    return server_sock
def sslHandshake(sock, server=False):
    """Wrap ``sock`` in SSL/TLS and drive a non-blocking handshake.

    Returns the wrapped socket on success, or None when the handshake
    fails.  ``server=True`` selects the server side of the handshake.
    """
    if sys.version_info >= (2, 7, 9):
        # Modern path: explicit context, anonymous ECDH, no certificates.
        context = ssl.SSLContext(sslProtocolVersion())
        context.set_ciphers(sslProtocolCiphers())
        context.set_ecdh_curve("secp256k1")
        context.check_hostname = False
        context.verify_mode = ssl.CERT_NONE
        context.options = (
            ssl.OP_ALL
            | ssl.OP_NO_SSLv2
            | ssl.OP_NO_SSLv3
            | ssl.OP_SINGLE_ECDH_USE
            | ssl.OP_CIPHER_SERVER_PREFERENCE
        )
        sslSock = context.wrap_socket(
            sock, server_side=server, do_handshake_on_connect=False
        )
    else:
        # Legacy path (< 2.7.9): wrap directly with on-disk key/cert files.
        sslSock = ssl.wrap_socket(
            sock,
            keyfile=os.path.join("src", "sslkeys", "key.pem"),
            certfile=os.path.join("src", "sslkeys", "cert.pem"),
            server_side=server,
            ssl_version=sslProtocolVersion(),
            do_handshake_on_connect=False,
            ciphers="AECDH-AES256-SHA",
        )

    # Drive the handshake manually, waiting via select() whenever the
    # socket wants to read or write; give up on any other error.
    while True:
        try:
            sslSock.do_handshake()
            break
        except ssl.SSLWantReadError:
            print("Waiting for SSL socket handhake read")
            select.select([sslSock], [], [], 10)
        except ssl.SSLWantWriteError:
            print("Waiting for SSL socket handhake write")
            select.select([], [sslSock], [], 10)
        except Exception:
            print("SSL socket handhake failed, shutting down connection")
            traceback.print_exc()
            return
    print("Success!")
    return sslSock
# Manual test harness: run one process as "server" and another as
# "client" to exercise the anonymous-ECDH handshake above.
if __name__ == "__main__":
    if len(sys.argv) != 2:
        print("Usage: ssltest.py client|server")
        sys.exit(0)
    elif sys.argv[1] == "server":
        serversock = listen()
        # Accept connections forever, handshaking and closing each one.
        while True:
            print("Waiting for connection")
            sock, addr = serversock.accept()
            print("Got connection from %s:%i" % (addr[0], addr[1]))
            sslSock = sslHandshake(sock, True)
            if sslSock:
                sslSock.shutdown(socket.SHUT_RDWR)
                sslSock.close()
    elif sys.argv[1] == "client":
        sock = connect()
        sslSock = sslHandshake(sock, False)
        if sslSock:
            sslSock.shutdown(socket.SHUT_RDWR)
            sslSock.close()
    else:
        print("Usage: ssltest.py client|server")
        sys.exit(0)
|
View | Deliver | #!/usr/bin/env python
# -*- coding:utf-8 -*-
# A GAE web application to aggregate rss and send it to your kindle.
# Visit https://github.com/cdhigh/KindleEar for the latest version
# Contributors:
# rexdf <https://github.com/rexdf>
import gettext
from collections import defaultdict
import web
from apps.BaseHandler import BaseHandler
from apps.dbModels import *
from apps.utils import local_time
from books import BookClass
from google.appengine.api import taskqueue
class Deliver(BaseHandler):
    """Decide which books need to be pushed to each user's Kindle."""

    __url__ = "/deliver"

    def queueit(self, usr, bookid, separate, feedsId=None):
        # Queue a single book for delivery.  When the user enabled merged
        # delivery (and the book is neither "separate" nor a single feed),
        # the id is buffered in queue2push and flushed later as one task.
        param = {"u": usr.name, "id": bookid}
        if feedsId:
            param["feedsId"] = feedsId
        if usr.merge_books and not separate and not feedsId:
            self.queue2push[usr.name].append(str(bookid))  # merged delivery
        else:
            taskqueue.add(
                url="/worker",
                queue_name="deliverqueue1",
                method="GET",
                params=param,
                target="worker",
            )

    def flushqueue(self):
        # Emit one worker task per user containing all buffered book ids,
        # then reset the buffer.
        for name in self.queue2push:
            param = {"u": name, "id": ",".join(self.queue2push[name])}
            taskqueue.add(
                url="/worker",
                queue_name="deliverqueue1",
                method="GET",
                params=param,
                target="worker",
            )
        self.queue2push = defaultdict(list)

    def GET(self):
        # Entry point, hit either manually (with ?u=<user>) or by cron.
        username = web.input().get("u")
        id_ = web.input().get("id")
        feedsId = web.input().get("feedsId")
        if id_:
            id_ = [int(item) for item in id_.split("|") if item.isdigit()]
        self.queue2push = defaultdict(list)
        books = Book.all()
        if username:  # immediate delivery (for testing); skips day/time checks
            user = KeUser.all().filter("name = ", username).get()
            if not user or not user.kindle_email:
                return self.render(
                    "autoback.html",
                    "Delivering",
                    tips=_("The username not exist or the email of kindle is empty."),
                )
            sent = []
            if id_:  # push only the listed books for this account; no check that the account actually subscribes to them
                books2push = [
                    Book.get_by_id(item) for item in id_ if Book.get_by_id(item)
                ]
            else:  # push every book this account subscribes to
                books2push = [item for item in books if username in item.users]
            for book in books2push:
                self.queueit(user, book.key().id(), book.separate, feedsId)
                sent.append(book.title)
            self.flushqueue()
            if len(sent):
                tips = _("Book(s) (%s) put to queue!") % ", ".join(sent)
            else:
                tips = _("No book to deliver!")
            return self.render("autoback.html", "Delivering", tips=tips)

        # Periodic cron invocation: check every book/user pair.
        sentcnt = 0
        for book in books:
            if not book.users:  # nobody subscribes to this book
                continue
            bkcls = None
            if book.builtin:
                bkcls = BookClass(book.title)
                if not bkcls:
                    continue
            # Decide per subscriber whether this book must be delivered now.
            for u in book.users:
                user = (
                    KeUser.all()
                    .filter("enable_send = ", True)
                    .filter("name = ", u)
                    .get()
                )
                if not user or not user.kindle_email:
                    continue
                # First check whether today is a delivery day.
                day = local_time("%A", user.timezone)
                usrdays = user.send_days
                if bkcls and bkcls.deliver_days:  # book-defined weekday schedule
                    days = bkcls.deliver_days
                    if not isinstance(days, list):
                        days = [days]
                    if day not in days:
                        continue
                elif usrdays and day not in usrdays:  # empty user list means deliver daily
                    continue
                # Then check the delivery hour.
                h = int(local_time("%H", user.timezone)) + 1
                if h >= 24:
                    h -= 24
                if bkcls and bkcls.deliver_times:
                    times = bkcls.deliver_times
                    if not isinstance(times, list):
                        times = [times]
                    if h not in times:
                        continue
                elif user.send_time != h:
                    continue
                # Reaching this point means this user/book pair is due.
                self.queueit(user, book.key().id(), book.separate)
                sentcnt += 1
        self.flushqueue()
        return "Put <strong>%d</strong> books to queue!" % sentcnt
|
plugins | lrt | """
$description Live TV channels from LRT, a Lithuanian public, state-owned broadcaster.
$url lrt.lt
$type live
"""
import re
from streamlink.plugin import Plugin, pluginmatcher
from streamlink.plugin.api import validate
from streamlink.stream.hls import HLSStream
@pluginmatcher(
re.compile(
r"https?://(?:www\.)?lrt\.lt/mediateka/tiesiogiai/",
)
)
class LRT(Plugin):
    """Resolve LRT live pages to an HLS playlist URL."""

    def _get_streams(self):
        # The page embeds a JS variable ``tokenURL`` that points at a JSON
        # endpoint which in turn contains the tokenized HLS URL.
        token_url = self.session.http.get(
            self.url,
            schema=validate.Schema(
                re.compile(r"""var\s+tokenURL\s*=\s*(?P<q>["'])(?P<url>https://\S+)(?P=q)"""),
                validate.none_or_all(validate.get("url")),
            ),
        )
        if not token_url:
            return

        hls_url = self.session.http.get(
            token_url,
            schema=validate.Schema(
                validate.parse_json(),
                {
                    "response": {
                        "data": {
                            "content": validate.all(
                                str,
                                # The endpoint pads the URL with whitespace.
                                validate.transform(lambda url: url.strip()),
                                validate.url(path=validate.endswith(".m3u8")),
                            ),
                        },
                    },
                },
                validate.get(("response", "data", "content")),
            ),
        )

        return HLSStream.parse_variant_playlist(self.session, hls_url)
__plugin__ = LRT
|
gh-pages | update_feed | #!/usr/bin/env python3
from __future__ import unicode_literals
import datetime
import json
import os.path
import sys
import textwrap
dirn = os.path.dirname
sys.path.insert(0, dirn(dirn(os.path.abspath(__file__))))
from utils import write_file
atom_template = textwrap.dedent(
"""\
<?xml version="1.0" encoding="utf-8"?>
<feed xmlns="http://www.w3.org/2005/Atom">
<link rel="self" href="http://ytdl-org.github.io/youtube-dl/update/releases.atom" />
<title>youtube-dl releases</title>
<id>https://yt-dl.org/feed/youtube-dl-updates-feed</id>
<updated>@TIMESTAMP@</updated>
@ENTRIES@
</feed>"""
)
entry_template = textwrap.dedent(
"""
<entry>
<id>https://yt-dl.org/feed/youtube-dl-updates-feed/youtube-dl-@VERSION@</id>
<title>New version @VERSION@</title>
<link href="http://ytdl-org.github.io/youtube-dl" />
<content type="xhtml">
<div xmlns="http://www.w3.org/1999/xhtml">
Downloads available at <a href="https://yt-dl.org/downloads/@VERSION@/">https://yt-dl.org/downloads/@VERSION@/</a>
</div>
</content>
<author>
<name>The youtube-dl maintainers</name>
</author>
<updated>@TIMESTAMP@</updated>
</entry>
"""
)
now = datetime.datetime.now()
now_iso = now.isoformat() + "Z"

atom_template = atom_template.replace("@TIMESTAMP@", now_iso)

# FIX: close the versions file deterministically instead of leaking the
# handle from a bare ``open()`` call.
with open("update/versions.json") as versions_file:
    versions_info = json.load(versions_file)
versions = list(versions_info["versions"].keys())
versions.sort()

entries = []
for v in versions:
    fields = v.split(".")
    year, month, day = map(int, fields[:3])
    faked = 0
    patchlevel = 0
    # Some version strings embed an invalid calendar date (e.g. a "day"
    # past the end of the month); walk the day back until it parses and
    # remember how far it was faked so timestamps stay unique.
    while True:
        try:
            datetime.date(year, month, day)
        except ValueError:
            day -= 1
            faked += 1
            assert day > 0
            continue
        break
    if len(fields) >= 4:
        try:
            patchlevel = int(fields[3])
        except ValueError:
            patchlevel = 1
    # Encode faked-days and patchlevel in the minutes/seconds fields to
    # keep entries for the same date distinct.
    timestamp = "%04d-%02d-%02dT00:%02d:%02dZ" % (year, month, day, faked, patchlevel)

    entry = entry_template.replace("@TIMESTAMP@", timestamp)
    entry = entry.replace("@VERSION@", v)
    entries.append(entry)

entries_str = textwrap.indent("".join(entries), "\t")
atom_template = atom_template.replace("@ENTRIES@", entries_str)

write_file("update/releases.atom", atom_template)
|
database | covers_1to2 | # Copyright (C) 2009 Aren Olson (for the old_get_track_key function)
# Copyright (C) 2018 Johannes Sasongko <sasongko@gmail.com>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2, or (at your option)
# any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
#
# The developers of the Exaile media player hereby grant permission
# for non-GPL compatible GStreamer and Exaile plugins to be used and
# distributed together with GStreamer and Exaile. This permission is
# above and beyond the permissions granted by the GPL license by which
# Exaile is covered. If you modify this code, you may extend this
# exception to your version of the code, but you are not obligated to
# do so. If you do not wish to do so, delete this exception statement
# from your version.
__all__ = ["migrate"]
import logging
import os
import xl.collection
import xl.covers
logger = logging.getLogger(__name__)
def old_get_track_key(track):
    """
    Get the db mapping key for a track (the pre-Exaile-4.0 scheme).

    Returns a ``(tag, tuple(value))`` pair, or None when the track has no
    album information to key on.
    """
    album = track.get_tag_raw("album", join=True)
    compilation = track.get_tag_raw("__compilation")

    if compilation:
        # Compilations key on the album artist when present, otherwise on
        # the compilation marker itself.
        value = track.get_tag_raw("albumartist")
        tag = "albumartist" if value else "compilation"
        if not value:
            value = compilation
    elif album:
        tag, value = "album", album
    else:
        # No album info, so there is nothing to store the cover under.
        return None
    return (tag, tuple(value))
def migrate():
    """Migrate covers.db version 1 to 2 (Exaile 4.0)."""
    man = xl.covers.MANAGER
    # Nothing to do unless the on-disk db is still at version 1.
    if man.db.get("version", 1) != 1:
        return
    logger.info("Upgrading covers.db to version 2")
    # Cache files still referenced by the new db; everything else is pruned.
    valid_cachefiles = set()
    old_db = man.db
    new_db = {"version": 2}
    for coll in xl.collection.COLLECTIONS:
        for tr in coll.tracks.values():
            # Re-key each track from the old (tag, chars) scheme to the
            # current manager scheme, copying the stored cover value over.
            key = old_get_track_key(tr._track)
            value = old_db.get(key)
            if value:
                new_key = man._get_track_key(tr)
                new_db[new_key] = value
                # "cache:<filename>" values point into the cover cache dir.
                if value.startswith("cache:"):
                    valid_cachefiles.add(value[6:])
    man.db = new_db
    man.save()
    # Remove orphaned cache files no surviving entry references.
    cachedir = os.path.join(man.location, "cache")
    for cachefile in frozenset(os.listdir(cachedir)) - valid_cachefiles:
        os.remove(os.path.join(cachedir, cachefile))
|
legecy-translators | flow | """This module translates JS flow into PY flow.
Translates:
IF ELSE
DO WHILE
WHILE
FOR 123
FOR iter
CONTINUE, BREAK, RETURN, LABEL, THROW, TRY, SWITCH
"""
import random
from jsparser import *
from nodevisitor import exp_translator
from utils import *
TO_REGISTER = []
CONTINUE_LABEL = 'JS_CONTINUE_LABEL_%s'
BREAK_LABEL = 'JS_BREAK_LABEL_%s'
PREPARE = '''HOLDER = var.own.get(NAME)\nvar.force_own_put(NAME, PyExceptionToJs(PyJsTempException))\n'''
RESTORE = '''if HOLDER is not None:\n var.own[NAME] = HOLDER\nelse:\n del var.own[NAME]\ndel HOLDER\n'''
TRY_CATCH = '''%stry:\nBLOCKfinally:\n%s''' % (PREPARE, indent(RESTORE))
def get_continue_label(label):
    # Hex-encode the JS label so the generated Python identifier is safe.
    # NOTE: str.encode('hex') is Python 2 only, like the rest of this
    # legacy module.
    return CONTINUE_LABEL%label.encode('hex')
def get_break_label(label):
    # Hex-encode the JS label so the generated Python identifier is safe
    # (Python 2 only, see get_continue_label).
    return BREAK_LABEL%label.encode('hex')
def pass_until(source, start, tokens=(';',)):
    """Scan forward from ``start`` to the first occurrence of any token.

    Returns the index one past the stopping position (one past the end of
    ``source`` when no token is found), so parsing resumes after it.
    """
    length = len(source)
    position = start
    while position < length and source[position] not in tokens:
        position += 1
    return position + 1
def do_bracket_exp(source, start, throw=True):
    """Translate the parenthesized expression starting at ``start``.

    Returns ``(translated, position_after)``; with ``throw=False`` a
    missing/empty bracket yields ``('', start)`` instead of raising.
    """
    bra, cand = pass_bracket(source, start, '()')
    if not bra:
        if throw:
            raise SyntaxError('Missing bracket expression')
        # BUG FIX: previously fell through to ``bra[1:-1]`` with bra=None,
        # raising TypeError instead of reporting the missing bracket.
        return '', start
    bra = exp_translator(bra[1:-1])
    if throw and not bra:
        raise SyntaxError('Empty bracket condition')
    return bra, cand if bra else start
def do_if(source, start):
    """Translate a JS if / else-if / else statement into Python.

    Returns (translated_code, new_position).
    """
    start += 2 # pass this if
    bra, start = do_bracket_exp(source, start, throw=True)
    statement, start = do_statement(source, start)
    if statement is None:
        raise SyntaxError('Invalid if statement')
    translated = 'if %s:\n'%bra+indent(statement)
    # Optional else / else-if clause.
    elseif = except_keyword(source, start, 'else')
    is_elseif = False
    if elseif:
        start = elseif
        if except_keyword(source, start, 'if'):
            is_elseif = True
        elseif, start = do_statement(source, start)
        if elseif is None:
            raise SyntaxError('Invalid if statement)')
        if is_elseif:
            # 'else if ...' recursively produced 'if ...'; prefix to get 'elif'.
            translated += 'el' + elseif
        else:
            translated += 'else:\n'+ indent(elseif)
    return translated, start
def do_statement(source, start):
    """Translate the next statement starting at *start*.

    Returns (code, new_position); code is None when no statement begins here
    (end of input, or a 'case'/'default' clause which the switch handler
    consumes itself).  Unlike the other do_* functions, this one does not
    raise on "not found".  Leading whitespace is skipped.

    NOTE: dict.iteritems() below makes this module Python 2 only.
    """
    start = pass_white(source, start)
    # start is the fist position after initial start that is not a white space or \n
    if not start < len(source): #if finished parsing return None
        return None, start
    if any(startswith_keyword(source[start:], e) for e in {'case', 'default'}):
        return None, start
    rest = source[start:]
    for key, meth in KEYWORD_METHODS.iteritems(): # check for statements that are uniquely defined by their keywords
        if rest.startswith(key):
            # has to startwith this keyword and the next letter after keyword must be either EOF or not in IDENTIFIER_PART
            if len(key)==len(rest) or rest[len(key)] not in IDENTIFIER_PART:
                return meth(source, start)
    if rest[0] == '{': #Block
        return do_block(source, start)
    # Now only label and expression left
    cand = parse_identifier(source, start, False)
    if cand is not None: # it can mean that its a label
        label, cand_start = cand
        cand_start = pass_white(source, cand_start)
        if source[cand_start]==':':
            return do_label(source, start)
    return do_expression(source, start)
def do_while(source, start):
    """Translate a JS `while (cond) stmt` into a Python while loop."""
    start += 5  # skip the 'while' keyword
    condition, start = do_bracket_exp(source, start, throw=True)
    body, start = do_statement(source, start)
    if body is None:
        raise SyntaxError('Missing statement to execute in while loop!')
    translated = 'while %s:\n' % condition + indent(body)
    return translated, start
def do_dowhile(source, start):
    """Translate a JS `do stmt while (cond);` loop.

    Emitted as an infinite Python loop whose body ends with a conditional
    break, which preserves the run-at-least-once semantics.
    """
    start += 2  # skip the 'do' keyword
    body, start = do_statement(source, start)
    if body is None:
        raise SyntaxError('Missing statement to execute in do while loop!')
    start = except_keyword(source, start, 'while')
    if not start:
        raise SyntaxError('Missing while keyword in do-while loop')
    condition, start = do_bracket_exp(source, start, throw=True)
    body += 'if not %s:\n' % condition + indent('break\n')
    return 'while 1:\n' + indent(body), start
def do_block(source, start):
    """Translate a `{ ... }` block by translating each statement inside it.

    Returns (translated_code, position_after_block).  Raises SyntaxError if
    no block starts here or if part of the block cannot be parsed.
    """
    bra, start = pass_bracket(source, start, '{}')
    #print source[start:], bra
    #return bra +'\n', start
    if bra is None:
        raise SyntaxError('Missing block ( {code} )')
    code = ''
    # Strip the surrounding braces; trailing ';' guarantees a terminator.
    bra = bra[1:-1]+';'
    bra_pos = 0
    while bra_pos<len(bra):
        st, bra_pos = do_statement(bra, bra_pos)
        if st is None:
            break
        code += st
    bra_pos = pass_white(bra, bra_pos)
    # Anything left over means the block contained unparseable code.
    if bra_pos<len(bra):
        raise SyntaxError('Block has more code that could not be parsed:\n'+bra[bra_pos:])
    return code, start
def do_empty(source, start):
    """Translate an empty JS statement (a lone ';') to a Python no-op."""
    translated = 'pass\n'
    return translated, start + 1
def do_expression(source, start):
    """Translate an expression statement, applying automatic semicolon
    insertion (ASI) on failure.

    Tries to translate everything up to the next ';'; if that fails to
    compile, retries with the statement cut at successive line terminators
    (scanning backwards), emulating JS ASI.  Returns (code, new_position).
    """
    start = pass_white(source, start)
    end = pass_until(source, start, tokens=(';',))
    if end==start+1: #empty statement
        return 'pass\n', end
    # AUTOMATIC SEMICOLON INSERTION FOLLOWS
    # Without ASI this function would end with: return exp_translator(source[start:end].rstrip(';'))+'\n', end
    # ASI makes things a bit more complicated:
    # we will try to parse as much as possible, inserting ; in place of last new line in case of error
    rev = False
    rpos = 0
    while True:
        try:
            code = source[start:end].rstrip(';')
            cand = exp_translator(code)+'\n', end
            # compile() is used purely as a syntax check of the candidate.
            just_to_test = compile(cand[0], '', 'exec')
            return cand
        except Exception as e:
            if not rev:
                # Lazily build the reversed source so we can scan line
                # terminators from the end of the failed candidate.
                rev = source[start:end][::-1]
            lpos = rpos
            while True:
                rpos = pass_until(rev, rpos, LINE_TERMINATOR)
                if rpos>=len(rev):
                    raise
                # Only cut at a terminator that removes non-whitespace code
                # (Py2: filter() returns a list, truthy when non-empty).
                if filter(lambda x: x not in SPACE, rev[lpos:rpos]):
                    break
            end = start + len(rev) - rpos + 1
def do_var(source, start):
    """Translate a JS `var a = 1, b, c = 2;` declaration.

    Declared names are appended to the module-level TO_REGISTER list;
    only declarations with initializers produce executable code.
    """
    #todo auto ; insertion
    start += 3 #pass var
    end = pass_until(source, start, tokens=(';',))
    defs = argsplit(source[start:end-1]) # defs is the list of defined vars with optional initializer
    code = ''
    for de in defs:
        var, var_end = parse_identifier(de, 0, True)
        TO_REGISTER.append(var)
        var_end = pass_white(de, var_end)
        if var_end<len(de): # we have something more to parse... It has to start with =
            if de[var_end] != '=':
                raise SyntaxError('Unexpected initializer in var statement. Expected "=", got "%s"'%de[var_end])
            code += exp_translator(de) + '\n'
    if not code.strip():
        # A declaration with no initializers still needs a statement body.
        code = 'pass\n'
    return code, end
def do_label(source, start):
    """Translate a labelled statement (`name: stmt`).

    Labelled break/continue are emulated with per-label exception classes:
    the labelled statement is wrapped in try/except blocks that catch the
    corresponding break (and, for loops, continue) exceptions raised by
    do_continue / do_break.
    """
    label, end = parse_identifier(source, start)
    end = pass_white(source, end)
    #now source[end] must be :
    assert source[end]==':'
    end += 1
    inside, end = do_statement(source, end)
    if inside is None:
        raise SyntaxError('Missing statement after label')
    defs = ''
    if inside.startswith('while ') or inside.startswith('for ') or inside.startswith('#for'):
        # we have to add contine label as well...
        # 3 or 1 since #for loop type has more lines before real for.
        sep = 1 if not inside.startswith('#for') else 3
        cont_label = get_continue_label(label)
        temp = inside.split('\n')
        # Wrap the loop body (but not the loop header) so that raising the
        # continue-exception restarts the loop iteration.
        injected = 'try:\n'+'\n'.join(temp[sep:])
        injected += 'except %s:\n pass\n'%cont_label
        inside = '\n'.join(temp[:sep])+'\n'+indent(injected)
        defs += 'class %s(Exception): pass\n'%cont_label
    break_label = get_break_label(label)
    # Wrap the whole statement so the break-exception exits it.
    inside = 'try:\n%sexcept %s:\n pass\n'% (indent(inside), break_label)
    defs += 'class %s(Exception): pass\n'%break_label
    return defs + inside, end
def do_for(source, start):
    """Translate both JS for-loop forms.

    `for (init; cond; post) stmt` becomes a '#for JS loop' marked while
    loop; `for (x in y)` / `for (var x in y)` becomes a Python for loop
    that assigns each key via var.put().
    """
    start += 3 # pass for
    entered = start
    bra, start = pass_bracket(source, start , '()')
    inside, start = do_statement(source, start)
    if inside is None:
        raise SyntaxError('Missing statement after for')
    bra = bra[1:-1]
    if ';' in bra:
        # Classic C-style for loop: init; condition; post.
        init = argsplit(bra, ';')
        if len(init)!=3:
            raise SyntaxError('Invalid for statement')
        args = []
        for i, item in enumerate(init):
            end = pass_white(item, 0)
            if end==len(item):
                # Empty section: empty init/post, but condition defaults to 1.
                args.append('' if i!=1 else '1')
                continue
            if not i and except_keyword(item, end, 'var') is not None:
                # var statement
                args.append(do_var(item, end)[0])
                continue
            args.append(do_expression(item, end)[0])
        return '#for JS loop\n%swhile %s:\n%s%s\n' %(args[0], args[1].strip(), indent(inside), indent(args[2])), start
    # iteration
    end = pass_white(bra, 0)
    register = False
    if bra[end:].startswith('var '):
        end+=3
        end = pass_white(bra, end)
        register = True
    name, end = parse_identifier(bra, end)
    if register:
        TO_REGISTER.append(name)
    end = pass_white(bra, end)
    if bra[end:end+2]!='in' or bra[end+2] in IDENTIFIER_PART:
        #print source[entered-10:entered+50]
        raise SyntaxError('Invalid "for x in y" statement')
    end+=2 # pass in
    exp = exp_translator(bra[end:])
    res = 'for temp in %s:\n' % exp
    res += indent('var.put(%s, temp)\n' % name.__repr__()) + indent(inside)
    return res, start
# todo - IMPORTANT
def do_continue(source, start, name='continue'):
    """Translate `continue;` / `break;` with an optional label.

    Unlabelled forms map directly to the Python keyword; labelled forms
    raise the label's dedicated exception class (see do_label).
    """
    start += len(name) #pass continue
    start = pass_white(source, start)
    if start<len(source) and source[start] == ';':
        return '%s\n'%name, start+1
    # labeled statement or error
    label, start = parse_identifier(source, start)
    start = pass_white(source, start)
    if start<len(source) and source[start] != ';':
        raise SyntaxError('Missing ; after label name in %s statement'%name)
    return 'raise %s("%s")\n' % (get_continue_label(label) if name=='continue' else get_break_label(label), name), start+1
def do_break(source, start):
    """Translate a JS `break` statement; shares do_continue's implementation."""
    return do_continue(source, start, 'break')
def do_return(source, start):
    """Translate a JS `return` statement.

    Returns (python_code, end) where end is one past the terminating ';'
    (or len(source) when no ';' follows).  A bare `return` yields
    `var.get('undefined')`.
    """
    start += 6  # pass 'return'
    end = source.find(';', start)+1
    # BUG FIX: str.find returns -1 on failure, so after +1 the sentinel is 0,
    # never -1 -- the old `if end==-1` test was unreachable and a return
    # without ';' sliced source[start:0].  Test falsiness instead, exactly
    # as the sibling do_throw already does.
    if not end:
        end = len(source)
    trans = exp_translator(source[start:end].rstrip(';'))
    return 'return %s\n' % (trans if trans else "var.get('undefined')"), end
# todo later?- Also important
def do_throw(source, start):
    """Translate a JS `throw expr;` statement.

    The thrown value is converted to a Python exception via JsToPyException
    and raised.  Returns (python_code, end).
    """
    start += 5 # pass throw
    end = source.find(';', start)+1
    # find() failure gives 0 after the +1 -- falsy, so fall back to EOF.
    if not end:
        end = len(source)
    trans = exp_translator(source[start:end].rstrip(';'))
    if not trans:
        raise SyntaxError('Invalid throw statement: nothing to throw')
    res = 'PyJsTempException = JsToPyException(%s)\nraise PyJsTempException\n' % trans
    return res, end
def do_try(source, start):
    """Translate a JS try / catch / finally statement.

    The catch clause uses the TRY_CATCH template: the caught exception is
    bound to the catch identifier in the JS scope (saving and restoring any
    variable it shadows).  At least one of catch/finally must be present.
    """
    start += 3 # pass try
    block, start = do_block(source, start)
    result = 'try:\n%s' %indent(block)
    catch = except_keyword(source, start, 'catch')
    if catch:
        bra, catch = pass_bracket(source, catch, '()')
        bra = bra[1:-1]
        identifier, bra_end = parse_identifier(bra, 0)
        # Unique holder name avoids collisions when catch blocks nest
        # (hex-encoded identifier + random suffix; Py2-only encode('hex')).
        holder = 'PyJsHolder_%s_%d'%(identifier.encode('hex'), random.randrange(1e8))
        identifier = identifier.__repr__()
        bra_end = pass_white(bra, bra_end)
        if bra_end<len(bra):
            raise SyntaxError('Invalid content of catch statement')
        result += 'except PyJsException as PyJsTempException:\n'
        block, catch = do_block(source, catch)
        # fill in except ( catch ) block and remember to recover holder variable to its previous state
        result += indent(TRY_CATCH.replace('HOLDER', holder).replace('NAME', identifier).replace('BLOCK', indent(block)))
    # catch may be None here; Py2 allows max(None, int).
    start = max(catch, start)
    final = except_keyword(source, start, 'finally')
    if not (final or catch):
        raise SyntaxError('Try statement has to be followed by catch or finally')
    if not final:
        return result, start
    # translate finally statement
    block, start = do_block(source, final)
    return result + 'finally:\n%s' % indent(block), start
def do_debugger(source, start):
    """Translate JS `debugger;` into a Python no-op (the statement is ignored)."""
    pos = pass_white(source, start + 8)  # skip the 'debugger' keyword
    # Consume an optional trailing semicolon.
    if pos < len(source) and source[pos] == ';':
        pos += 1
    return 'pass\n', pos
# TODO: automatic semicolon insertion (ASI) is not handled for switch.
# Least important
def do_switch(source, start):
    """Translate a JS switch statement.

    Emitted as a single-pass `while 1:` loop: CONDITION holds the switch
    value, SWITCHED records fall-through, each case becomes an `if` that
    fires when already switched or when PyJsStrictEq matches, and a final
    `break` exits the loop.
    """
    start += 6 # pass switch
    code = 'while 1:\n' + indent('SWITCHED = False\nCONDITION = (%s)\n')
    # parse value of check
    val, start = pass_bracket(source, start, '()')
    if val is None:
        raise SyntaxError('Missing () after switch statement')
    if not val.strip():
        raise SyntaxError('Missing content inside () after switch statement')
    code = code % exp_translator(val)
    bra, start = pass_bracket(source, start, '{}')
    if bra is None:
        raise SyntaxError('Missing block {} after switch statement')
    bra_pos = 0
    bra = bra[1:-1] + ';'
    while True:
        case = except_keyword(bra, bra_pos, 'case')
        default = except_keyword(bra, bra_pos, 'default')
        assert not (case and default)
        if case or default: # this ?: expression makes things much harder....
            case_code = None
            if case:
                case_code = 'if SWITCHED or PyJsStrictEq(CONDITION, %s):\n'
                # we are looking for a first : with count 1. ? gives -1 and : gives +1.
                count = 0
                for pos, e in enumerate(bra[case:], case):
                    if e=='?':
                        count -= 1
                    elif e==':':
                        count += 1
                        if count==1:
                            break
                else:
                    raise SyntaxError('Missing : token after case in switch statement')
                case_condition = exp_translator(bra[case:pos]) # switch {case CONDITION: statements}
                case_code = case_code % case_condition
                case = pos + 1
            if default:
                case = except_token(bra, default, ':')
                case_code = 'if True:\n'
            # now parse case statements (things after ':' )
            cand, case = do_statement(bra, case)
            while cand:
                case_code += indent(cand)
                cand, case = do_statement(bra, case)
            # Mark fall-through so subsequent cases execute unconditionally.
            case_code += indent('SWITCHED = True\n')
            code += indent(case_code)
            bra_pos = case
        else:
            break
    # prevent infinite loop :)
    code += indent('break\n')
    return code, start
def do_pyimport(source, start):
    """Translate the non-standard `pyimport lib;` extension statement.

    Emits an `import lib as PyImport_lib` followed by a var.pyimport() call
    that exposes the module to the JS scope.  Raises SyntaxError when the
    identifier is not a valid Python module name.
    """
    start += 8
    lib, start = parse_identifier(source, start)
    jlib = 'PyImport_%s' % lib
    code = 'import %s as %s\n' % (lib, jlib)
    #check whether valid lib name...
    try:
        compile(code, '', 'exec')
    except (SyntaxError, ValueError):
        # FIX: was a bare `except:`, which also swallowed KeyboardInterrupt
        # and SystemExit.  compile() only raises SyntaxError/ValueError here.
        raise SyntaxError('Invalid Python module name (%s) in pyimport statement'%lib)
    # var.pyimport will handle module conversion to PyJs object
    code += 'var.pyimport(%s, %s)\n' % (repr(lib), jlib)
    return code, start
def do_with(source, start):
    """JS `with` statement -- deliberately unsupported by this translator."""
    raise NotImplementedError('With statement is not implemented yet :(')
# Dispatch table mapping a statement's leading keyword to its translator;
# consulted by do_statement (longest applicable keyword wins by exact
# keyword-boundary check there).
KEYWORD_METHODS = {'do': do_dowhile,
                   'while': do_while,
                   'if': do_if,
                   'throw': do_throw,
                   'return': do_return,
                   'continue': do_continue,
                   'break': do_break,
                   'try': do_try,
                   'for': do_for,
                   'switch': do_switch,
                   'var': do_var,
                   'debugger': do_debugger, # this one does not do anything
                   'with': do_with,
                   'pyimport': do_pyimport
                   }
#Also not specific statements (harder to detect)
# Block {}
# Expression or Empty Statement
# Label
#
# Its easy to recognize block but harder to distinguish between label and expression statement
def translate_flow(source):
    """Translate JS control flow in *source* into Python source.

    *source* must not contain array/object/constant/function literals
    (those are substituted earlier in the pipeline).  Returns a tuple
    (python_source, variables_to_register).
    """
    global TO_REGISTER
    # Reset the module-level registry; do_var/do_for append to it.
    TO_REGISTER = []
    return do_block('{%s}'%source, 0)[0], TO_REGISTER
# Ad-hoc manual smoke test (note: Python 2 print statement).
if __name__=='__main__':
    #print do_dowhile('do {} while(k+f)', 0)[0]
    #print 'e: "%s"'%do_expression('++(c?g:h); mj', 0)[0]
    print translate_flow('a; yimport test')[0]
|
gui | buttonmap | # This file is part of MyPaint.
# Copyright (C) 2012-2019 by the MyPaint Development Team.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
"""Button press mapping."""
from __future__ import division, print_function
import logging
from gettext import gettext as _
import lib.xml
from lib.gibindings import Gdk, GObject, Gtk, Pango
from lib.pycompat import unicode
from . import widgets
logger = logging.getLogger(__name__)
def button_press_name(button, mods):
    """Convert a button number & modifier mask to a prefs-storable string.

    Analogous to `Gtk.accelerator_name()`.  Buttonpress names look similar
    to GDK accelerator names, e.g. ``<Control><Shift>Button2`` or
    ``<Primary><Alt>Button4`` for newer versions of GTK.  If the button is
    equal to zero (see `button_press_parse()`), `None` is returned.
    """
    button_num = int(button)
    mod_bits = int(mods)
    if button_num <= 0:
        return None
    mod_mask = Gdk.ModifierType(mod_bits)
    prefix = Gtk.accelerator_name(0, mod_mask)
    return prefix + "Button%d" % (button_num,)
def button_press_displayname(button, mods, shorten=False):
    """Convert a button number & modifier mask to a localized unicode string.

    Returns None for a non-positive button number, mirroring
    `button_press_name()`.
    """
    button = int(button)
    mods = int(mods)
    if button <= 0:
        return None
    modif_label = unicode(Gtk.accelerator_get_label(0, Gdk.ModifierType(mods)))
    separator = "+" if modif_label else ""
    # TRANSLATORS: "Button" refers to a mouse button
    # TRANSLATORS: It is part of a button map label.
    mouse_button_label = _("Button")
    if shorten:
        # TRANSLATORS: abbreviated "Button <number>" for forms like "Alt+Btn1"
        mouse_button_label = _("Btn")
    return "{modifiers}{plus}{btn}{button_number}".format(
        modifiers=modif_label,
        plus=separator,
        btn=mouse_button_label,
        button_number=button,
    )
def button_press_parse(name):
    """Convert a button press name to a (button number, modifier mask) pair.

    Analogous to `Gtk.accelerator_parse()`.  Parses strings produced by
    `button_press_name()`.  On failure both returned values are 0 (zero).
    """
    if name is None:
        return (0, 0)
    text = str(name)
    try:
        mods_s, button_s = text.split("Button", 1)
        button = int(button_s) if button_s != "" else 0
    except ValueError:
        # No "Button<N>" suffix, or a non-numeric button number.
        button = 0
        mods = Gdk.ModifierType(0)
    else:
        _keyval_ignored, mods = Gtk.accelerator_parse(mods_s)
    return button, mods
def get_handler_object(app, action_name):
    """Find a (nominal) handler for a named buttonmap action.

    :param app: MyPaint application instance to use for the lookup
    :param action_name: machine-readable action name string.
    :rtype: tuple of the form (handler_type, handler_obj)

    Defined handler_type strings and their handler_objs are: "mode_class"
    (an instantiable InteractionMode class), "popup_state" (an activatable
    popup state), "gtk_action" (an activatable Gtk.Action), or "no_handler"
    (the value None).
    """
    from gui.mode import InteractionMode, ModeRegistry
    mode_class = ModeRegistry.get_mode_class(action_name)
    if mode_class is not None:
        assert issubclass(mode_class, InteractionMode)
        return ("mode_class", mode_class)
    popup_states = app.drawWindow.popup_states
    if action_name in popup_states:
        return ("popup_state", popup_states[action_name])
    action = app.find_action(action_name)
    if action is None:
        return ("no_handler", None)
    return ("gtk_action", action)
class ButtonMapping(object):
    """Button mapping table.

    An instance resides in the application, and is updated by the
    preferences window.
    """

    def __init__(self):
        super(ButtonMapping, self).__init__()
        self._mapping = {}      # {modifiers: {button: action_name}}
        self._modifiers = []    # flat [(modifiers, button, action_name), ...]

    def update(self, mapping):
        """Updates from a prefs sub-hash.

        :param mapping: dict of button_press_name()s to action names.

        A reference is not maintained.
        """
        self._mapping = {}
        self._modifiers = []
        for bp_name, action_name in mapping.items():
            button, modifiers = button_press_parse(bp_name)
            self._mapping.setdefault(modifiers, {})[button] = action_name
            self._modifiers.append((modifiers, button, action_name))

    def get_unique_action_for_modifiers(self, modifiers, button=1):
        """Gets a single, unique action name for a modifier mask.

        :param modifiers: a bitmask of GDK Modifier Constants
        :param button: the button number to require; defaults to 1.
        :rtype: string containing an action name, or None
        """
        modmap = self._mapping.get(modifiers)
        if modmap is None or len(modmap) > 1:
            return None
        return modmap.get(button)

    def lookup(self, modifiers, button):
        """Look up a single pointer binding efficiently.

        :param modifiers: a bitmask of GDK Modifier Constants.
        :type modifiers: GdkModifierType or int
        :param button: a button number
        :type button: int
        :rtype: string containing an action name, or None
        """
        modmap = self._mapping.get(modifiers)
        if modmap is None:
            return None
        return modmap.get(button, None)

    def lookup_possibilities(self, modifiers):
        """Find potential actions, reachable via buttons or more modifiers.

        :param modifiers: a bitmask of GDK Modifier Constants.
        :type modifiers: GdkModifierType or int
        :rtype: list

        Returns those actions which can be reached from the currently held
        modifier keys by either pressing a pointer button right now, or by
        holding down additional modifiers and then pressing a pointer
        button.  If `modifiers` is empty, an empty list will be returned.
        Each element in the returned list is a 3-tuple of the form
        ``(MODS, BUTTON, ACTION NAME)``.
        """
        # This enables us to display:
        #   "<Ctrl>: with <Shift>+Button1, ACTION1; with Button3, ACTION2."
        # while the modifiers are pressed, but the button isn't.
        possibilities = []
        for possible, btn, action in self._modifiers:
            # Skip bindings whose modifiers are not a superset of the held ones.
            if (modifiers & possible) != modifiers:
                continue
            # Keep exact matches plus those reachable via extra modifiers.
            is_exact = (modifiers == possible)
            if is_exact or (~modifiers & possible):
                possibilities.append((possible, btn, action))
        return possibilities
class ButtonMappingEditor(Gtk.EventBox):
    """Editor for a prefs hash of pointer bindings mapped to action strings."""

    __gtype_name__ = "ButtonMappingEditor"

    def __init__(self):
        """Initialise."""
        super(ButtonMappingEditor, self).__init__()
        import gui.application
        self.app = gui.application.get_app()
        self.actions = set()
        self.default_action = None
        self.bindings = None #: dict of bindings being edited
        self.vbox = Gtk.VBox()
        self.add(self.vbox)
        # Display strings for action names
        self.action_labels = dict()
        # Model: combo cellrenderer's liststore
        ls = Gtk.ListStore(GObject.TYPE_STRING, GObject.TYPE_STRING)
        self.action_liststore = ls
        self.action_liststore_value_column = 0
        self.action_liststore_display_column = 1
        # Model: main list's liststore
        # This is reflected into self.bindings when it changes
        column_types = [GObject.TYPE_STRING] * 3
        ls = Gtk.ListStore(*column_types)
        self.action_column = 0
        self.bp_column = 1
        self.bpd_column = 2
        for sig in ("row-changed", "row-deleted", "row_inserted"):
            ls.connect(sig, self._liststore_updated_cb)
        self.liststore = ls
        # Bindings hash observers, external interface
        self.bindings_observers = [] #: List of cb(editor) callbacks
        # View: treeview
        scrolledwin = Gtk.ScrolledWindow()
        scrolledwin.set_shadow_type(Gtk.ShadowType.IN)
        tv = Gtk.TreeView()
        tv.set_model(ls)
        scrolledwin.add(tv)
        self.vbox.pack_start(scrolledwin, True, True, 0)
        tv.set_size_request(480, 320)
        tv.set_headers_clickable(True)
        self.treeview = tv
        self.selection = tv.get_selection()
        self.selection.connect("changed", self._selection_changed_cb)
        # Column 0: action name
        cell = Gtk.CellRendererCombo()
        cell.set_property("model", self.action_liststore)
        cell.set_property("text-column", self.action_liststore_display_column)
        cell.set_property("mode", Gtk.CellRendererMode.EDITABLE)
        cell.set_property("editable", True)
        cell.set_property("has-entry", False)
        cell.connect("changed", self._action_cell_changed_cb)
        # TRANSLATORS: Name of first column in the button map preferences.
        # TRANSLATORS: Refers to an action bound to a mod+button combination.
        col = Gtk.TreeViewColumn(_("Action"), cell)
        col.set_cell_data_func(cell, self._liststore_action_datafunc)
        col.set_min_width(150)
        col.set_resizable(False)
        col.set_expand(False)
        col.set_sort_column_id(self.action_column)
        tv.append_column(col)
        # Column 1: button press
        cell = Gtk.CellRendererText()
        cell.set_property("ellipsize", Pango.EllipsizeMode.END)
        cell.set_property("mode", Gtk.CellRendererMode.EDITABLE)
        cell.set_property("editable", True)
        cell.connect("edited", self._bp_cell_edited_cb)
        cell.connect("editing-started", self._bp_cell_editing_started_cb)
        # TRANSLATORS: Name of second column in the button map preferences.
        # TRANSLATORS: Column lists mod+button combinations (bound to actions)
        # TRANSLATORS: E.g. Button1 or Ctrl+Button2 or Alt+Button3
        col = Gtk.TreeViewColumn(_("Button press"), cell)
        col.add_attribute(cell, "text", self.bpd_column)
        col.set_expand(True)
        col.set_resizable(True)
        col.set_min_width(200)
        col.set_sort_column_id(self.bpd_column)
        tv.append_column(col)
        # List editor toolbar
        list_tools = Gtk.Toolbar()
        list_tools.set_style(Gtk.ToolbarStyle.ICONS)
        list_tools.set_icon_size(widgets.ICON_SIZE_LARGE)
        context = list_tools.get_style_context()
        context.add_class("inline-toolbar")
        self.vbox.pack_start(list_tools, False, False, 0)
        # Add binding
        btn = Gtk.ToolButton()
        btn.set_tooltip_text(_("Add a new binding"))
        btn.set_icon_name("mypaint-add-symbolic")
        btn.connect("clicked", self._add_button_clicked_cb)
        list_tools.add(btn)
        # Remove (inactive if list is empty)
        btn = Gtk.ToolButton()
        btn.set_icon_name("mypaint-remove-symbolic")
        btn.set_tooltip_text(_("Remove the current binding"))
        btn.connect("clicked", self._remove_button_clicked_cb)
        list_tools.add(btn)
        self.remove_button = btn
        # Guard flag: True while we rewrite the liststore ourselves, so the
        # row-change callbacks don't loop back into self.bindings.
        self._updating_model = False

    def set_actions(self, actions):
        """Sets the internal list of possible actions.

        :param actions: List of all possible action strings. The 0th
            entry in the list is the default.
        :type actions: indexable sequence
        """
        self.default_action = actions[0]
        self.actions = set(actions)
        # Sort by display label so the combo lists actions alphabetically.
        labels_list = sorted((self._get_action_label(a), a) for a in actions)
        self.action_liststore.clear()
        for label, act in labels_list:
            self.action_labels[act] = label
            self.action_liststore.append((act, label))

    def _liststore_action_datafunc(self, column, cell, model, iter, *user_data):
        """Cell data func: render the action's display label, not its name."""
        action_name = model.get_value(iter, self.action_column)
        label = self.action_labels.get(action_name, action_name)
        cell.set_property("text", label)

    def _get_action_label(self, action_name):
        # Get a displayable (and translated) string for an action name
        handler_type, handler = get_handler_object(self.app, action_name)
        action_label = action_name
        if handler_type == "gtk_action":
            action_label = handler.get_label()
        elif handler_type == "popup_state":
            action_label = handler.label
        elif handler_type == "mode_class":
            action_label = handler.get_name()
            # Prefer the corresponding GtkAction's label when one exists.
            if handler.ACTION_NAME is not None:
                action = self.app.find_action(handler.ACTION_NAME)
                if action is not None:
                    action_label = action.get_label()
        if action_label is None:
            action_label = ""  # Py3+: str cannot be compared to None
        return action_label

    def set_bindings(self, bindings):
        """Sets the mapping of binding names to actions.

        :param bindings: Mapping of pointer binding names to their actions. A
            reference is kept internally, and the entries will be modified.
        :type bindings: dict of bindings being edited

        The binding names in ``bindings`` will be canonicalized from the
        older ``<Control>`` prefix to ``<Primary>`` if supported by this Gtk.
        """
        tmp_bindings = dict(bindings)
        bindings.clear()
        # Round-trip each name through parse+name to canonicalize it.
        for bp_name, action_name in tmp_bindings.items():
            bp_name = button_press_name(*button_press_parse(bp_name))
            bindings[bp_name] = action_name
        self.bindings = bindings
        self._bindings_changed_cb()

    def _bindings_changed_cb(self):
        """Updates the editor list to reflect the prefs hash changing."""
        self._updating_model = True
        self.liststore.clear()
        for bp_name, action_name in self.bindings.items():
            bp_displayname = button_press_displayname(*button_press_parse(bp_name))
            self.liststore.append((action_name, bp_name, bp_displayname))
        self._updating_model = False
        self._update_list_buttons()

    def _liststore_updated_cb(self, ls, *args, **kwargs):
        """Reflect user edits of the liststore back into self.bindings."""
        if self._updating_model:
            return
        iter = ls.get_iter_first()
        self.bindings.clear()
        while iter is not None:
            bp_name, action = ls.get(iter, self.bp_column, self.action_column)
            # Ignore incomplete rows (unknown action or no button press yet).
            if action in self.actions and bp_name is not None:
                self.bindings[bp_name] = action
            iter = ls.iter_next(iter)
        self._update_list_buttons()
        # Notify external observers of the bindings change.
        for func in self.bindings_observers:
            func(self)

    def _selection_changed_cb(self, selection):
        """Keep the toolbar buttons in sync with the selection."""
        if self._updating_model:
            return
        self._update_list_buttons()

    def _update_list_buttons(self):
        """Enable/disable the Remove button based on list & selection state."""
        is_populated = len(self.bindings) > 0
        has_selected = self.selection.count_selected_rows() > 0
        self.remove_button.set_sensitive(is_populated and has_selected)

    def _add_button_clicked_cb(self, button):
        """Add a new row and immediately start editing its action cell."""
        added_iter = self.liststore.append((self.default_action, None, None))
        self.selection.select_iter(added_iter)
        added_path = self.liststore.get_path(added_iter)
        focus_col = self.treeview.get_column(self.action_column)
        self.treeview.set_cursor_on_cell(added_path, focus_col, None, True)

    def _remove_button_clicked_cb(self, button):
        """Remove the currently selected binding row, if any."""
        if self.selection.count_selected_rows() > 0:
            ls, selected = self.selection.get_selected()
            ls.remove(selected)

    ## "Controller" callbacks

    def _action_cell_changed_cb(self, combo, path_string, new_iter, *etc):
        """Store a newly chosen action; then prompt for its button press."""
        action_name = self.action_liststore.get_value(
            new_iter, self.action_liststore_value_column
        )
        iter = self.liststore.get_iter(path_string)
        self.liststore.set_value(iter, self.action_column, action_name)
        self.treeview.columns_autosize()
        # If we don't have a button-press name yet, edit that next
        bp_name = self.liststore.get_value(iter, self.bp_column)
        if bp_name is None:
            focus_col = self.treeview.get_column(self.bp_column)
            tree_path = Gtk.TreePath(path_string)
            self.treeview.set_cursor_on_cell(tree_path, focus_col, None, True)

    def _bp_cell_edited_cb(self, cell, path, bp_name):
        """Commit an edited button-press name plus its display form."""
        iter = self.liststore.get_iter(path)
        bp_displayname = button_press_displayname(*button_press_parse(bp_name))
        self.liststore.set_value(iter, self.bp_column, bp_name)
        self.liststore.set_value(iter, self.bpd_column, bp_displayname)

    def _bp_cell_editing_started_cb(self, cell, editable, path):
        """Replace in-place text editing with a modal capture dialog.

        The dialog records the next modified button press made inside it
        (see `_bp_edit_box_button_press_cb`) rather than accepting typed
        text.
        """
        iter = self.liststore.get_iter(path)
        action_name = self.liststore.get_value(iter, self.action_column)
        bp_name = self.liststore.get_value(iter, self.bp_column)
        bp_displayname = button_press_displayname(*button_press_parse(bp_name))
        # Typed input is not used; the dialog captures presses directly.
        editable.set_sensitive(False)
        dialog = Gtk.Dialog()
        dialog.set_modal(True)
        dialog.set_title(_("Edit binding for '%s'") % action_name)
        dialog.set_transient_for(self.get_toplevel())
        dialog.set_position(Gtk.WindowPosition.CENTER_ON_PARENT)
        dialog.add_buttons(
            Gtk.STOCK_CANCEL, Gtk.ResponseType.CANCEL, Gtk.STOCK_OK, Gtk.ResponseType.OK
        )
        dialog.set_default_response(Gtk.ResponseType.OK)
        dialog.connect("response", self._bp_edit_dialog_response_cb, editable)
        dialog.ok_btn = dialog.get_widget_for_response(Gtk.ResponseType.OK)
        dialog.ok_btn.set_sensitive(bp_name is not None)
        evbox = Gtk.EventBox()
        evbox.set_border_width(12)
        evbox.connect(
            "button-press-event", self._bp_edit_box_button_press_cb, dialog, editable
        )
        evbox.connect("enter-notify-event", self._bp_edit_box_enter_cb)
        table = Gtk.Table(3, 2)
        table.set_row_spacings(12)
        table.set_col_spacings(12)
        row = 0
        label = Gtk.Label()
        label.set_alignment(0, 0.5)
        # TRANSLATORS: Part of interface when adding a new button map binding.
        # TRANSLATORS: It's a label for the action part of the combination.
        # TRANSLATORS: Probably always the same as the column name
        # TRANSLATORS: "Action" with a trailing ":" or lang-specific symbol
        label.set_text(_("Action:"))
        table.attach(label, 0, 1, row, row + 1, Gtk.AttachOptions.FILL)
        label = Gtk.Label()
        label.set_alignment(0, 0.5)
        label.set_text(str(action_name))
        table.attach(
            label, 1, 2, row, row + 1, Gtk.AttachOptions.FILL | Gtk.AttachOptions.EXPAND
        )
        row += 1
        label = Gtk.Label()
        label.set_alignment(0, 0.5)
        # TRANSLATORS: Part of interface when adding a new button map binding.
        # TRANSLATORS: It's a label for the mod+button part of the combination.
        # TRANSLATORS: Probably always the same as "Button press" (column name)
        # TRANSLATORS: but with a trailing ":" or other lang-specific symbol.
        label.set_text(_("Button press:"))
        table.attach(label, 0, 1, row, row + 1, Gtk.AttachOptions.FILL)
        label = Gtk.Label()
        label.set_alignment(0, 0.5)
        label.set_text(str(bp_displayname))
        # Stash editing state on the dialog itself for the callbacks.
        dialog.bp_name = bp_name
        dialog.bp_name_orig = bp_name
        dialog.bp_label = label
        table.attach(
            label, 1, 2, row, row + 1, Gtk.AttachOptions.FILL | Gtk.AttachOptions.EXPAND
        )
        row += 1
        label = Gtk.Label()
        label.set_size_request(300, 75)
        label.set_alignment(0, 0)
        label.set_line_wrap(True)
        dialog.hint_label = label
        self._bp_edit_dialog_set_standard_hint(dialog)
        table.attach(
            label,
            0,
            2,
            row,
            row + 1,
            Gtk.AttachOptions.FILL | Gtk.AttachOptions.EXPAND,
            Gtk.AttachOptions.FILL | Gtk.AttachOptions.EXPAND,
            0,
            12,
        )
        evbox.add(table)
        dialog.get_content_area().pack_start(evbox, True, True, 0)
        evbox.show_all()
        dialog.show()

    def _bp_edit_dialog_set_error(self, dialog, markup):
        """Show an error message (red) in the capture dialog's hint area."""
        dialog.hint_label.set_markup("<span foreground='red'>%s</span>" % markup)

    def _bp_edit_dialog_set_standard_hint(self, dialog):
        """Show the default instruction text in the capture dialog."""
        markup = _(
            "Hold down modifier keys, and press a button "
            "over this text to set a new binding."
        )
        dialog.hint_label.set_markup(markup)

    def _bp_edit_box_enter_cb(self, evbox, event):
        """Show a crosshair cursor over the press-capture area."""
        window = evbox.get_window()
        disp = window.get_display()
        try:  # Wayland themes are a bit incomplete
            cursor = Gdk.Cursor.new_for_display(disp, Gdk.CursorType.CROSSHAIR)
            window.set_cursor(cursor)
        except Exception:
            logger.exception("Cursor setting failed")  # and otherwise ignore

    def _bp_edit_dialog_response_cb(self, dialog, response_id, editable):
        """On OK, commit the captured press name via the cell editable."""
        if response_id == Gtk.ResponseType.OK:
            if dialog.bp_name is not None:
                editable.set_text(dialog.bp_name)
            editable.editing_done()
        editable.remove_widget()
        dialog.destroy()

    def _bp_edit_box_button_press_cb(self, evbox, event, dialog, editable):
        """Capture a mod+button press, validating it against existing bindings."""
        modifiers = event.state & Gtk.accelerator_get_default_mod_mask()
        bp_name = button_press_name(event.button, modifiers)
        bp_displayname = button_press_displayname(event.button, modifiers)
        # Plain Button1 is reserved for normal interaction and can't be bound.
        if modifiers == 0 and event.button == 1:
            self._bp_edit_dialog_set_error(
                dialog,
                # TRANSLATORS: "fixed" in the sense of "static" -
                # TRANSLATORS: something which cannot be changed
                _(
                    "{button} cannot be bound without modifier keys "
                    "(its meaning is fixed, sorry)"
                ).format(
                    button=lib.xml.escape(bp_displayname),
                ),
            )
            dialog.ok_btn.set_sensitive(False)
            return
        action = None
        # Reject presses already bound to a different action (unless the
        # user re-entered the row's original binding).
        if bp_name != dialog.bp_name_orig:
            action = self.bindings.get(bp_name, None)
        if action is not None:
            action_label = self.action_labels.get(action, action)
            self._bp_edit_dialog_set_error(
                dialog,
                _(
                    "{button_combination} is already bound "
                    "to the action '{action_name}'"
                ).format(
                    button_combination=lib.xml.escape(str(bp_displayname)),
                    action_name=lib.xml.escape(str(action_label)),
                ),
            )
            dialog.ok_btn.set_sensitive(False)
        else:
            self._bp_edit_dialog_set_standard_hint(dialog)
            dialog.bp_name = bp_name
            dialog.bp_label.set_text(str(bp_displayname))
            dialog.ok_btn.set_sensitive(True)
            dialog.ok_btn.grab_focus()
|
extractor | storyfire | # coding: utf-8
from __future__ import unicode_literals
import functools
from ..utils import OnDemandPagedList, int_or_none, smuggle_url # HEADRequest,
from .common import InfoExtractor
class StoryFireBaseIE(InfoExtractor):
    """Shared helpers for the StoryFire extractors.

    StoryFire hosts its media on Vimeo, so extraction is delegated to the
    Vimeo extractor via a ``url_transparent`` result.
    (Cleanup: removed a large block of long-dead commented-out format
    extraction code.)
    """

    _VALID_URL_BASE = r"https?://(?:www\.)?storyfire\.com/"

    def _call_api(self, path, video_id, resource, query=None):
        """Fetch JSON metadata from the StoryFire app API."""
        return self._download_json(
            "https://storyfire.com/app/%s/%s" % (path, video_id),
            video_id,
            "Downloading %s JSON metadata" % resource,
            query=query,
        )

    def _parse_video(self, video):
        """Map a StoryFire video dict onto a url_transparent Vimeo result.

        Raises KeyError if 'title' or 'vimeoVideoURL' is missing — both are
        mandatory for extraction.
        """
        title = video["title"]
        vimeo_id = self._search_regex(
            r"https?://player\.vimeo\.com/external/(\d+)",
            video["vimeoVideoURL"],
            "vimeo id",
        )
        uploader_id = video.get("hostID")
        return {
            "_type": "url_transparent",
            "id": vimeo_id,
            "title": title,
            "description": video.get("description"),
            # The Vimeo player rejects requests without a StoryFire Referer,
            # so smuggle the header through to the Vimeo extractor.
            "url": smuggle_url(
                "https://player.vimeo.com/video/" + vimeo_id,
                {
                    "http_headers": {
                        "Referer": "https://storyfire.com/",
                    }
                },
            ),
            "thumbnail": video.get("storyImage"),
            "view_count": int_or_none(video.get("views")),
            "like_count": int_or_none(video.get("likesCount")),
            "comment_count": int_or_none(video.get("commentsCount")),
            "duration": int_or_none(video.get("videoDuration")),
            "timestamp": int_or_none(video.get("publishDate")),
            "uploader": video.get("username"),
            "uploader_id": uploader_id,
            "uploader_url": "https://storyfire.com/user/%s/video" % uploader_id
            if uploader_id
            else None,
            "episode_number": int_or_none(
                video.get("episodeNumber") or video.get("episode_number")
            ),
        }
class StoryFireIE(StoryFireBaseIE):
    """Extractor for a single StoryFire video-details page."""

    _VALID_URL = StoryFireBaseIE._VALID_URL_BASE + r"video-details/(?P<id>[0-9a-f]{24})"
    _TEST = {
        "url": "https://storyfire.com/video-details/5df1d132b6378700117f9181",
        "md5": "caec54b9e4621186d6079c7ec100c1eb",
        "info_dict": {
            "id": "378954662",
            "ext": "mp4",
            "title": "Buzzfeed Teaches You About Memes",
            "uploader_id": "ntZAJFECERSgqHSxzonV5K2E89s1",
            "timestamp": 1576129028,
            "description": "md5:0b4e28021548e144bed69bb7539e62ea",
            "uploader": "whang!",
            "upload_date": "20191212",
            "duration": 418,
            "view_count": int,
            "like_count": int,
            "comment_count": int,
        },
        "params": {
            "skip_download": True,
        },
        "expected_warnings": ["Unable to download JSON metadata"],
    }

    def _real_extract(self, url):
        video_id = self._match_id(url)
        detail = self._call_api("generic/video-detail", video_id, "video")
        return self._parse_video(detail["video"])
class StoryFireUserIE(StoryFireBaseIE):
    """Paged playlist extractor for a StoryFire user's videos."""

    _VALID_URL = StoryFireBaseIE._VALID_URL_BASE + r"user/(?P<id>[^/]+)/video"
    _TEST = {
        "url": "https://storyfire.com/user/UQ986nFxmAWIgnkZQ0ftVhq4nOk2/video",
        "info_dict": {
            "id": "UQ986nFxmAWIgnkZQ0ftVhq4nOk2",
        },
        "playlist_mincount": 151,
    }
    _PAGE_SIZE = 20

    def _fetch_page(self, user_id, page):
        """Yield parsed entries for one API page (0-based *page*)."""
        page_label = "page %d" % (page + 1)
        query = {
            "skip": page * self._PAGE_SIZE,
        }
        response = self._call_api("publicVideos", user_id, page_label, query)
        for video in response["videos"]:
            yield self._parse_video(video)

    def _real_extract(self, url):
        user_id = self._match_id(url)
        fetch = functools.partial(self._fetch_page, user_id)
        entries = OnDemandPagedList(fetch, self._PAGE_SIZE)
        return self.playlist_result(entries, user_id)
class StoryFireSeriesIE(StoryFireBaseIE):
    """Playlist extractor for the videos attached to a StoryFire series."""

    _VALID_URL = (
        StoryFireBaseIE._VALID_URL_BASE + r"write/series/stories/(?P<id>[^/?&#]+)"
    )
    _TESTS = [
        {
            "url": "https://storyfire.com/write/series/stories/-Lq6MsuIHLODO6d2dDkr/",
            "info_dict": {
                "id": "-Lq6MsuIHLODO6d2dDkr",
            },
            "playlist_mincount": 13,
        },
        {
            "url": "https://storyfire.com/write/series/stories/the_mortal_one/",
            "info_dict": {
                "id": "the_mortal_one",
            },
            "playlist_count": 0,
        },
    ]

    def _extract_videos(self, stories):
        """Yield parsed results for every story that carries a video."""
        for story in stories.values():
            if not story.get("hasVideo"):
                continue
            yield self._parse_video(story)

    def _real_extract(self, url):
        series_id = self._match_id(url)
        stories = self._call_api("seriesStories", series_id, "series stories")
        return self.playlist_result(self._extract_videos(stories), series_id)
|
controllers | editor_rect | # -*- coding: utf-8 -*-
#
# Copyright (C) 2015-2021 by Ihor E. Novikov
# Copyright (C) 2021 by Maxim S. Barabash
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
from generic import AbstractController
from sk1 import _, config, events, modes
from uc2 import libgeom
# "<index><subindex>" names of corner-rounding handles that move along the
# horizontal axis; apply_rounding uses this to pick the drag direction.
H_ORIENT = ["00", "11", "20", "31"]
# Tolerance for float comparisons (parallel-line / zero-length checks).
EPSILON = 0.000001
class RectEditor(AbstractController):
    """Canvas controller for interactive rectangle editing.

    Dragging a side midpoint resizes the rect; dragging a corner control
    point changes the corner-rounding coefficient(s).

    NOTE(review): this chunk arrived with indentation normalized away; the
    nesting below is a careful reconstruction — verify against upstream
    before refactoring.
    """

    mode = modes.RECT_EDITOR_MODE
    target = None        # rect object currently being edited
    points = []          # ControlPoint handles (corner rounding)
    midpoints = []       # MidPoint handles (side resize)
    selected_obj = None  # other object picked by a click, if any
    resizing = False     # True while a midpoint drag is in progress
    res_index = 0        # which midpoint is being dragged (0..3)
    rounding = False     # True while a rounding-handle drag is in progress
    rnd_index = 0        # corner index being dragged (0..3)
    rnd_subindex = 0     # which of the corner's two handles (0/1)
    orig_rect = []       # rect geometry at drag start (undo baseline)
    orig_corners = []    # corner coefficients at drag start (undo baseline)
    start = []           # drag reference points captured on mouse_down
    stop = []
    start2 = []
    stop2 = []

    def __init__(self, canvas, presenter):
        AbstractController.__init__(self, canvas, presenter)

    def start_(self):
        """Enter the mode: grab the selected rect and build its handles."""
        self.snap = self.presenter.snap
        self.target = self.selection.objs[0]
        self.resizing = False
        self.rounding = False
        self.selected_obj = None
        self.update_points()
        self.selection.clear()
        msg = _("Rectangle in editing")
        events.emit(events.APP_STATUS, msg)

    def stop_(self):
        """Leave the mode: restore the selection to the edited rect."""
        self.selection.set(
            [
                self.target,
            ]
        )
        self.target = None
        self.selected_obj = None

    def update_points(self):
        """Rebuild the midpoint and rounding handles from the target rect.

        For each corner: coefficient 0.0 yields a single handle, 1.0 a
        special fully-rounded layout, anything else two handles
        (subindex 0 and 1).
        """
        self.points = []
        self.midpoints = []
        mps = self.target.get_midpoints()
        for item in mps:
            self.midpoints.append(MidPoint(self.canvas, self.target, item))
        corner_points = self.target.get_corner_points()
        stops = self.target.get_stops()
        for index in range(4):
            if not self.target.corners[index]:
                # Unrounded corner: one handle anchored at the corner point.
                start = corner_points[index]
                stop = stops[index][0]
                stop2 = stops[index - 1]
                if len(stop2) == 2:
                    stop2 = stop2[1]
                else:
                    stop2 = stop2[0]
                coef = self.target.corners[index]
                self.points.append(
                    ControlPoint(
                        self.canvas,
                        self.target,
                        start,
                        stop,
                        stop2=stop2,
                        coef=coef,
                        index=index,
                    )
                )
            elif self.target.corners[index] == 1.0:
                # Fully rounded corner.
                start = corner_points[index]
                stop = stops[index - 1]
                if len(stop) == 2:
                    stop = stop[1]
                    coef = self.target.corners[index]
                    self.points.append(
                        ControlPoint(
                            self.canvas,
                            self.target,
                            start,
                            stop,
                            coef=coef,
                            index=index,
                        )
                    )
                elif not self.target.corners[index - 1] == 1.0:
                    stop = stop[0]
                    coef = self.target.corners[index]
                    self.points.append(
                        ControlPoint(
                            self.canvas,
                            self.target,
                            start,
                            stop,
                            coef=coef,
                            index=index,
                        )
                    )
                stop = stops[index][0]
                start2 = []
                if len(stops[index]) == 1 and self.target.corners[index - 3] == 1.0:
                    start2 = corner_points[index - 3]
                coef = self.target.corners[index]
                self.points.append(
                    ControlPoint(
                        self.canvas,
                        self.target,
                        start,
                        stop,
                        start2=start2,
                        coef=coef,
                        index=index,
                        subindex=1,
                    )
                )
            else:
                # Partially rounded corner: two handles (subindex 0 and 1).
                start = corner_points[index]
                stop = stops[index - 1]
                if len(stop) == 2:
                    stop = stop[1]
                else:
                    stop = stop[0]
                coef = self.target.corners[index]
                self.points.append(
                    ControlPoint(
                        self.canvas, self.target, start, stop, coef=coef, index=index
                    )
                )
                stop = stops[index][0]
                self.points.append(
                    ControlPoint(
                        self.canvas,
                        self.target,
                        start,
                        stop,
                        coef=coef,
                        index=index,
                        subindex=1,
                    )
                )
        # NOTE(review): the two lines below duplicate the status message
        # emitted in start_() and look like an accidental duplication —
        # confirm against upstream before removing.
        msg = _("Rectangle in editing")
        events.emit(events.APP_STATUS, msg)

    # NOTE(review): duplicate definition — token-identical to stop_() above;
    # this later definition silently wins. Looks like a copy/paste artifact.
    def stop_(self):
        self.selection.set(
            [
                self.target,
            ]
        )
        self.target = None
        self.selected_obj = None

    def escape_pressed(self):
        """Esc leaves rect-edit mode."""
        self.canvas.set_mode()

    # ----- REPAINT
    def repaint(self):
        """Draw the editing frame plus every handle."""
        x0, y0, x1, y1 = self.target.cache_bbox
        p0 = self.canvas.point_doc_to_win([x0, y0])
        p1 = self.canvas.point_doc_to_win([x1, y1])
        self.canvas.renderer.draw_frame(p0, p1)
        for item in self.midpoints:
            item.repaint()
        for item in self.points:
            item.repaint()

    # ----- CHANGE APPLY
    def apply_resizing(self, point, final=False):
        """Resize the rect so the dragged side passes through *point*.

        The rect is stored as [x, y, w, h] (rect[0]/rect[1] origin is
        moved, rect[2]/rect[3] is width/height). If the drag crosses the
        opposite side, res_index flips to the opposite midpoint and the
        corner coefficients are mirrored to follow.

        final=True commits an undoable transaction via the *_final API
        calls; otherwise a live (transient) update is applied.
        """
        wpoint = self.canvas.point_win_to_doc(point)
        invtrafo = libgeom.invert_trafo(self.target.trafo)
        wpoint = libgeom.apply_trafo_to_point(wpoint, invtrafo)
        rect = self.target.get_rect()
        corners = [] + self.target.corners
        if self.res_index == 0:
            # Moves the x origin; width shrinks/grows correspondingly.
            rect[2] -= wpoint[0] - rect[0]
            rect[0] = wpoint[0]
            if rect[2] < 0:
                self.res_index = 2
                c0, c1, c2, c3 = corners
                corners = [c3, c2, c1, c0]
        elif self.res_index == 1:
            rect[3] = wpoint[1] - rect[1]
            if rect[3] < 0:
                self.res_index = 3
                c0, c1, c2, c3 = corners
                corners = [c1, c0, c3, c2]
        elif self.res_index == 2:
            rect[2] = wpoint[0] - rect[0]
            if rect[2] < 0:
                self.res_index = 0
                c0, c1, c2, c3 = corners
                corners = [c3, c2, c1, c0]
        elif self.res_index == 3:
            # Moves the y origin; height shrinks/grows correspondingly.
            rect[3] -= wpoint[1] - rect[1]
            rect[1] = wpoint[1]
            if rect[3] < 0:
                self.res_index = 1
                c0, c1, c2, c3 = corners
                corners = [c1, c0, c3, c2]
        rect = libgeom.normalize_rect(rect)
        if final:
            self.api.set_rect_final(self.target, rect, self.orig_rect)
            if not corners == self.orig_corners:
                self.api.set_rect_corners_final(corners, self.orig_corners, self.target)
                # Refresh the undo baselines after committing.
                # NOTE(review): reconstructed nesting places these inside the
                # corners branch; confirm against upstream.
                self.orig_corners = [] + self.target.corners
                self.orig_rect = self.target.get_rect()
        else:
            self.api.set_rect(self.target, rect)
            if not corners == self.target.corners:
                self.api.set_rect_corners(corners, self.target)
        self.update_points()

    def apply_rounding(self, point, final=False, inplace=False):
        """Recompute corner rounding from a handle drag to *point*.

        inplace=True (Ctrl) changes only the dragged corner; otherwise all
        four corners receive the same coefficient. The coefficient is the
        drag distance projected onto the handle's start->stop segment,
        clamped to [0.0, 1.0].
        """
        wpoint = self.canvas.point_win_to_doc(point)
        invtrafo = libgeom.invert_trafo(self.target.trafo)
        wpoint = libgeom.apply_trafo_to_point(wpoint, invtrafo)
        corners = [] + self.target.corners
        name = str(self.rnd_index) + str(self.rnd_subindex)
        if self.stop2:
            # Handle with two possible drag axes: follow the dominant one.
            val = abs(wpoint[0] - self.start[0])
            val2 = abs(wpoint[1] - self.start[1])
            start = self.start
            if val > val2:
                if self.rnd_index in (0, 2):
                    stop = self.stop2
                    res = (wpoint[0] - start[0]) / (stop[0] - start[0])
                else:
                    stop = self.stop
                    res = (wpoint[0] - start[0]) / (stop[0] - start[0])
            else:
                if self.rnd_index in (0, 2):
                    stop = self.stop
                    res = (wpoint[1] - start[1]) / (stop[1] - start[1])
                else:
                    stop = self.stop2
                    res = (wpoint[1] - start[1]) / (stop[1] - start[1])
        else:
            start = self.start
            stop = self.stop
            # H_ORIENT lists the handles that move horizontally.
            if name in H_ORIENT:
                res = (wpoint[0] - start[0]) / (stop[0] - start[0])
            else:
                res = (wpoint[1] - start[1]) / (stop[1] - start[1])
        # Clamp the coefficient into [0.0, 1.0].
        res = 0.0 if res < 0.0 else res
        res = 1.0 if res > 1.0 else res
        if inplace:
            corners[self.rnd_index] = res
        else:
            corners = [res, res, res, res]
        if final:
            self.api.set_rect_corners_final(corners, self.orig_corners, self.target)
        else:
            self.api.set_rect_corners(corners, self.target)
        self.update_points()

    # ----- MOUSE CONTROLLING
    def mouse_down(self, event):
        """Begin a rounding or resizing drag, or pick another object."""
        self.resizing = False
        self.rounding = False
        self.selected_obj = None
        self.end = event.get_point()
        for item in self.points:
            if item.is_pressed(self.end):
                self.rounding = True
                self.rnd_index = item.index
                self.rnd_subindex = item.subindex
                self.orig_corners = [] + self.target.corners
                # Copy the handle geometry for the duration of the drag.
                self.start = [] + item.start
                self.start2 = [] + item.start2
                self.stop = [] + item.stop
                self.stop2 = [] + item.stop2
                return
        for item in self.midpoints:
            if item.is_pressed(self.end):
                self.resizing = True
                self.res_index = self.midpoints.index(item)
                self.orig_rect = self.target.get_rect()
                self.orig_corners = [] + self.target.corners
                return
        # Click elsewhere: remember the object under the cursor (if any).
        objs = self.canvas.pick_at_point(self.end)
        if objs and not objs[0] == self.target:
            self.selected_obj = objs[0]

    def mouse_up(self, event):
        """Finish the active drag, or switch editing to a picked object."""
        if self.resizing:
            self.resizing = False
            self.apply_resizing(self.end, True)
        elif self.rounding:
            self.rounding = False
            self.apply_rounding(self.end, True, event.is_ctrl())
        elif self.selected_obj:
            self.target = self.selected_obj
            self.canvas.set_mode(modes.SHAPER_MODE)
        self.end = []

    def mouse_move(self, event):
        """Live-update the drag; Shift disables snapping."""
        self.end = event.get_point()
        is_snapping = not event.is_shift()
        if self.resizing:
            if is_snapping:
                self.end = self._snap_midpoints(self.end)
            self.apply_resizing(self.end)
        elif self.rounding:
            if is_snapping:
                self.end = self._snap_respoints(self.end)
            self.apply_rounding(self.end, inplace=event.is_ctrl())

    def mouse_double_click(self, event):
        """Double click leaves rect-edit mode."""
        self.canvas.set_mode()

    def _snap_respoints(self, point):
        """Snap a rounding-handle drag along its handle->midpoint line.

        NOTE(review): reconstructed nesting attaches the y-snap fallback
        to the 'x-snap produced no flag' case; confirm against upstream.
        """
        p0 = None
        rnd_index = self.rnd_index
        rnd_subindex = self.rnd_subindex
        if self.stop2:
            # Dual-axis handle: prefer the handle actually under the cursor.
            for item in self.points:
                if item.is_pressed(self.end):
                    p0 = item.get_screen_point()
                    rnd_index = item.index
                    rnd_subindex = item.subindex
                    break
        if p0 is None:
            for p in self.points:
                if p.index == rnd_index and p.subindex == rnd_subindex:
                    p0 = p.get_screen_point()
                    break
        if p0:
            cp = None
            index = rnd_index - (1 - rnd_subindex)
            p1 = self.midpoints[index].get_screen_point()
            flag, wp, dp = self.snap.snap_point(p0, snap_x=False)
            self.snap.active_snap = [None, None]
            if flag:
                cp = x_intersect(p0, p1, wp[1])
                if cp:
                    closest_point = project_point_to_line(point, p0, p1)
                    d = libgeom.distance(cp, closest_point)
                    if d < config.point_sensitivity_size * 2:
                        self.snap.active_snap = [None, dp[1]]
                        point = cp
            else:
                flag, wp, dp = self.snap.snap_point(p0, snap_y=False)
                self.snap.active_snap = [None, None]
                if flag:
                    cp = y_intersect(p0, p1, wp[0])
                    if cp:
                        closest_point = project_point_to_line(point, p0, p1)
                        d = libgeom.distance(cp, closest_point)
                        if d < config.point_sensitivity_size * 2:
                            self.snap.active_snap = [dp[0], None]
                            point = cp
        return point

    def _snap_midpoints(self, point):
        """Snap a resize drag along the dragged-midpoint axis."""
        p0 = None
        p1 = None
        if self.res_index == 1:
            p0 = self.midpoints[1].get_screen_point()
            p1 = self.midpoints[3].get_screen_point()
        elif self.res_index == 3:
            p0 = self.midpoints[3].get_screen_point()
            p1 = self.midpoints[1].get_screen_point()
        elif self.res_index == 2:
            p0 = self.midpoints[2].get_screen_point()
            p1 = self.midpoints[0].get_screen_point()
        elif self.res_index == 0:
            p0 = self.midpoints[0].get_screen_point()
            p1 = self.midpoints[2].get_screen_point()
        if p0 is not None:
            cp = None
            flag, wp, dp = self.snap.snap_point(p0)
            if flag and self.snap.active_snap[1] is not None:
                cp = x_intersect(p0, p1, wp[1])
            if not cp and flag and self.snap.active_snap[0] is not None:
                cp = y_intersect(p0, p1, wp[0])
            if cp:
                closest_point = project_point_to_line(point, p0, p1)
                d = libgeom.distance(cp, closest_point)
                if d < config.point_sensitivity_size * 2:
                    point = cp
                else:
                    # Snapped point too far from the drag line: cancel snap.
                    # NOTE(review): reconstructed nesting attaches this else
                    # to the distance check; confirm against upstream.
                    self.snap.active_snap = [None, None]
        return point
class ControlPoint:
    """Corner-rounding drag handle for the rect editor.

    The handle sits on the start->stop segment at fraction *coef*
    (in the target's own coordinates); *start2*/*stop2* optionally
    describe an alternative drag axis.
    """

    canvas = None
    target = None
    start = []
    stop = []
    start2 = []
    stop2 = []
    coef = 0.0
    index = 0
    subindex = 0

    def __init__(
        self,
        canvas,
        target,
        start,
        stop,
        start2=None,
        stop2=None,
        coef=0.0,
        index=0,
        subindex=0,
    ):
        self.canvas = canvas
        self.target = target
        self.start = start
        self.start2 = start2 or []
        self.stop = stop
        self.stop2 = stop2 or []
        self.coef = coef
        self.index = index
        self.subindex = subindex

    def get_point(self):
        """Handle position in document coordinates."""
        base_point = libgeom.midpoint(self.start, self.stop, self.coef)
        return libgeom.apply_trafo_to_point(base_point, self.target.trafo)

    def get_screen_point(self):
        """Handle position in window (screen) coordinates."""
        return self.canvas.point_doc_to_win(self.get_point())

    def is_pressed(self, win_point):
        """Return True when *win_point* lies inside the handle's hit area."""
        center = self.canvas.point_doc_to_win(self.get_point())
        hit_bbox = libgeom.bbox_for_point(center, config.point_sensitivity_size)
        return libgeom.is_point_in_bbox(win_point, hit_bbox)

    def repaint(self):
        """Draw the handle marker on the canvas."""
        self.canvas.renderer.draw_rect_point(self.get_screen_point())
class MidPoint:
    """Side-midpoint drag handle used for resizing in the rect editor."""

    canvas = None
    target = None
    point = []
    callback = None

    def __init__(self, canvas, target, point):
        self.canvas = canvas
        self.target = target
        self.point = point

    def get_point(self):
        """Handle position in document coordinates."""
        return libgeom.apply_trafo_to_point(self.point, self.target.trafo)

    def get_screen_point(self):
        """Handle position in window (screen) coordinates."""
        return self.canvas.point_doc_to_win(self.get_point())

    def is_pressed(self, win_point):
        """Return True when *win_point* lies inside the handle's hit area."""
        center = self.canvas.point_doc_to_win(self.get_point())
        hit_bbox = libgeom.bbox_for_point(center, config.point_sensitivity_size)
        return libgeom.is_point_in_bbox(win_point, hit_bbox)

    def repaint(self):
        """Draw the midpoint marker on the canvas."""
        self.canvas.renderer.draw_rect_midpoint(self.get_screen_point())
def x_intersect(p0, p1, y=0):
    """
    Calculates the coordinates of the intersect line and horizontal line.
    Horizontal line defined by y coordinate.
    :param p0: Start point of the line.
    :param p1: End point of the line.
    :param y: Horizontal line coordinate.
    :return: intersect point or None
    """
    run = p1[0] - p0[0]
    fall = p0[1] - p1[1]
    # A (near-)horizontal line never crosses another horizontal line.
    if abs(fall) < EPSILON:
        return None
    cross = p0[0] * p1[1] - p1[0] * p0[1]
    return [(-y * run - cross) / fall, y]
def y_intersect(p0, p1, x=0):
    """
    Calculates the coordinates of the intersect line and vertical line.
    Vertical line defined by x coordinate.
    :param p0: Start point of the line.
    :param p1: End point of the line.
    :param x: Vertical line coordinate.
    :return: intersect point or None
    """
    run = p1[0] - p0[0]
    fall = p0[1] - p1[1]
    # A (near-)vertical line never crosses another vertical line.
    if abs(run) < EPSILON:
        return None
    cross = p0[0] * p1[1] - p1[0] * p0[1]
    return [x, (-x * fall - cross) / run]
def project_point_to_line(point, p0, p1):
    """
    Calculates the coordinates of the orthogonal projection to line.
    Line defined by two coordinate.
    :param p0: Start point of the line on that the point is projected.
    :param p1: End point of the line on that the point is projected.
    :param point: Point to project.
    :return: project point
    """
    x1, y1 = p0
    x2, y2 = p1
    x3, y3 = point
    dx, dy = x2 - x1, y2 - y1
    seg_len_sq = dx * dx + dy * dy
    # If the segment has length 0 the projection is equal to that point
    if seg_len_sq < EPSILON:
        return [x3, y3]
    k = (dy * (x3 - x1) - dx * (y3 - y1)) / seg_len_sq
    return [x3 - k * dy, y3 + k * dx]
|
Code | GestorPartida | import time
from Code import PGN, ControlPosicion, Gestor, Partida, TrListas
from Code.Constantes import *
from Code.QT import (
Controles,
Iconos,
PantallaEntMaq,
PantallaPGN,
PantallaSolo,
QTUtil,
QTUtil2,
QTVarios,
Voyager,
)
from PyQt4.QtCore import Qt
class GestorPartida(Gestor.Gestor):
    """Game manager for free-play / PGN-editing mode.

    NOTE(review): Python 2 code (``dict.iteritems()`` at the bottom,
    integer division in ``masJugada``); keep py2 semantics when touching
    it. Indentation was normalized away in this chunk; the nesting below
    is a careful reconstruction — verify against upstream Lucas Chess.
    """

    def inicio(self, partidaCompleta, siCompleta):
        """Initialise the manager with a full game and set up the UI.

        NOTE(review): ``self.fen`` and ``self.dicRival`` are read by other
        methods but never initialised here — presumably set by the Gestor
        base class or callers; confirm.
        """
        self.tipoJuego = kJugSolo
        self.partida = partidaCompleta
        # Snapshot used by reiniciar() to restore the starting state.
        self.reinicio = self.partida.save()
        self.siCompleta = siCompleta
        self.siJuegaHumano = True
        self.siJugamosConBlancas = True
        self.siCambios = False
        self.siVolteoAutomatico = False
        self.estado = kJugando
        li = [
            k_grabar,
            k_cancelar,
            k_pgnInformacion,
            k_atras,
            k_reiniciar,
            k_configurar,
            k_utilidades,
        ]
        self.pantalla.ponToolBar(li)
        self.pantalla.activaJuego(True, False, siAyudas=False)
        self.quitaAyudas(True, False)
        self.pantalla.ponRotulo1(None)
        self.pantalla.ponRotulo2(None)
        self.ponMensajero(self.mueveHumano)
        self.ponPosicion(self.partida.iniPosicion)
        self.mostrarIndicador(True)
        self.ponPiezasAbajo(partidaCompleta.iswhite())
        self.pgnRefresh(True)
        self.ponCapInfoPorDefecto()
        self.ponteAlFinal()
        self.ponPosicionDGT()
        self.ponInformacion()
        self.refresh()
        self.siguienteJugada()

    def ponInformacion(self):
        """Show White/Black/Result labels taken from the PGN tags."""
        if self.siCompleta:
            white = black = result = None
            for clave, valor in self.partida.liTags:
                clave = clave.upper()
                if clave == "WHITE":
                    white = valor
                elif clave == "BLACK":
                    black = valor
                elif clave == "RESULT":
                    result = valor
            self.ponRotulo1(
                "%s : <b>%s</b><br>%s : <b>%s</b>"
                % (_("White"), white, _("Black"), black)
                if white and black
                else ""
            )
            self.ponRotulo2("%s : <b>%s</b>" % (_("Result"), result) if result else "")
            self.pantalla.ponWhiteBlack(white, black)

    def reiniciar(self):
        """Restart from the saved snapshot (asks first if there are changes).

        NOTE(review): configurarGS() calls ``self.reiniciar(dic)`` with an
        argument (voyager branch) — that call would raise TypeError with
        this zero-argument signature. Looks like a real bug; confirm
        against upstream before fixing.
        NOTE(review): "loose" in the message below is a typo for "lose",
        but it is a translatable runtime string — fix upstream with the
        translation catalogues.
        """
        if self.siCambios and not QTUtil2.pregunta(
            self.pantalla, _("You will loose all changes, are you sure?")
        ):
            return
        p = Partida.PartidaCompleta()
        p.restore(self.reinicio)
        self.inicio(p, self.siCompleta)

    def procesarAccion(self, clave):
        """Toolbar/menu dispatcher: route an action key to its handler."""
        if clave == k_reiniciar:
            self.reiniciar()
        elif clave == k_atras:
            self.atras()
        elif clave == k_grabar:
            self.pantalla.accept()
        elif clave == k_configurar:
            self.configurarGS()
        elif clave == k_utilidades:
            liMasOpciones = (
                ("libros", _("Consult a book"), Iconos.Libros()),
                (None, None, None),
                ("bookguide", _("Personal Opening Guide"), Iconos.BookGuide()),
                (None, None, None),
                ("play", _("Play current position"), Iconos.MoverJugar()),
            )
            resp = self.utilidades(liMasOpciones)
            if resp == "libros":
                liMovs = self.librosConsulta(True)
                if liMovs:
                    # Apply the chosen book line move by move (reversed).
                    for x in range(len(liMovs) - 1, -1, -1):
                        desde, hasta, coronacion = liMovs[x]
                        self.mueveHumano(desde, hasta, coronacion)
            elif resp == "bookguide":
                self.bookGuide()
            elif resp == "play":
                self.jugarPosicionActual()
        elif clave == k_pgnInformacion:
            self.informacion()
        elif clave in (k_cancelar, k_finpartida):
            self.finPartida()
        else:
            # Fall back to the generic handler of the Gestor base class.
            Gestor.Gestor.rutinaAccionDef(self, clave)

    def finPartida(self):
        """Close the editor, asking for confirmation on unsaved changes."""
        # Check that there are no changes since the last save.
        if self.siCambios:
            resp = QTUtil2.preguntaCancelar(
                self.pantalla, _("Do you want to cancel changes?"), _("Yes"), _("No")
            )
            if not resp:
                return False
        self.pantalla.reject()
        return True

    def finalX(self):
        """Window-close hook: same confirmation path as finPartida()."""
        return self.finPartida()

    def siguienteJugada(self):
        """Prepare the next move: detect game end, then enable input."""
        if self.estado == kFinJuego:
            return
        self.estado = kJugando
        self.ponVista()
        siBlancas = self.partida.ultPosicion.siBlancas
        self.siJugamosConBlancas = (
            siBlancas  # Compatibility: side switching in the PGN view needs this
        )
        if self.siVolteoAutomatico:
            # Auto-rotate the board so the side to move is at the bottom.
            time.sleep(1)
            if siBlancas != self.tablero.siBlancasAbajo:
                self.tablero.rotaTablero()
        if self.partida.numJugadas() > 0:
            jgUltima = self.partida.last_jg()
            if jgUltima:
                if jgUltima.siJaqueMate:
                    self.ponResultado(kGanaRival, not jgUltima.posicion.siBlancas)
                    return
                if jgUltima.siAhogado:
                    self.ponResultado(kTablas)
                    return
                if jgUltima.siTablasRepeticion:
                    self.ponResultado(kTablasRepeticion)
                    return
                if jgUltima.siTablas50:
                    self.ponResultado(kTablas50)
                    return
                if jgUltima.siTablasFaltaMaterial:
                    self.ponResultado(kTablasFaltaMaterial)
                    return
        self.ponIndicador(siBlancas)
        self.refresh()
        self.siJuegaHumano = True
        self.activaColor(siBlancas)

    def mueveHumano(self, desde, hasta, coronacion=None):
        """Validate and play a human move; returns False if illegal."""
        self.siJuegaHumano = True
        jg = self.checkMueveHumano(desde, hasta, coronacion)
        if not jg:
            return False
        self.movimientosPiezas(jg.liMovs)
        self.partida.ultPosicion = jg.posicion
        self.masJugada(jg, True)
        self.siguienteJugada()
        return True

    def masJugada(self, jg, siNuestra):
        """Append move *jg* to the game and refresh end-of-game flags/UI."""
        self.siCambios = True
        # Flag mate/stalemate when the game has just ended with this move.
        if self.siTerminada():
            jg.siJaqueMate = jg.siJaque
            jg.siAhogado = not jg.siJaque
        self.partida.append_jg(jg)
        if self.partida.pendienteApertura:
            self.partida.asignaApertura()
        resp = self.partida.si3repetidas()
        if resp:
            jg.siTablasRepeticion = True
            # Build a "move numbers" label for the repetition notice.
            # (py2: j / 2 is integer division.)
            rotulo = ""
            for j in resp:
                rotulo += "%d," % (j / 2 + 1,)
            rotulo = rotulo.strip(",")
            self.rotuloTablasRepeticion = rotulo
        # 50-move rule counter is kept in half-moves.
        if self.partida.ultPosicion.movPeonCap >= 100:
            jg.siTablas50 = True
        if self.partida.ultPosicion.siFaltaMaterial():
            jg.siTablasFaltaMaterial = True
        self.ponFlechaSC(jg.desde, jg.hasta)
        self.beepExtendido(siNuestra)
        self.pgnRefresh(self.partida.ultPosicion.siBlancas)
        self.refresh()
        self.ponPosicionDGT()

    def ponResultado(self, quien, siBlancas=None):
        """Record the game result; the draw variants collapse to kTablas."""
        self.desactivaTodas()
        self.resultadoQuien = quien
        self.resultadoSiBlancas = siBlancas
        self.resultado = quien
        if quien == kTablasRepeticion:
            self.resultado = kTablas
        elif quien == kTablas50:
            self.resultado = kTablas
        elif quien == kTablasFaltaMaterial:
            self.resultado = kTablas
    def actualPGN(self):
        """Serialise the current game (tags + moves) to a PGN string.

        NOTE(review): if a RESULT tag is present in ``st`` its value is
        used, otherwise one is synthesised from ``self.resultado``.
        """
        resp = ""
        st = set()  # uppercased tag names already emitted (dedupe)
        for eti, valor in self.partida.liTags:
            etiU = eti.upper()
            if etiU in st:
                continue
            st.add(etiU)
            resp += '[%s "%s"]\n' % (eti, valor)
            if etiU == "RESULT":
                result = valor
        if "RESULT" not in st:
            if self.resultado == kDesconocido:
                result = "*"
            elif self.resultado == kTablas:
                result = "1/2-1/2"
            else:
                result = "1-0" if self.resultadoSiBlancas else "0-1"
            resp += '[Result "%s"]\n' % result
        if self.fen:
            resp += '[FEN "%s"]\n' % self.fen
        ap = self.partida.apertura
        if ap:
            if "ECO" not in st:
                resp += '[ECO "%s"]\n' % ap.eco
            if "OPENING" not in st:
                resp += '[Opening "%s"]\n' % ap.trNombre
        resp += "\n" + self.partida.pgnBase() + " " + result
        return resp

    def editarEtiquetasPGN(self):
        """Open the PGN tag editor and apply any changes."""
        resp = PantallaSolo.editarEtiquetasPGN(self.procesador, self.partida.liTags)
        if resp:
            self.partida.liTags = resp
            self.siCambios = True
            self.ponInformacion()

    def informacion(self):
        """Show a menu with the PGN tags (plus detected opening)."""
        menu = QTVarios.LCMenu(self.pantalla)
        f = Controles.TipoLetra(puntos=10, peso=75)
        menu.ponFuente(f)
        siOpening = False
        for clave, valor in self.partida.liTags:
            # Translate well-known tag names for display.
            trad = TrListas.pgnLabel(clave)
            if trad != clave:
                clave = trad
            menu.opcion(clave, "%s : %s" % (clave, valor), Iconos.PuntoAzul())
            if clave.upper() == "OPENING":
                siOpening = True
        if not siOpening:
            # No Opening tag: show the detected opening, if any.
            apertura = self.partida.apertura
            if apertura:
                menu.separador()
                nom = apertura.trNombre
                ape = _("Opening")
                rotulo = nom if ape.upper() in nom.upper() else ("%s : %s" % (ape, nom))
                menu.opcion("opening", rotulo, Iconos.PuntoNaranja())
        menu.separador()
        menu.opcion("pgn", _("Edit PGN labels"), Iconos.PGN())
        resp = menu.lanza()
        if resp:
            self.editarEtiquetasPGN()

    def configurarGS(self):
        """Extended configuration menu (rotation, PGN/FEN import, Voyager)."""
        sep = (None, None, None)
        liMasOpciones = [
            ("rotacion", _("Auto-rotate board"), Iconos.JS_Rotacion()),
            sep,
            ("leerpgn", _("Read PGN"), Iconos.PGN_Importar()),
            sep,
            ("pastepgn", _("Paste PGN"), Iconos.Pegar16()),
            sep,
        ]
        if not self.siCompleta:
            liMasOpciones.extend(
                [
                    ("posicion", _("Start position"), Iconos.Datos()),
                    sep,
                    ("pasteposicion", _("Paste FEN position"), Iconos.Pegar16()),
                    sep,
                    ("voyager", _("Voyager 2"), Iconos.Voyager1()),
                ]
            )
        resp = self.configurar(liMasOpciones, siCambioTutor=True, siSonidos=True)
        if resp == "rotacion":
            self.siVolteoAutomatico = not self.siVolteoAutomatico
            siBlancas = self.partida.ultPosicion.siBlancas
            if self.siVolteoAutomatico:
                if siBlancas != self.tablero.siBlancasAbajo:
                    self.tablero.rotaTablero()
        elif resp == "posicion":
            ini_fen = self.partida.iniPosicion.fen()
            cur_fen = Voyager.voyagerFEN(self.pantalla, ini_fen)
            if cur_fen and cur_fen != ini_fen:
                self.partida.resetFEN(cur_fen)
                self.inicio(self.partida, self.siCompleta)
        elif resp == "pasteposicion":
            texto = QTUtil.traePortapapeles()
            if texto:
                cp = ControlPosicion.ControlPosicion()
                try:
                    cp.leeFen(str(texto))
                    self.fen = cp.fen()
                    self.posicApertura = None
                    self.reiniciar()
                except:
                    # NOTE(review): bare except silently discards any
                    # clipboard/FEN parse error — deliberate best-effort,
                    # but an invalid-FEN notice would help the user.
                    pass
        elif resp == "leerpgn":
            unpgn = PantallaPGN.eligePartida(self.pantalla)
            if unpgn:
                partida = unpgn.partida
                # A complete game must start from the standard position.
                if self.siCompleta and not partida.siFenInicial():
                    return
                p = Partida.PartidaCompleta()
                p.leeOtra(partida)
                p.asignaApertura()
                p.setTags(unpgn.listaCabeceras())
                self.reinicio = p.save()
                self.reiniciar()
        elif resp == "pastepgn":
            texto = QTUtil.traePortapapeles()
            if texto:
                unpgn = PGN.UnPGN()
                unpgn.leeTexto(texto)
                if unpgn.siError:
                    QTUtil2.mensError(
                        self.pantalla,
                        _(
                            "The text from the clipboard does not contain a chess game in PGN format"
                        ),
                    )
                    return
                partida = unpgn.partida
                if self.siCompleta and not partida.siFenInicial():
                    return
                p = Partida.PartidaCompleta()
                p.leeOtra(partida)
                p.asignaApertura()
                p.setTags(unpgn.listaCabeceras())
                self.reinicio = p.save()
                self.reiniciar()
        elif resp == "voyager":
            ptxt = Voyager.voyagerPartida(self.pantalla, self.partida)
            if ptxt:
                dic = self.creaDic()
                dic["PARTIDA"] = ptxt
                p = self.partida.copia()
                p.recuperaDeTexto(ptxt)
                dic["FEN"] = None if p.siFenInicial() else p.iniPosicion.fen()
                dic["SIBLANCASABAJO"] = self.tablero.siBlancasAbajo
                # NOTE(review): reiniciar() takes no argument — this call
                # would raise TypeError. Looks like a real bug.
                self.reiniciar(dic)

    def controlTeclado(self, nkey):
        """Keyboard shortcut handler."""
        if nkey == Qt.Key_V:  # V
            self.paste(QTUtil.traePortapapeles())

    def listHelpTeclado(self):
        """Shortcut list shown in the keyboard help."""
        return [
            ("V", _("Paste position")),
        ]

    def juegaRival(self):
        """Let the configured engine play one move (if game not over)."""
        if not self.siTerminada():
            self.pensando(True)
            rm = self.xrival.juega(nAjustado=self.xrival.nAjustarFuerza)
            self.pensando(False)
            if rm.desde:
                self.mueveHumano(rm.desde, rm.hasta, rm.coronacion)

    def cambioRival(self):
        """Configure/replace the engine opponent and persist the settings."""
        if self.dicRival:
            dicBase = self.dicRival
        else:
            dicBase = self.configuracion.leeVariables("ENG_GESTORSOLO")
        dic = self.dicRival = PantallaEntMaq.cambioRival(
            self.pantalla, self.configuracion, dicBase, siGestorSolo=True
        )
        if dic:
            for k, v in dic.iteritems():
                self.reinicio[k] = v
            dr = dic["RIVAL"]
            rival = dr["CM"]
            r_t = dr["TIEMPO"] * 100  # Stored in tenths of a second -> ms
            r_p = dr["PROFUNDIDAD"]
            if r_t <= 0:
                r_t = None
            if r_p <= 0:
                r_p = None
            # Neither time nor depth configured: default to 1 second.
            if r_t is None and r_p is None and not dic["SITIEMPO"]:
                r_t = 1000
            nAjustarFuerza = dic["AJUSTAR"]
            self.xrival = self.procesador.creaGestorMotor(
                rival, r_t, r_p, nAjustarFuerza != kAjustarMejor
            )
            self.xrival.nAjustarFuerza = nAjustarFuerza
            dic["ROTULO1"] = _("Opponent") + ": <b>" + self.xrival.nombre
            self.ponRotulo1(dic["ROTULO1"])
            self.siJuegaMotor = True
            self.configuracion.escVariables("ENG_GESTORSOLO", dic)

    def atras(self):
        """Undo the last move and hand control back to the player."""
        if self.partida.numJugadas():
            self.partida.anulaSoloUltimoMovimiento()
            if not self.fen:
                self.partida.asignaApertura()
            self.ponteAlFinal()
            self.estado = kJugando
            self.refresh()
            self.siguienteJugada()

    def tituloVentana(self):
        """Window title built from the main PGN tags."""
        white = ""
        black = ""
        event = ""
        date = ""
        result = ""
        for clave, valor in self.partida.liTags:
            if clave.upper() == "WHITE":
                white = valor
            elif clave.upper() == "BLACK":
                black = valor
            elif clave.upper() == "EVENT":
                event = valor
            elif clave.upper() == "DATE":
                date = valor
            elif clave.upper() == "RESULT":
                result = valor
        return "%s-%s (%s, %s,%s)" % (white, black, event, date, result)
|
extractor | discoverynetworks | # coding: utf-8
from __future__ import unicode_literals
import re
from .dplay import DPlayIE
class DiscoveryNetworksDeIE(DPlayIE):
    """Extractor for tlc.de / dmax.de / dplay.co.uk programme pages,
    delegating to the shared Discovery ("disco") API of DPlayIE."""

    _VALID_URL = r"https?://(?:www\.)?(?P<domain>(?:tlc|dmax)\.de|dplay\.co\.uk)/(?:programme|show|sendungen)/(?P<programme>[^/]+)/(?:video/)?(?P<alternate_id>[^/]+)"
    _TESTS = [
        {
            "url": "https://www.tlc.de/programme/breaking-amish/video/die-welt-da-drauen/DCB331270001100",
            "info_dict": {
                "id": "78867",
                "ext": "mp4",
                "title": "Die Welt da draußen",
                "description": "md5:61033c12b73286e409d99a41742ef608",
                "timestamp": 1554069600,
                "upload_date": "20190331",
            },
            "params": {
                "format": "bestvideo",
                "skip_download": True,
            },
        },
        {
            "url": "https://www.dmax.de/programme/dmax-highlights/video/tuning-star-sidney-hoffmann-exklusiv-bei-dmax/191023082312316",
            "only_matching": True,
        },
        {
            "url": "https://www.dplay.co.uk/show/ghost-adventures/video/hotel-leger-103620/EHD_280313B",
            "only_matching": True,
        },
        {
            "url": "https://tlc.de/sendungen/breaking-amish/die-welt-da-drauen/",
            "only_matching": True,
        },
    ]

    def _real_extract(self, url):
        mobj = re.match(self._VALID_URL, url)
        domain, programme, alternate_id = mobj.groups()
        # The UK site lives in its own API realm; the German domains map
        # to a realm derived from the hostname.
        if domain == "dplay.co.uk":
            country, realm = "GB", "questuk"
        else:
            country, realm = "DE", domain.replace(".", "")
        display_id = "%s/%s" % (programme, alternate_id)
        return self._get_disco_api_info(
            url,
            display_id,
            "sonic-eu1-prod.disco-api.com",
            realm,
            country,
        )
|
feminout | importToolsFem | # ***************************************************************************
# * Copyright (c) 2017 Bernd Hahnebach <bernd@bimstatik.org> *
# * *
# * This file is part of the FreeCAD CAx development system. *
# * *
# * This program is free software; you can redistribute it and/or modify *
# * it under the terms of the GNU Lesser General Public License (LGPL) *
# * as published by the Free Software Foundation; either version 2 of *
# * the License, or (at your option) any later version. *
# * for detail see the LICENCE text file. *
# * *
# * This program is distributed in the hope that it will be useful, *
# * but WITHOUT ANY WARRANTY; without even the implied warranty of *
# * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *
# * GNU Library General Public License for more details. *
# * *
# * You should have received a copy of the GNU Library General Public *
# * License along with this program; if not, write to the Free Software *
# * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 *
# * USA *
# * *
# ***************************************************************************
__title__ = "FreeCAD FEM import tools"
__author__ = "Bernd Hahnebach"
__url__ = "https://www.freecad.org"
## @package importToolsFem
# \ingroup FEM
# \brief FreeCAD FEM import tools
import FreeCAD
from FreeCAD import Console
def get_FemMeshObjectMeshGroups(fem_mesh_obj):
    """
    Get mesh groups from mesh.
    """
    # Only used by the Fenics mesh exporter. Any FemMesh exposes the
    # Group property (a missing property would be a SMESH bug), so no
    # exception handling is needed here.
    return fem_mesh_obj.FemMesh.Groups
def get_FemMeshObjectOrder(fem_mesh_obj):
    """Determine the element order of the mesh from its edge node counts.

    An edge with 2 nodes indicates linear elements, 3 nodes quadratic, and
    so on. Returns ``None`` when the mesh has no edges (the order cannot be
    determined without them — is a mesh without edges even possible? Seems
    a very degenerate case), a single int when all edges agree, or a list
    of orders when edges with different node counts are present.
    """
    mesh = fem_mesh_obj.FemMesh
    if mesh.Edges == ():
        Console.PrintMessage(
            "Found no edges in mesh: Element order determination does not work without them.\n"
        )
        return None
    # A set is only used to drop duplicate node counts.
    node_counts = {len(mesh.getElementNodes(edge)) for edge in mesh.Edges}
    orders = [count - 1 for count in node_counts]
    if len(orders) == 1:
        return orders[0]
    return orders
def get_FemMeshObjectDimension(fem_mesh_obj):
    """Return the topological dimension of the mesh (0=nodes only,
    1=linemesh, 2=facemesh, 3=volumemesh), or ``None`` if nothing is found.

    The highest-dimensional entity collection that is present wins.
    """
    mesh = fem_mesh_obj.FemMesh
    dim = None
    # NOTE: the original comparisons against the empty tuple are kept
    # verbatim; some of these properties may not be tuples at all (e.g. a
    # dict compares unequal to () even when empty) — behavior preserved.
    for candidate, entities in (
        (0, mesh.Nodes),
        (1, mesh.Edges),
        (2, mesh.Faces),
        (3, mesh.Volumes),
    ):
        if entities != ():
            dim = candidate
    return dim
def get_FemMeshObjectElementTypes(fem_mesh_obj, remove_zero_element_entries=True):
    """Return all element types in the mesh with their dimension.

    Parameters:
        fem_mesh_obj: object with a ``FemMesh`` attribute exposing the
            ``<Name>Count`` properties (NodeCount, EdgeCount, ...).
        remove_zero_element_entries: drop types with a zero count.

    Returns:
        list of (count, type_name, dimension) tuples.
    """
    FreeCAD_element_names_dims = {
        "Node": 0,
        "Edge": 1,
        "Hexa": 3,
        "Polygon": 2,
        "Polyhedron": 3,
        "Prism": 3,
        "Pyramid": 3,
        "Quadrangle": 2,
        "Tetra": 3,
        "Triangle": 2,
    }
    mesh = fem_mesh_obj.FemMesh
    # getattr performs the same "<Name>Count" property lookup the previous
    # eval()-based code did, without dynamically executing built strings.
    elements_list = [
        (getattr(mesh, name + "Count"), name, dim)
        for (name, dim) in FreeCAD_element_names_dims.items()
    ]
    if remove_zero_element_entries:
        elements_list = [entry for entry in elements_list if entry[0] > 0]
    return elements_list
def get_MaxDimElementFromList(elem_list):
    """Return the entry with the maximal dimension from *elem_list*.

    Entries are (count, name, dimension) tuples as produced by
    get_FemMeshObjectElementTypes(). Among entries sharing the maximal
    dimension, the last one in list order is returned (stable sort).
    Unlike the previous implementation, the caller's list is no longer
    sorted in place.
    """
    return sorted(elem_list, key=lambda entry: entry[2])[-1]
def make_femmesh(mesh_data):
    """Build a FreeCAD ``Fem.FemMesh`` from FEM mesh data.

    ``mesh_data`` is the plain-dict representation produced by
    :func:`make_dict_from_femmesh`: a "Nodes" mapping of node id ->
    (x, y, z) plus "<Type>Elem" mappings of element id -> node-id tuple.

    Element groups missing from the dict are skipped; the previous
    implementation raised KeyError for any absent "<Type>Elem" key even
    though the guard only requires one of them to be present.
    """
    import Fem

    mesh = Fem.FemMesh()
    m = mesh_data
    # (dict key, nodes per element), in the same order the previous
    # hand-unrolled implementation added them.
    volume_types = (
        ("Hexa8Elem", 8),
        ("Penta6Elem", 6),
        ("Tetra4Elem", 4),
        ("Tetra10Elem", 10),
        ("Penta15Elem", 15),
        ("Hexa20Elem", 20),
    )
    face_types = (
        ("Tria3Elem", 3),
        ("Tria6Elem", 6),
        ("Quad4Elem", 4),
        ("Quad8Elem", 8),
    )
    edge_types = (
        ("Seg2Elem", 2),
        ("Seg3Elem", 3),
    )
    all_element_keys = [key for key, _ in volume_types + face_types + edge_types]
    if ("Nodes" in m) and (len(m["Nodes"]) > 0):
        FreeCAD.Console.PrintLog("Found: nodes\n")
        if any(key in m for key in all_element_keys):
            nds = m["Nodes"]
            FreeCAD.Console.PrintLog("Found: elements\n")
            for node_id, coords in nds.items():
                mesh.addNode(coords[0], coords[1], coords[2], node_id)
            counts = {}  # element dict key -> number of elements added
            for key, nodes_per_elem in volume_types:
                elems = m.get(key, {})
                counts[key] = len(elems)
                for elem_id, elem_nodes in elems.items():
                    mesh.addVolume(list(elem_nodes[:nodes_per_elem]), elem_id)
            for key, nodes_per_elem in face_types:
                elems = m.get(key, {})
                counts[key] = len(elems)
                for elem_id, elem_nodes in elems.items():
                    mesh.addFace(list(elem_nodes[:nodes_per_elem]), elem_id)
            for key, nodes_per_elem in edge_types:
                elems = m.get(key, {})
                counts[key] = len(elems)
                for elem_id, elem_nodes in elems.items():
                    mesh.addEdge(list(elem_nodes[:nodes_per_elem]), elem_id)
            Console.PrintLog(
                "imported mesh: {} nodes, {} HEXA8, {} PENTA6, {} TETRA4, {} TETRA10, {} PENTA15\n".format(
                    len(nds),
                    counts["Hexa8Elem"],
                    counts["Penta6Elem"],
                    counts["Tetra4Elem"],
                    counts["Tetra10Elem"],
                    counts["Penta15Elem"],
                )
            )
            Console.PrintLog(
                "imported mesh: {} "
                "HEXA20, {} TRIA3, {} TRIA6, {} QUAD4, {} QUAD8, {} SEG2, {} SEG3\n".format(
                    counts["Hexa20Elem"],
                    counts["Tria3Elem"],
                    counts["Tria6Elem"],
                    counts["Quad4Elem"],
                    counts["Quad8Elem"],
                    counts["Seg2Elem"],
                    counts["Seg3Elem"],
                )
            )
        else:
            Console.PrintError("No Elements found!\n")
    else:
        Console.PrintError("No Nodes found!\n")
    return mesh
def make_dict_from_femmesh(femmesh):
    """
    Convert a FemMesh into a plain dictionary which can be fed directly to
    importToolsFem.make_femmesh(mesh_data) to rebuild a valid FEM mesh.
    """
    # The result is plain data and can easily be saved/reloaded with yaml;
    # see importYamlJasonMesh for an implementation.
    # Buckets keyed by node count, one map per entity kind.
    edge_buckets = {2: [], 3: []}  # seg2, seg3
    face_buckets = {3: [], 6: [], 4: [], 8: []}  # tria3, tria6, quad4, quad8
    volume_buckets = {4: [], 10: [], 8: [], 20: [], 6: [], 15: []}
    # Classify every edge, face and volume by its number of nodes.
    for bucket_map, element_ids in (
        (edge_buckets, femmesh.Edges),
        (face_buckets, femmesh.Faces),
        (volume_buckets, femmesh.Volumes),
    ):
        for element_id in element_ids:
            nodes = femmesh.getElementNodes(element_id)
            bucket_map[len(nodes)].append((element_id, nodes))
    groups = {
        group_num: (
            femmesh.getGroupName(group_num),
            femmesh.getGroupElements(group_num),
        )
        for group_num in femmesh.Groups
    }
    # no pyr5, pyr13?
    return {
        "Nodes": {num: (vec.x, vec.y, vec.z) for (num, vec) in femmesh.Nodes.items()},
        "Seg2Elem": dict(edge_buckets[2]),
        "Seg3Elem": dict(edge_buckets[3]),
        "Tria3Elem": dict(face_buckets[3]),
        "Tria6Elem": dict(face_buckets[6]),
        "Quad4Elem": dict(face_buckets[4]),
        "Quad8Elem": dict(face_buckets[8]),
        "Tetra4Elem": dict(volume_buckets[4]),
        "Tetra10Elem": dict(volume_buckets[10]),
        "Hexa8Elem": dict(volume_buckets[8]),
        "Hexa20Elem": dict(volume_buckets[20]),
        "Penta6Elem": dict(volume_buckets[6]),
        "Penta15Elem": dict(volume_buckets[15]),
        "Groups": groups,
    }
def fill_femresult_mechanical(res_obj, result_set):
    """Fill a FreeCAD FEM mechanical result object with result data.

    Parameters:
        res_obj: FEM result object; its properties (NodeNumbers,
            DisplacementVectors, NodeStress*, NodeStrain*, Peeq,
            Temperature, MassFlowRate, NetworkPressure, Time, Eigenmode)
            are filled from result_set.
        result_set: dict with optional keys "number", "time", "disp",
            "stress", "strain", "peeq", "temp", "mflow", "npressure".

    Returns:
        The modified res_obj.
    """
    if "number" in result_set:
        eigenmode_number = result_set["number"]
    else:
        eigenmode_number = 0
    if "time" in result_set:
        step_time = result_set["time"]
        step_time = round(step_time, 2)
    # NOTE(review): when "time" is absent but "temp", "mflow" or "npressure"
    # is present, step_time is unbound and the assignments below raise
    # UnboundLocalError — presumably solver output always provides "time"
    # together with those keys; confirm.
    # if disp exists, fill res_obj.NodeNumbers and
    # res_obj.DisplacementVectors as well as stress and strain
    # furthermore the eigenmode number
    if "disp" in result_set:
        disp = result_set["disp"]
        res_obj.DisplacementVectors = list(disp.values())
        res_obj.NodeNumbers = list(disp)
        # fill res_obj.NodeStressXX etc if they exist in result_set
        # list values are just added
        # Should we check if the key in stress and strain dict
        # is the same as the number in NodeNumbers?
        if "stress" in result_set:
            stress = result_set["stress"]
            Sxx = []
            Syy = []
            Szz = []
            Sxy = []
            Sxz = []
            Syz = []
            # values_S .. stress_tensor .. (Sxx, Syy, Szz, Sxy, Sxz, Syz)
            for values_S in stress.values():
                Sxx.append(values_S[0])
                Syy.append(values_S[1])
                Szz.append(values_S[2])
                Sxy.append(values_S[3])
                Sxz.append(values_S[4])
                Syz.append(values_S[5])
            res_obj.NodeStressXX = Sxx
            res_obj.NodeStressYY = Syy
            res_obj.NodeStressZZ = Szz
            res_obj.NodeStressXY = Sxy
            res_obj.NodeStressXZ = Sxz
            res_obj.NodeStressYZ = Syz
        # fill res_obj.NodeStrainXX etc if they exist in result_set
        if "strain" in result_set:
            strain = result_set["strain"]
            Exx = []
            Eyy = []
            Ezz = []
            Exy = []
            Exz = []
            Eyz = []
            # values_E .. straintuple .. (Exx, Eyy, Ezz, Exy, Exz, Eyz)
            for values_E in strain.values():
                Exx.append(values_E[0])
                Eyy.append(values_E[1])
                Ezz.append(values_E[2])
                Exy.append(values_E[3])
                Exz.append(values_E[4])
                Eyz.append(values_E[5])
            res_obj.NodeStrainXX = Exx
            res_obj.NodeStrainYY = Eyy
            res_obj.NodeStrainZZ = Ezz
            res_obj.NodeStrainXY = Exy
            res_obj.NodeStrainXZ = Exz
            res_obj.NodeStrainYZ = Eyz
        # fill Equivalent Plastic strain if they exist
        if "peeq" in result_set:
            Peeq = result_set["peeq"]
            if len(Peeq) > 0:
                if len(Peeq.values()) != len(disp.values()):
                    # how is this possible? An example is needed!
                    Console.PrintError("PEEQ seems to have extra nodes.\n")
                    # keep only as many values as there are displacement nodes
                    res_obj.Peeq = list(Peeq.values())[: len(disp)]
                else:
                    res_obj.Peeq = list(Peeq.values())
        # fill eigenmode number if they exist
        if eigenmode_number > 0:
            res_obj.Eigenmode = eigenmode_number
        # it is assumed Temperature can not exist without disp
        # TODO really proof this
        # if temperature can exist without disp:
        # move them out of disp if conditiona and set NodeNumbers
        if "temp" in result_set:
            Temperature = result_set["temp"]
            if len(Temperature) > 0:
                if len(Temperature.values()) != len(disp.values()):
                    # how is this possible? An example is needed!
                    # BUGFIX: warn once instead of once per node, and fix the
                    # "seams ... exptra" typos in the message
                    Console.PrintError("Temperature seems to have extra nodes.\n")
                    res_obj.Temperature = list(Temperature.values())[: len(disp)]
                else:
                    res_obj.Temperature = list(Temperature.values())
                res_obj.Time = step_time
    # fill res_obj.MassFlow
    if "mflow" in result_set:
        MassFlow = result_set["mflow"]
        if len(MassFlow) > 0:
            res_obj.MassFlowRate = list(MassFlow.values())
            res_obj.Time = step_time
            # disp does not exist, res_obj.NodeNumbers needs to be set
            res_obj.NodeNumbers = list(MassFlow)
    # fill res_obj.NetworkPressure, disp does not exist, see MassFlow
    if "npressure" in result_set:
        NetworkPressure = result_set["npressure"]
        if len(NetworkPressure) > 0:
            res_obj.NetworkPressure = list(NetworkPressure.values())
            res_obj.Time = step_time
    return res_obj
|
net | CA | # neubot/net/CA.py
#
# Copyright (c) 2010-2011 Simone Basso <bassosimone@gmail.com>,
# NEXA Center for Internet & Society at Politecnico di Torino
#
# This file is part of Neubot <http://www.neubot.org/>.
#
# Neubot is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Neubot is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Neubot. If not, see <http://www.gnu.org/licenses/>.
#
"""
Generate private key and certificate file for the Neubot
server. This works under posix only and typically the file
is written at `/etc/neubot/cert.pem`.
The flaw of this schema is that if someone attacks neubot(1)
successfully she has enough permissions to steal and/or modify
the content of `/etc/neubot/cert.pem`.
"""
import logging
import os.path
import sys
# Refuse to run on non-POSIX systems: the code below relies on pwd,
# os.chown/os.chmod and POSIX permission bits.
if os.name != "posix":
    sys.exit("This command runs under 'posix' only")
import pwd
import subprocess
# When executed directly from a source checkout, make the neubot package
# importable from the current directory.
if __name__ == "__main__":
    sys.path.insert(0, ".")
from neubot.config import CONFIG
from neubot.main import common
# Defaults: where key/cert material lives, RSA key size, certificate
# lifetime in days, and the intermediate file names merged into cert.pem.
CONFIG.register_defaults(
    {
        "net.CA.basedir": "/etc/neubot",
        "net.CA.bits": 4096,
        "net.CA.cacert": "_cacert.pem",
        "net.CA.days": 1095,
        "net.CA.privkey": "_privkey.pem",
    }
)
def main(args):
    """Generate private key and certificate file for Neubot server.

    Writes the merged key+certificate to `<basedir>/cert.pem`, removing
    the intermediate files. Must run as root on a POSIX system.
    """
    CONFIG.register_descriptions(
        {
            "net.CA.bits": "Set private key bits number",
            "net.CA.cacert": "Set certificate file path",
            "net.CA.days": "Set days before expire",
            "net.CA.privkey": "Set private key file path",
        }
    )
    common.main("net.CA", "generate test certificates", args)
    conf = CONFIG.copy()
    #
    # We need to be root because we play with file and
    # directories permissions and ownership which, in the
    # common case, cannot be done by other users.
    #
    if os.getuid():
        sys.exit("This command must be invoked as root")
    #
    # Force a standard umask but note that we will
    # override perms when needed.
    # Create the base directory and allow root to
    # populate and others just to read and list.
    # (0o-prefixed octal literals replace the legacy 0NNN form,
    # which is a syntax error on Python 3; 0o works on 2.6+ too.)
    #
    os.umask(0o022)
    if not os.path.exists(conf["net.CA.basedir"]):
        os.mkdir(conf["net.CA.basedir"], 0o755)
    # Make paths absolute
    conf["net.CA.cacert"] = os.sep.join([conf["net.CA.basedir"], conf["net.CA.cacert"]])
    conf["net.CA.privkey"] = os.sep.join(
        [conf["net.CA.basedir"], conf["net.CA.privkey"]]
    )
    # Generate RSA private key.
    # Return codes are deliberately not checked (best-effort, matching the
    # original behavior); a failure surfaces when merging the files below.
    genrsa = [
        "openssl",
        "genrsa",
        "-out",
        conf["net.CA.privkey"],
        str(conf["net.CA.bits"]),
    ]
    logging.debug("CA: exec: %s", genrsa)
    subprocess.call(genrsa)
    # Generate self-signed certificate
    req = [
        "openssl",
        "req",
        "-new",
        "-x509",
        "-key",
        conf["net.CA.privkey"],
        "-out",
        conf["net.CA.cacert"],
        "-days",
        str(conf["net.CA.days"]),
    ]
    logging.debug("CA: exec: %s", req)
    subprocess.call(req)
    #
    # Merge private key and self-signed certificate into
    # the same file. While there, remove the original files
    # from the filesystem. Context managers guarantee the
    # descriptors are closed even if an I/O error occurs.
    #
    certfile = os.sep.join([conf["net.CA.basedir"], "cert.pem"])
    with open(certfile, "w") as outfp:
        for key in ("net.CA.privkey", "net.CA.cacert"):
            with open(conf[key], "r") as fpin:
                content = fpin.read()
            os.unlink(conf[key])
            outfp.write(content)
    #
    # Allow the `_neubot` user to read the file and set
    # very restrictive permissions. Note that an attacker
    # running as the `_neubot` user can steal or modify
    # the on-disk private key quite easily. This is the
    # basic flaw of the current SSL schema in Neubot.
    #
    rec = pwd.getpwnam("_neubot")
    os.chown(certfile, rec.pw_uid, rec.pw_gid)
    os.chmod(certfile, 0o400)
# Allow running this module directly: `python neubot/net/CA.py [options]`.
if __name__ == "__main__":
    main(sys.argv)
|
migrations | 0003_populate_license_chooser | # Generated by Django 2.2.1 on 2019-05-27 11:28
from django.db import migrations
def create_osf_license_choosers(apps, schema_editor):
    """
    Creates LicenseChooser objects for osf repositories as they were hard-coded before introduced model.
    """
    License = apps.get_model("deposit", "License")
    Repository = apps.get_model("deposit", "Repository")
    LicenseChooser = apps.get_model("deposit", "LicenseChooser")
    # (license uri, OSF transmit id, position, is default) — the default
    # "no license" entry first, then the non-default CC licenses.
    chooser_specs = [
        (
            "https://dissem.in/deposit/license/no-license/",
            "563c1cf88c5e4a3877f9e965",
            0,
            True,
        ),
        (
            "https://creativecommons.org/publicdomain/zero/1.0/",
            "563c1cf88c5e4a3877f9e96c",
            0,
            False,
        ),
        (
            "https://creativecommons.org/licenses/by/4.0/",
            "563c1cf88c5e4a3877f9e96a",
            1,
            False,
        ),
    ]
    for repo in Repository.objects.filter(protocol="OSFProtocol"):
        for uri, transmit_id, position, is_default in chooser_specs:
            LicenseChooser.objects.create(
                repository=repo,
                license=License.objects.get(uri=uri),
                transmit_id=transmit_id,
                position=position,
                default=is_default,
            )
def remove_osf_license_choosers(apps, schema_editor):
    """
    Removes license chooser corresponding to osf
    """
    chooser_model = apps.get_model("deposit", "LicenseChooser")
    osf_choosers = chooser_model.objects.filter(repository__protocol="OSFProtocol")
    osf_choosers.delete()
class Migration(migrations.Migration):
    # NOTE(review): these dependencies presumably ensure the osf app schema
    # and the License rows looked up in create_osf_license_choosers exist
    # before this data migration runs — confirm against those migrations.
    dependencies = [
        ("osf", "0002_osf_id"),
        ("deposit", "0014_populate_licenses"),
    ]
    # Reversible data migration: forward creates the OSF license choosers,
    # backward deletes them.
    operations = [
        migrations.RunPython(create_osf_license_choosers, remove_osf_license_choosers)
    ]
|
cdp | client | import base64
import re
from contextlib import asynccontextmanager
from dataclasses import dataclass
from typing import Any, AsyncGenerator, Awaitable, Callable, Coroutine, List, Mapping, Optional, Set
import trio
from streamlink.session import Streamlink
from streamlink.webbrowser.cdp.connection import CDPConnection, CDPSession
from streamlink.webbrowser.cdp.devtools import fetch, network, page, runtime, target
from streamlink.webbrowser.cdp.exceptions import CDPError
from streamlink.webbrowser.chromium import ChromiumWebbrowser
try:
from typing import Self, TypeAlias # type: ignore[attr-defined]
except ImportError: # pragma: no cover
from typing_extensions import Self, TypeAlias
# Signature of async request-paused handlers: (client_session, request) -> Awaitable.
TRequestHandlerCallable: TypeAlias = Callable[["CDPClientSession", fetch.RequestPaused], Awaitable]
# Finds each wildcard (* or ?) together with its optional literal prefix
# and the run of backslashes immediately preceding it (for escape handling).
_re_url_pattern_wildcard = re.compile(r"(.+?)?(\\+)?([*?])")
@dataclass
class RequestPausedHandler:
    """Pairs an async ``Fetch.requestPaused`` handler with the URL pattern
    and request stage it applies to."""

    # Coroutine function called as ``async_handler(client_session, request)``.
    async_handler: TRequestHandlerCallable
    # URL wildcard pattern: ``*`` and ``?`` are wildcards, backslash escapes them.
    url_pattern: str = "*"
    # True: match at the request stage (no response yet); False: response stage.
    on_request: bool = False

    def __post_init__(self) -> None:
        # Compile the wildcard pattern once; matches() runs per paused event.
        self._re_url: re.Pattern = self._url_pattern_to_regex_pattern(self.url_pattern)

    def matches(self, request: fetch.RequestPaused) -> bool:
        """Return whether *request* is at this handler's stage and its URL
        matches the compiled pattern."""
        # A paused event with neither a status code nor an error reason is
        # still at the request stage; anything else is the response stage.
        on_request: bool = request.response_status_code is None and request.response_error_reason is None
        return on_request is self.on_request and self._re_url.match(request.request.url) is not None

    @staticmethod
    def _url_pattern_to_regex_pattern(url_pattern: str) -> re.Pattern:
        """Translate a URL wildcard pattern into an anchored regex.

        ``*`` becomes ``.+`` (one or more characters) and ``?`` becomes
        ``.`` (exactly one character). A wildcard preceded by an odd number
        of backslashes is escaped and matched literally; the remaining
        backslashes are matched literally themselves.
        """
        pos = 0
        regex = ""
        # Each match: (optional literal prefix)(optional backslash run)(wildcard).
        for match in _re_url_pattern_wildcard.finditer(url_pattern):
            regex += re.escape(match[1]) if match[1] else ""
            if match[2]:
                if len(match[2]) % 2:
                    # Odd backslash count: the wildcard is escaped. Emit the
                    # remaining backslashes as literals plus the backslash-
                    # escaped wildcard character itself.
                    regex += f"{re.escape(match[2][:-1])}\\{match[3]}"
                else:
                    # Even backslash count: all backslashes are literal and
                    # the wildcard stays active.
                    regex += re.escape(match[2])
                    regex += ".+" if match[3] == "*" else "."
            else:
                regex += ".+" if match[3] == "*" else "."
            pos = match.end()
        # Literal tail after the last wildcard, then anchor both ends.
        regex += re.escape(url_pattern[pos:])
        return re.compile(f"^{regex}$")
@dataclass
class CMRequestProxy:
    """Mutable proxy yielded by :meth:`CDPClientSession.alter_request`.

    Whatever values it holds when the context manager exits are passed on
    to :meth:`CDPClientSession.fulfill_request`.
    """

    # Decoded response body to fulfill the request with.
    body: str
    # HTTP status code for the fulfilled response.
    response_code: int
    # Optional response headers; None leaves them unset.
    response_headers: Optional[Mapping[str, str]]
class CDPClient:
    """
    The public interface around :class:`ChromiumWebbrowser <streamlink.webbrowser.chromium.ChromiumWebbrowser>`
    and :class:`CDPConnection <streamlink.webbrowser.cdp.connection.CDPConnection>`.
    It launches the Chromium-based web browser, establishes the remote debugging WebSocket connection using
    the `Chrome Devtools Protocol <https://chromedevtools.github.io/devtools-protocol/>`_, and provides
    the :meth:`session()` method for creating a new :class:`CDPClientSession` that is tied to an empty new browser tab.
    :class:`CDPClientSession` provides a high-level API for navigating websites, intercepting network requests and responses,
    as well as evaluating JavaScript expressions and retrieving async results.
    Don't instantiate this class yourself, use the :meth:`CDPClient.launch()` async context manager classmethod.
    For low-level Chrome Devtools Protocol interfaces, please see Streamlink's automatically generated
    ``streamlink.webbrowser.cdp.devtools`` package, but be aware that only a subset of the available domains is supported.
    """

    def __init__(self, cdp_connection: CDPConnection, nursery: trio.Nursery):
        # The established CDP WebSocket connection; all sessions multiplex over it.
        self.cdp_connection = cdp_connection
        # Nursery in which the web browser process was launched.
        self.nursery = nursery

    @classmethod
    def launch(
        cls,
        session: Streamlink,
        runner: Callable[[Self], Coroutine],
        executable: Optional[str] = None,
        timeout: Optional[float] = None,
        cdp_host: Optional[str] = None,
        cdp_port: Optional[int] = None,
        cdp_timeout: Optional[float] = None,
        headless: Optional[bool] = None,
    ) -> Any:
        """
        Start a new :mod:`trio` runloop and do the following things:
        1. Launch the Chromium-based web browser using the provided parameters or respective session options
        2. Initialize a new :class:`CDPConnection <streamlink.webbrowser.cdp.connection.CDPConnection>`
           and connect to the browser's remote debugging interface
        3. Create a new :class:`CDPClient` instance
        4. Execute the async runner callback with the :class:`CDPClient` instance as only argument
        If the ``webbrowser`` session option is set to ``False``, then a :exc:`CDPError` will be raised.
        Example:
        .. code-block:: python
            async def fake_response(client_session: CDPClientSession, request: devtools.fetch.RequestPaused):
                if request.response_status_code is not None and 300 <= request.response_status_code < 400:
                    await client_session.continue_request(request)
                else:
                    async with client_session.alter_request(request) as cmproxy:
                        cmproxy.body = "<!doctype html><html><body>foo</body></html>"
            async def my_app_logic(client: CDPClient):
                async with client.session() as client_session:
                    client_session.add_request_handler(fake_response, "*")
                    async with client_session.navigate("https://google.com") as frame_id:
                        await client_session.loaded(frame_id)
                        return await client_session.evaluate("document.body.innerText")
            assert CDPClient.launch(session, my_app_logic) == "foo"
        :param session: The Streamlink session object
        :param runner: An async client callback function which receives the :class:`CDPClient` instance as only parameter.
        :param executable: Optional path to the Chromium-based web browser executable.
                           If unset, falls back to the ``webbrowser-executable`` session option.
                           Otherwise, it'll be looked up according to the rules of the :class:`ChromiumBrowser` implementation.
        :param timeout: Optional global timeout value, including web browser launch time.
                        If unset, falls back to the ``webbrowser-timeout`` session option.
        :param cdp_host: Optional remote debugging host.
                         If unset, falls back to the ``webbrowser-cdp-host`` session option.
                         Otherwise, ``127.0.0.1`` will be used.
        :param cdp_port: Optional remote debugging port.
                         If unset, falls back to the ``webbrowser-cdp-port`` session option.
                         Otherwise, a random free port will be chosen.
        :param cdp_timeout: Optional CDP command timeout value.
                            If unset, falls back to the ``webbrowser-cdp-timeout`` session option.
        :param headless: Optional boolean flag whether to launch the web browser in headless mode or not.
                         If unset, falls back to the ``webbrowser-headless`` session option.
        """
        if not session.get_option("webbrowser"):
            raise CDPError("The webbrowser API has been disabled by the user")

        async def run_wrapper() -> Any:
            # Explicit keyword arguments fall back to the session options only
            # when the caller did not provide a value.
            async with cls.run(
                session=session,
                executable=session.get_option("webbrowser-executable") if executable is None else executable,
                timeout=session.get_option("webbrowser-timeout") if timeout is None else timeout,
                cdp_host=session.get_option("webbrowser-cdp-host") if cdp_host is None else cdp_host,
                cdp_port=session.get_option("webbrowser-cdp-port") if cdp_port is None else cdp_port,
                cdp_timeout=session.get_option("webbrowser-cdp-timeout") if cdp_timeout is None else cdp_timeout,
                headless=session.get_option("webbrowser-headless") if headless is None else headless,
            ) as cdp_client:
                return await runner(cdp_client)

        return trio.run(run_wrapper)

    @classmethod
    @asynccontextmanager
    async def run(
        cls,
        session: Streamlink,
        executable: Optional[str] = None,
        timeout: Optional[float] = None,
        cdp_host: Optional[str] = None,
        cdp_port: Optional[int] = None,
        cdp_timeout: Optional[float] = None,
        headless: bool = True,
    ) -> AsyncGenerator[Self, None]:
        """
        Launch the web browser, establish the CDP connection on its remote
        debugging WebSocket, and yield a new :class:`CDPClient` instance.
        Unlike :meth:`launch`, this runs inside an already running
        :mod:`trio` runloop.
        """
        webbrowser = ChromiumWebbrowser(executable=executable, host=cdp_host, port=cdp_port, headless=headless)
        nursery: trio.Nursery
        async with webbrowser.launch(timeout=timeout) as nursery:
            websocket_url = webbrowser.get_websocket_url(session)
            cdp_connection: CDPConnection
            async with CDPConnection.create(websocket_url, timeout=cdp_timeout) as cdp_connection:
                yield cls(cdp_connection, nursery)

    @asynccontextmanager
    async def session(self, fail_unhandled_requests: bool = False) -> AsyncGenerator["CDPClientSession", None]:
        """
        Create a new CDP session on an empty target (browser tab).
        :param fail_unhandled_requests: Whether network requests which are not matched by any request handlers should fail.
        """
        cdp_session = await self.cdp_connection.new_target()
        yield CDPClientSession(self, cdp_session, fail_unhandled_requests)
class CDPClientSession:
    """
    High-level API for navigating websites, intercepting network requests/responses,
    and for evaluating async JavaScript expressions.
    Don't instantiate this class yourself, use the :meth:`CDPClient.session()` async contextmanager.
    """

    def __init__(
        self,
        cdp_client: CDPClient,
        cdp_session: CDPSession,
        fail_unhandled_requests: bool = False,
    ):
        self.cdp_client = cdp_client
        # CDP session tied to one browser target (tab).
        self.cdp_session = cdp_session
        # Whether paused requests not handled by any handler get blocked.
        self._fail_unhandled = fail_unhandled_requests
        # Registered handlers, consulted in registration order.
        self._request_handlers: List[RequestPausedHandler] = []
        # Request ids already resolved by continue/fail/fulfill.
        self._requests_handled: Set[str] = set()

    def add_request_handler(
        self,
        async_handler: TRequestHandlerCallable,
        url_pattern: str = "*",
        on_request: bool = False,
    ):
        """
        :param async_handler: An async request handler which must call :meth:`continue_request()`, :meth:`fail_request()`,
                              :meth:`fulfill_request()` or :meth:`alter_request()`, or the next matching request handler
                              will be run. If no matching request handler was found or if no matching one called one of
                              the just mentioned methods, then the request will be continued if the session was initialized
                              with ``fail_unhandled_requests=False``, otherwise it will be blocked.
        :param url_pattern: An optional URL wildcard string which defaults to ``"*"``. Only matching URLs will cause
                            ``Fetch.requestPaused`` events to be emitted over the CDP connection.
                            The async request handler will be called on each matching URL unless another request handler
                            has already handled the request (see description above).
        :param on_request: Whether to intercept the network request or the network response.
        """
        self._request_handlers.append(
            RequestPausedHandler(async_handler=async_handler, url_pattern=url_pattern, on_request=on_request),
        )

    @asynccontextmanager
    async def navigate(self, url: str, referrer: Optional[str] = None) -> AsyncGenerator[page.FrameId, None]:
        """
        Async context manager for opening the URL with an optional referrer and starting the optional interception
        of network requests and responses.
        If the target gets detached from the session, e.g. by closing the tab, then the whole CDP connection gets terminated,
        including all other concurrent sessions.
        Doesn't wait for the request to finish loading. See :meth:`loaded()`.
        :param url: The URL.
        :param referrer: An optional referrer.
        :return: Yields the ``FrameID`` that can be passed to the :meth:`loaded()` call.
        """
        # One RequestPattern per unique (url_pattern, stage) pair across all
        # registered handlers; duplicates are collapsed via the set.
        request_patterns = [
            fetch.RequestPattern(
                url_pattern=url_pattern,
                request_stage=fetch.RequestStage.REQUEST if on_request else fetch.RequestStage.RESPONSE,
            )
            for url_pattern, on_request in sorted(
                {(request_handler.url_pattern, request_handler.on_request) for request_handler in self._request_handlers},
            )
        ]
        async with trio.open_nursery() as nursery:
            # Terminate everything if the tab gets closed/detached.
            nursery.start_soon(self._on_target_detached_from_target)
            if request_patterns:
                nursery.start_soon(self._on_fetch_request_paused)
                await self.cdp_session.send(fetch.enable(request_patterns, True))
            await self.cdp_session.send(page.enable())
            try:
                frame_id, loader_id, error = await self.cdp_session.send(page.navigate(url=url, referrer=referrer))
                if error:
                    raise CDPError(f"Navigation error: {error}")
                yield frame_id
            finally:
                # Always disable page/fetch domains and stop the listeners.
                await self.cdp_session.send(page.disable())
                if request_patterns:
                    await self.cdp_session.send(fetch.disable())
                nursery.cancel_scope.cancel()

    async def loaded(self, frame_id: page.FrameId):
        """
        Wait for the navigated page to finish loading.
        """
        async for frame_stopped_loading in self.cdp_session.listen(page.FrameStoppedLoading):  # pragma: no branch
            if frame_stopped_loading.frame_id == frame_id:
                return

    async def evaluate(self, expression: str, await_promise: bool = True, timeout: Optional[float] = None) -> Any:
        """
        Evaluate an optionally async JavaScript expression and return its result.
        :param expression: The JavaScript expression.
        :param await_promise: Whether to await a returned :js:class:`Promise` object.
        :param timeout: Optional timeout override value. Uses the session's single CDP command timeout value by default,
                        which may be too short depending on the script execution time.
        :raise CDPError: On evaluation error or if the result is a subtype of :js:class:`window.Error`.
        :return: Only JS-primitive result values are supported, e.g. strings or numbers.
                 Other kinds of return values must be serialized, e.g. via :js:meth:`JSON.stringify()`.
        """
        evaluate = runtime.evaluate(
            expression=expression,
            await_promise=await_promise,
        )
        remote_obj, error = await self.cdp_session.send(evaluate, timeout=timeout)
        if error:
            raise CDPError(error.exception and error.exception.description or error.text)
        if remote_obj.type_ == "object" and remote_obj.subtype == "error":
            raise CDPError(remote_obj.description)
        return remote_obj.value

    async def continue_request(
        self,
        request: fetch.RequestPaused,
        url: Optional[str] = None,
        method: Optional[str] = None,
        post_data: Optional[str] = None,
        headers: Optional[Mapping[str, str]] = None,
    ):
        """
        Continue a request and optionally override the request method, URL, POST data or request headers.
        """
        await self.cdp_session.send(
            fetch.continue_request(
                request_id=request.request_id,
                url=url,
                method=method,
                # The CDP Fetch domain expects the POST data base64-encoded.
                post_data=base64.b64encode(post_data.encode()).decode() if post_data is not None else None,
                headers=self._headers_entries_from_mapping(headers),
            )
        )
        self._requests_handled.add(request.request_id)

    async def fail_request(
        self,
        request: fetch.RequestPaused,
        error_reason: Optional[str] = None,
    ):
        """
        Let a request fail, with an optional error reason which defaults to ``BlockedByClient``.
        """
        await self.cdp_session.send(
            fetch.fail_request(
                request_id=request.request_id,
                error_reason=network.ErrorReason(error_reason or network.ErrorReason.BLOCKED_BY_CLIENT),
            )
        )
        self._requests_handled.add(request.request_id)

    async def fulfill_request(
        self,
        request: fetch.RequestPaused,
        response_code: int = 200,
        response_headers: Optional[Mapping[str, str]] = None,
        body: Optional[str] = None,
    ) -> None:
        """
        Fulfill a response and override its status code, headers and body.
        """
        await self.cdp_session.send(
            fetch.fulfill_request(
                request_id=request.request_id,
                response_code=response_code,
                response_headers=self._headers_entries_from_mapping(response_headers),
                # The CDP Fetch domain expects the body base64-encoded.
                body=base64.b64encode(body.encode()).decode() if body is not None else None,
            )
        )
        self._requests_handled.add(request.request_id)

    @asynccontextmanager
    async def alter_request(
        self,
        request: fetch.RequestPaused,
        response_code: int = 200,
        response_headers: Optional[Mapping[str, str]] = None,
    ) -> AsyncGenerator[CMRequestProxy, None]:
        """
        Async context manager wrapper around :meth:`fulfill_request()` which retrieves the response body,
        so it can be altered. The status code and headers can be altered in the method call directly,
        or by setting the respective parameters on the context manager's proxy object.
        """
        # At the request stage (no status code yet) there is no body to fetch.
        if request.response_status_code is None:
            body = ""
        else:
            body, b64encoded = await self.cdp_session.send(fetch.get_response_body(request.request_id))
            if b64encoded:  # pragma: no branch
                body = base64.b64decode(body).decode()
        proxy = CMRequestProxy(body=body, response_code=response_code, response_headers=response_headers)
        yield proxy
        await self.fulfill_request(
            request=request,
            response_code=proxy.response_code,
            response_headers=proxy.response_headers,
            body=proxy.body,
        )

    @staticmethod
    def _headers_entries_from_mapping(headers: Optional[Mapping[str, str]]):
        """Convert a plain header mapping to a list of ``fetch.HeaderEntry``."""
        return None if headers is None else [fetch.HeaderEntry(name=name, value=value) for name, value in headers.items()]

    async def _on_target_detached_from_target(self) -> None:
        """Raise a :exc:`CDPError` when this session's target gets detached."""
        async for detached_from_target in self.cdp_client.cdp_connection.listen(target.DetachedFromTarget):
            if detached_from_target.session_id == self.cdp_session.session_id:
                raise CDPError("Target has been detached")

    async def _on_fetch_request_paused(self) -> None:
        """Dispatch each paused request to the first matching handler that
        resolves it; unresolved requests are failed or continued depending
        on ``fail_unhandled_requests``."""
        async for request in self.cdp_session.listen(fetch.RequestPaused):
            for handler in self._request_handlers:
                if not handler.matches(request):
                    continue
                await handler.async_handler(self, request)
                # Stop once a handler has resolved the request.
                if request.request_id in self._requests_handled:
                    break
            else:
                # No handler resolved the request.
                if self._fail_unhandled:
                    await self.fail_request(request)
                else:
                    await self.continue_request(request)
|
models | embedded_ssd_mobilenet_v1_feature_extractor | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Embedded-friendly SSDFeatureExtractor for MobilenetV1 features."""
import tensorflow as tf
from app.object_detection.models import (
feature_map_generators,
ssd_mobilenet_v1_feature_extractor,
)
from app.object_detection.utils import ops
from nets import mobilenet_v1
slim = tf.contrib.slim
class EmbeddedSSDMobileNetV1FeatureExtractor(
    ssd_mobilenet_v1_feature_extractor.SSDMobileNetV1FeatureExtractor
):
    """Embedded-friendly SSD Feature Extractor using MobilenetV1 features.

    This feature extractor is similar to SSD MobileNetV1 feature extractor, and
    it fixes input resolution to be 256x256, reduces the number of feature maps
    used for box prediction and ensures convolution kernel to be no larger
    than input tensor in spatial dimensions.

    This feature extractor requires support of the following ops if used in
    embedded devices:
    - Conv
    - DepthwiseConv
    - Relu6

    All conv/depthwiseconv use SAME padding, and no additional spatial padding is
    needed.
    """

    def __init__(
        self,
        is_training,
        depth_multiplier,
        min_depth,
        pad_to_multiple,
        conv_hyperparams,
        batch_norm_trainable=True,
        reuse_weights=None,
    ):
        """MobileNetV1 Feature Extractor for Embedded-friendly SSD Models.

        Args:
          is_training: whether the network is in training mode.
          depth_multiplier: float depth multiplier for feature extractor.
          min_depth: minimum feature extractor depth.
          pad_to_multiple: the nearest multiple to zero pad the input height and
            width dimensions to. For EmbeddedSSD it must be set to 1.
          conv_hyperparams: tf slim arg_scope for conv2d and separable_conv2d ops.
          batch_norm_trainable: Whether to update batch norm parameters during
            training or not. When training with a small batch size
            (e.g. 1), it is desirable to disable batch norm update and use
            pretrained batch norm params.
          reuse_weights: Whether to reuse variables. Default is None.

        Raises:
          ValueError: upon invalid `pad_to_multiple` values.
        """
        # Any padding would break the fixed 256x256 geometry this extractor
        # relies on, so only the no-op multiple of 1 is accepted.
        if pad_to_multiple != 1:
            raise ValueError(
                "Embedded-specific SSD only supports `pad_to_multiple` " "of 1."
            )
        super(EmbeddedSSDMobileNetV1FeatureExtractor, self).__init__(
            is_training,
            depth_multiplier,
            min_depth,
            pad_to_multiple,
            conv_hyperparams,
            batch_norm_trainable,
            reuse_weights,
        )

    def extract_features(self, preprocessed_inputs):
        """Extract features from preprocessed inputs.

        Args:
          preprocessed_inputs: a [batch, height, width, channels] float tensor
            representing a batch of images.

        Returns:
          feature_maps: a list of tensors where the ith tensor has shape
            [batch, height_i, width_i, depth_i]
        """
        preprocessed_inputs.get_shape().assert_has_rank(4)
        # Graph-time assert: fires at session run if the spatial dims are not
        # exactly 256x256 (the static rank check above cannot verify this for
        # dynamic shapes).
        shape_assert = tf.Assert(
            tf.logical_and(
                tf.equal(tf.shape(preprocessed_inputs)[1], 256),
                tf.equal(tf.shape(preprocessed_inputs)[2], 256),
            ),
            ["image size must be 256 in both height and width."],
        )
        # Named "from_layer" entries are taken directly from the MobileNet
        # base; the "" entries ask the generator to create new feature maps
        # with the given depth and kernel size (-1 means "not applicable").
        feature_map_layout = {
            "from_layer": ["Conv2d_11_pointwise", "Conv2d_13_pointwise", "", "", ""],
            "layer_depth": [-1, -1, 512, 256, 256],
            "conv_kernel_size": [-1, -1, 3, 3, 2],
        }
        with tf.control_dependencies([shape_assert]):
            with slim.arg_scope(self._conv_hyperparams):
                with tf.variable_scope(
                    "MobilenetV1", reuse=self._reuse_weights
                ) as scope:
                    _, image_features = mobilenet_v1.mobilenet_v1_base(
                        ops.pad_to_multiple(preprocessed_inputs, self._pad_to_multiple),
                        final_endpoint="Conv2d_13_pointwise",
                        min_depth=self._min_depth,
                        depth_multiplier=self._depth_multiplier,
                        scope=scope,
                    )
                feature_maps = feature_map_generators.multi_resolution_feature_maps(
                    feature_map_layout=feature_map_layout,
                    depth_multiplier=self._depth_multiplier,
                    min_depth=self._min_depth,
                    insert_1x1_conv=True,
                    image_features=image_features,
                )
        # NOTE(review): on Python 3 this is a dict view, not a list as the
        # docstring says; callers that index it would need list(...) — confirm
        # against call sites.
        return feature_maps.values()
|
posthog | health | # Defines the healthcheck endpoints to be used by process orchestration system
# deployments to ensure:
# 1. new deployments are not marked as ready if they are misconfigured, e.g.
# kafka settings are wrong
# 2. pods that are dead for some reason are taken out of service
# 3. traffic is not routed to pods that we know we fail to handle it
# successfully. e.g. if an events pod can't reach kafka, we know that it
# shouldn't get http traffic routed to it.
# See
# https://kubernetes.io/docs/tasks/configure-pod-container/configure-liveness-readiness-startup-probes/
# for generic k8s docs on healthchecks.
# I have specifically not reused the statuses in instance_status. These health
# endpoints are for a very specific purpose and we want to make sure that any
# changes to them are deliberate, as otherwise we could introduce unexpected
# behaviour in deployments.
from typing import Callable, Dict, List, Literal, cast, get_args
import amqp.exceptions
import django_redis.exceptions
import kombu.exceptions
import redis.exceptions
from clickhouse_driver.errors import Error as ClickhouseError
from django.core.cache import cache
from django.db import DEFAULT_DB_ALIAS
from django.db import Error as DjangoDatabaseError
from django.db import connections
from django.db.migrations.executor import MigrationExecutor
from django.http import HttpRequest, HttpResponse, JsonResponse
from posthog.celery import app
from posthog.client import sync_execute
from posthog.kafka_client.client import can_connect as can_connect_to_kafka
from structlog import get_logger
logger = get_logger(__name__)
# A process advertises one of these roles via the `role` query param; readyz
# uses it to decide which dependency checks are hard requirements.
ServiceRole = Literal["events", "web", "worker", "decide"]

# Hard dependencies per role: every listed check must pass for readyz to
# return 200 (unless explicitly excluded via the `exclude` query param).
service_dependencies: Dict[ServiceRole, List[str]] = {
    "events": ["http", "kafka_connected"],
    "web": [
        "http",
        # NOTE: we include Postgres because the way we use django means every request hits the DB
        # https://posthog.slack.com/archives/C02E3BKC78F/p1679669676438729
        "postgres",
        "postgres_migrations_uptodate",
        "cache",
        # NOTE: we do not include clickhouse for web, as even without clickhouse we
        # want to be able to display something to the user.
        # "clickhouse"
        # NOTE: we do not include "celery_broker" as web could still do lot's of
        # useful things
        # "celery_broker"
    ],
    # NOTE: we can be pretty picky about what the worker needs as by its nature
    # of reading from a durable queue rather that being required to perform
    # request/response, we are more resilient to service downtime.
    "worker": [
        "http",
        "postgres",
        "postgres_migrations_uptodate",
        "clickhouse",
        "celery_broker",
    ],
    "decide": ["http"],
}

# if atleast one of the checks is True, then the service is considered healthy
# for the given role
service_conditional_dependencies: Dict[ServiceRole, List[str]] = {
    "decide": ["cache", "postgres"],
}
def livez(request: HttpRequest):
    """Liveness probe: prove the process can still serve a trivial response.

    Dependencies are deliberately not checked here — this only shows the
    service hasn't completely locked up, so it is a weaker signal than readyz
    but can be polled aggressively to evict obviously broken pods quickly.
    """
    return JsonResponse({"http": True})
def readyz(request: HttpRequest):
    """
    Validate that everything this process need to operate correctly is in place.
    Returns a dict of checks to boolean status, returning 503 status if any of
    them is non-True

    This should be used to validate if the service is ready to serve traffic.
    This can either be HTTP requests, or e.g. if a celery worker should be
    considered ready such that old workers are removed, within a k8s deployment.

    We accept a `exclude` parameter such that we can exclude certain checks from
    producing a 5xx response. This way we can distinguish between the different
    critical dependencies for each k8s deployment, e.g. the events pod 100%
    needs kafka to operate. For the web server however, this is debatable. The
    web server does a lot of stuff, and kafka is only used I believe for sending
    merge person events, so we'd rather stay up with degraded functionality,
    rather than take the website UI down.

    We also accept an optional `role` parameter which can be any `ServiceRole`,
    and can be used to specify that a subset of dependencies should be checked,
    specific to the role a process is playing.
    """
    exclude = set(request.GET.getlist("exclude", []))
    role = request.GET.get("role", None)

    if role and role not in get_args(ServiceRole):
        return JsonResponse({"error": "InvalidRole"}, status=400)

    # Every check we know how to run, keyed by the name used in responses,
    # `exclude` params and the service_dependencies tables.
    available_checks = {
        "clickhouse": is_clickhouse_connected,
        "postgres": is_postgres_connected,
        "postgres_migrations_uptodate": are_postgres_migrations_uptodate,
        "kafka_connected": is_kafka_connected,
        "celery_broker": is_celery_broker_connected,
        "cache": is_cache_backend_connected,
    }
    conditional_checks = {}

    if role:
        # If we have a role, then limit the checks to a subset defined by the
        # service_dependencies for this specific role, defaulting to all if we
        # don't find a lookup
        dependencies = service_dependencies.get(
            cast(ServiceRole, role), available_checks.keys()
        )
        conditional_dependencies = (
            service_conditional_dependencies.get(cast(ServiceRole, role)) or []
        )
        # Conditional checks are OR-ed: the role is healthy if at least one
        # of them passes (see service_conditional_dependencies).
        conditional_checks = {
            name: check
            for name, check in available_checks.items()
            if name in conditional_dependencies
        }
        available_checks = {
            name: check
            for name, check in available_checks.items()
            if name in dependencies
        }

    # Run each check and collect the status
    # TODO: handle time bounding checks
    # TODO: handle concurrent checks(?). Only if it becomes an issue, at which
    # point maybe we're doing too many checks or they are too intensive.
    evaluated_checks = {name: check() for name, check in available_checks.items()}
    evaluated_conditional_checks = {
        name: check() for name, check in conditional_checks.items()
    }

    # Hard checks: every non-excluded check must pass for a 200.
    prelim_status = (
        200
        if all(
            check_status
            for name, check_status in evaluated_checks.items()
            if name not in exclude
        )
        else 503
    )

    if prelim_status == 200 and evaluated_conditional_checks:
        # If there are any conditional checks, then run them
        status = (
            200
            if any(
                check_status for _, check_status in evaluated_conditional_checks.items()
            )
            else 503
        )
    else:
        status = prelim_status

    return JsonResponse(evaluated_checks, status=status)
def is_kafka_connected() -> bool:
    """
    Check that we can reach Kafka.

    Returns `True` if connected, `False` otherwise.

    NOTE: we are only checking the Producer here, as currently this process
    does not Consume from Kafka.
    """
    return can_connect_to_kafka()
def is_postgres_connected() -> bool:
    """Probe the default Postgres connection with a trivial ``SELECT 1``.

    Returns ``True`` when the query succeeds, ``False`` on any database error.
    """
    connection = connections[DEFAULT_DB_ALIAS]
    try:
        with connection.cursor() as cursor:
            cursor.execute("SELECT 1")
    except DjangoDatabaseError:
        logger.debug("postgres_connection_failure", exc_info=True)
        return False
    else:
        return True
def are_postgres_migrations_uptodate() -> bool:
    """Report whether every migration known to this code has been applied.

    Builds a migration plan towards the leaf nodes of the migration graph;
    an empty plan means we are up to date. Database errors count as not
    up to date.
    """
    try:
        executor = MigrationExecutor(connections[DEFAULT_DB_ALIAS])
        targets = executor.loader.graph.leaf_nodes()
        outstanding = executor.migration_plan(targets)
    except DjangoDatabaseError:
        logger.debug("postgres_migrations_check_failure", exc_info=True)
        return False
    return len(outstanding) == 0
def is_clickhouse_connected() -> bool:
    """Run a trivial query against ClickHouse.

    Returns ``True`` on success, ``False`` on any ClickHouse error.
    """
    try:
        sync_execute("SELECT 1")
    except ClickhouseError:
        logger.debug("clickhouse_connection_failure", exc_info=True)
        return False
    else:
        return True
def is_celery_broker_connected() -> bool:
    """Try to open a read connection to the celery broker.

    Returns ``True`` on success, ``False`` otherwise.

    NOTE: this possibly exercises more than raw broker reachability, but it
    is the most convenient hook celery exposes.
    """
    try:
        connection = app.connection_for_read()
        connection.ensure_connection(timeout=0, max_retries=0)
    except (amqp.exceptions.AMQPError, kombu.exceptions.KombuError):
        # Not sure exactly what could be raised, so we catch all AMQP and
        # Kombu errors.
        logger.debug("celery_broker_connection_failure", exc_info=True)
        return False
    return True
def is_cache_backend_connected() -> bool:
    """Check the (redis-backed) django cache is reachable.

    Used for at least:

        1. django cache
        2. axes failure rate limiting

    Returns ``True`` if so, ``False`` otherwise.
    """
    try:
        # has_key forces the lazy cache client to actually connect; a generic
        # `check_health` hook would be nicer but django doesn't expose one.
        cache.has_key("_connection_test_key")  # noqa: W601
    except (redis.exceptions.RedisError, django_redis.exceptions.ConnectionInterrupted):
        # django's cache has no backend-agnostic exception type, so we list
        # the ones redis / django_redis are known to raise (django_redis
        # mostly passes through the underlying redis exception).
        logger.debug("cache_backend_connection_failure", exc_info=True)
        return False
    return True
def healthcheck_middleware(get_response: Callable[[HttpRequest], HttpResponse]):
    """Serve ready/liveness responses before any inner middleware runs.

    Requests to the healthcheck paths short-circuit here; every other request
    is passed down the middleware chain untouched.
    """
    handlers = {
        "/_readyz": readyz,
        "/_livez": livez,
    }

    def middleware(request: HttpRequest) -> HttpResponse:
        handler = handlers.get(request.path)
        if handler is not None:
            return handler(request)
        return get_response(request)

    return middleware
|
hangups | setup | import os
import sys
from setuptools import setup
if sys.version_info < (3, 6):
raise RuntimeError("hangups requires Python 3.6+")
# Find __version__ without import that requires dependencies to be installed:
exec(open(os.path.join(os.path.dirname(__file__), "hangups/version.py")).read())
with open("README.rst") as f:
readme = f.read()
# Dependencies should be specified as a specific version or version range that
# is unlikely to break compatibility in the future. This is required to prevent
# hangups from breaking when new versions of dependencies are released,
# especially for end-users (non-developers) who use pip to install hangups.
install_requires = [
"ConfigArgParse>=0.11.0,<2",
"aiohttp>=3.7,<4",
"async-timeout>=2,<5",
"appdirs>=1.4,<1.5",
"readlike>=0.1.2,<0.2",
"requests>=2.6.0,<3", # uses semantic versioning (after 2.6)
"ReParser==1.4.3",
"protobuf>=3.1.0,<4",
"urwid>=1.3.1,<2.2",
"MechanicalSoup>=0.6.0,<0.13",
]
setup(
name="hangups",
version=__version__,
description=(
"the first third-party instant messaging client for Google " "Hangouts"
),
long_description=readme,
url="https://github.com/tdryer/hangups",
author="Tom Dryer",
author_email="tomdryer.com@gmail.com",
license="MIT",
classifiers=[
"Development Status :: 3 - Alpha",
"Intended Audience :: Developers",
"Intended Audience :: End Users/Desktop",
"Natural Language :: English",
"License :: OSI Approved :: MIT License",
"Programming Language :: Python :: 3 :: Only",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.9",
"Programming Language :: Python :: 3.10",
"Topic :: Communications :: Chat",
"Environment :: Console :: Curses",
],
packages=["hangups", "hangups.ui"],
install_requires=install_requires,
entry_points={
"console_scripts": [
"hangups=hangups.ui.__main__:main",
],
},
)
|
internal | daemons | """
This module provides an interface to spawn a detached task to be
run with httpie.internal.daemon_runner on a separate process. It is
based on DVC's daemon system.
https://github.com/iterative/dvc/blob/main/dvc/daemon.py
"""
import inspect
import os
import platform
import sys
from contextlib import suppress
from subprocess import DEVNULL, Popen
from typing import Dict, List
import httpie.__main__
from httpie.compat import is_frozen, is_windows
ProcessContext = Dict[str, str]
def _start_process(cmd: List[str], **kwargs) -> Popen:
    """Launch the daemon command with stdout/stderr silenced.

    When frozen, ``sys.executable`` is the ``http`` binary itself; otherwise
    it is the Python interpreter, so the httpie ``__main__`` module path is
    inserted before the command.
    """
    invocation = [sys.executable]
    if not is_frozen:
        invocation.append(httpie.__main__.__file__)
    invocation.extend(cmd)
    return Popen(
        invocation,
        close_fds=True,
        shell=False,
        stdout=DEVNULL,
        stderr=DEVNULL,
        **kwargs,
    )
def _spawn_windows(cmd: List[str], process_context: ProcessContext) -> None:
    """Start a detached background process on Windows.

    A new process group with no console window keeps the daemon from
    interfering with the user's terminal.
    """
    from subprocess import (
        CREATE_NEW_PROCESS_GROUP,
        CREATE_NO_WINDOW,
        STARTF_USESHOWWINDOW,
        STARTUPINFO,
    )

    # https://stackoverflow.com/a/7006424
    # https://bugs.python.org/issue41619
    startupinfo = STARTUPINFO()
    startupinfo.dwFlags |= STARTF_USESHOWWINDOW
    _start_process(
        cmd,
        env=process_context,
        creationflags=CREATE_NEW_PROCESS_GROUP | CREATE_NO_WINDOW,
        startupinfo=startupinfo,
    )
def _spawn_posix(args: List[str], process_context: ProcessContext) -> None:
    """
    Perform a double fork procedure [1] to detach from the parent
    process so that we don't block the user even if their original
    command's execution is done but the release fetcher is not.

    [1]: https://pubs.opengroup.org/onlinepubs/9699919799/basedefs/V1_chap11.html#tag_11_01_03
    """
    from httpie.core import main

    # First fork: the parent returns immediately so the caller isn't blocked.
    try:
        pid = os.fork()
        if pid > 0:
            return
    except OSError:
        os._exit(1)

    # Become a session leader, detaching from the controlling terminal.
    os.setsid()

    # Second fork: the session leader exits, leaving a grandchild that can
    # never reacquire a controlling terminal.
    try:
        pid = os.fork()
        if pid > 0:
            os._exit(0)
    except OSError:
        os._exit(1)

    # Close all standard inputs/outputs
    sys.stdin.close()
    sys.stdout.close()
    sys.stderr.close()

    if platform.system() == "Darwin":
        # Double-fork is not reliable on MacOS, so we'll use a subprocess
        # to ensure the task is isolated properly.
        process = _start_process(args, env=process_context)
        # Unlike windows, since we already completed the fork procedure
        # we can simply join the process and wait for it.
        process.communicate()
    else:
        os.environ.update(process_context)
        # Best-effort: swallow anything main() raises so the detached child
        # always exits cleanly below.
        with suppress(BaseException):
            main(["http"] + args)

    os._exit(0)
def _spawn(args: List[str], process_context: ProcessContext) -> None:
    """
    Spawn a new process to run the given command, dispatching to the
    platform-specific implementation.
    """
    spawn_impl = _spawn_windows if is_windows else _spawn_posix
    spawn_impl(args, process_context)
def spawn_daemon(task: str) -> None:
    """Run *task* as a detached ``--daemon`` process inheriting our environment."""
    daemon_args = [task, "--daemon"]
    process_context = os.environ.copy()
    if not is_frozen:
        # Point PYTHONPATH at the project root (three levels up from this
        # file) so the child can import httpie from a source checkout.
        this_file = os.path.abspath(inspect.stack()[0][1])
        project_root = os.path.dirname(
            os.path.dirname(os.path.dirname(this_file))
        )
        process_context["PYTHONPATH"] = project_root
    _spawn(daemon_args, process_context)
|
util | ssl_ | from binascii import hexlify, unhexlify
from hashlib import md5, sha1, sha256
from ..exceptions import InsecurePlatformWarning, SSLError
SSLContext = None
HAS_SNI = False
create_default_context = None
import errno
import warnings
try: # Test for SSL features
import ssl
from ssl import HAS_SNI # Has SNI?
from ssl import CERT_NONE, PROTOCOL_SSLv23, wrap_socket
except ImportError:
pass
try:
    from ssl import OP_NO_COMPRESSION, OP_NO_SSLv2, OP_NO_SSLv3
except ImportError:
    # Older Pythons don't expose these constants; fall back to the raw
    # option flag values so the bit-twiddling below still works.
    OP_NO_SSLv2, OP_NO_SSLv3 = 0x1000000, 0x2000000
    OP_NO_COMPRESSION = 0x20000
# A secure default.
# Sources for more information on TLS ciphers:
#
# - https://wiki.mozilla.org/Security/Server_Side_TLS
# - https://www.ssllabs.com/projects/best-practices/index.html
# - https://hynek.me/articles/hardening-your-web-servers-ssl-ciphers/
#
# The general intent is:
# - Prefer cipher suites that offer perfect forward secrecy (DHE/ECDHE),
# - prefer ECDHE over DHE for better performance,
# - prefer any AES-GCM over any AES-CBC for better performance and security,
# - use 3DES as fallback which is secure but slow,
# - disable NULL authentication, MD5 MACs and DSS for security reasons.
DEFAULT_CIPHERS = (
"ECDH+AESGCM:DH+AESGCM:ECDH+AES256:DH+AES256:ECDH+AES128:DH+AES:ECDH+HIGH:"
"DH+HIGH:ECDH+3DES:DH+3DES:RSA+AESGCM:RSA+AES:RSA+HIGH:RSA+3DES:!aNULL:"
"!eNULL:!MD5"
)
try:
    from ssl import SSLContext  # Modern SSL?
except ImportError:
    import sys

    # Minimal stand-in for ssl.SSLContext on very old Pythons: it records the
    # desired configuration and replays it through ssl.wrap_socket() when the
    # socket is wrapped.
    class SSLContext(object):  # Platform-specific: Python 2 & 3.1
        # ssl.wrap_socket() only accepts a `ciphers` argument on 2.7+/3.2+.
        supports_set_ciphers = (2, 7) <= sys.version_info < (3,) or (
            3,
            2,
        ) <= sys.version_info

        def __init__(self, protocol_version):
            self.protocol = protocol_version
            # Use default values from a real SSLContext
            self.check_hostname = False
            self.verify_mode = ssl.CERT_NONE
            self.ca_certs = None
            self.options = 0
            self.certfile = None
            self.keyfile = None
            self.ciphers = None

        def load_cert_chain(self, certfile, keyfile):
            self.certfile = certfile
            self.keyfile = keyfile

        def load_verify_locations(self, location):
            self.ca_certs = location

        def set_ciphers(self, cipher_suite):
            if not self.supports_set_ciphers:
                raise TypeError(
                    "Your version of Python does not support setting "
                    "a custom cipher suite. Please upgrade to Python "
                    "2.7, 3.2, or later if you need this functionality."
                )
            self.ciphers = cipher_suite

        def wrap_socket(self, socket, server_hostname=None):
            # server_hostname (SNI) cannot be honored without a real
            # SSLContext; warn so users know verification is degraded.
            warnings.warn(
                "A true SSLContext object is not available. This prevents "
                "urllib3 from configuring SSL appropriately and may cause "
                "certain SSL connections to fail. For more information, see "
                "https://urllib3.readthedocs.org/en/latest/security.html"
                "#insecureplatformwarning.",
                InsecurePlatformWarning,
            )
            kwargs = {
                "keyfile": self.keyfile,
                "certfile": self.certfile,
                "ca_certs": self.ca_certs,
                "cert_reqs": self.verify_mode,
                "ssl_version": self.protocol,
            }
            if self.supports_set_ciphers:  # Platform-specific: Python 2.7+
                return wrap_socket(socket, ciphers=self.ciphers, **kwargs)
            else:  # Platform-specific: Python 2.6
                return wrap_socket(socket, **kwargs)
def assert_fingerprint(cert, fingerprint):
    """
    Checks if given fingerprint matches the supplied certificate.

    :param cert:
        Certificate as bytes object.
    :param fingerprint:
        Fingerprint as string of hexdigits, can be interspersed by colons.
    :raises SSLError:
        If the fingerprint has an unsupported length or does not match.
    """
    # Local import keeps module-level imports untouched; hmac is stdlib
    # (compare_digest available on 2.7.7+/3.3+).
    import hmac

    # Maps the length of a digest to a possible hash function producing
    # this digest.
    hashfunc_map = {
        16: md5,
        20: sha1,
        32: sha256,
    }

    fingerprint = fingerprint.replace(":", "").lower()
    digest_length, odd = divmod(len(fingerprint), 2)

    if odd or digest_length not in hashfunc_map:
        raise SSLError("Fingerprint is of invalid length.")

    # We need encode() here for py32; works on py2 and p33.
    fingerprint_bytes = unhexlify(fingerprint.encode())

    hashfunc = hashfunc_map[digest_length]
    cert_digest = hashfunc(cert).digest()

    # Constant-time comparison: a plain `==` short-circuits on the first
    # differing byte and leaks how much of the fingerprint matched (timing
    # side channel).
    if not hmac.compare_digest(cert_digest, fingerprint_bytes):
        raise SSLError(
            'Fingerprints did not match. Expected "{0}", got "{1}".'.format(
                hexlify(fingerprint_bytes), hexlify(cert_digest)
            )
        )
def resolve_cert_reqs(candidate):
    """
    Resolve *candidate* to a numeric constant suitable for the ``cert_reqs``
    argument of ``wrap_socket``.

    Defaults to :data:`ssl.CERT_NONE` when *candidate* is ``None``. A string
    is looked up as an attribute of the :mod:`ssl` module, either verbatim or
    with a ``CERT_`` prefix (so ``"REQUIRED"`` works like ``"CERT_REQUIRED"``).
    Any other value is assumed to already be the numeric constant and is
    returned unchanged.
    """
    if candidate is None:
        return CERT_NONE

    if not isinstance(candidate, str):
        return candidate

    resolved = getattr(ssl, candidate, None)
    if resolved is None:
        resolved = getattr(ssl, "CERT_" + candidate)
    return resolved
def resolve_ssl_version(candidate):
    """
    Resolve *candidate* to an ``ssl.PROTOCOL_*`` constant.

    Like :func:`resolve_cert_reqs`, but defaults to
    :data:`ssl.PROTOCOL_SSLv23` and tries the ``PROTOCOL_`` prefix when
    looking up string names.
    """
    if candidate is None:
        return PROTOCOL_SSLv23

    if not isinstance(candidate, str):
        return candidate

    resolved = getattr(ssl, candidate, None)
    if resolved is None:
        resolved = getattr(ssl, "PROTOCOL_" + candidate)
    return resolved
def create_urllib3_context(
    ssl_version=None, cert_reqs=None, options=None, ciphers=None
):
    """All arguments have the same meaning as ``ssl_wrap_socket``.

    By default, this function does a lot of the same work that
    ``ssl.create_default_context`` does on Python 3.4+. It:

    - Disables SSLv2, SSLv3, and compression
    - Sets a restricted set of server ciphers

    If you wish to enable SSLv3, you can do::

        from urllib3.util import ssl_
        context = ssl_.create_urllib3_context()
        context.options &= ~ssl_.OP_NO_SSLv3

    You can do the same to enable compression (substituting ``COMPRESSION``
    for ``SSLv3`` in the last line above).

    :param ssl_version:
        The desired protocol version to use. This will default to
        PROTOCOL_SSLv23 which will negotiate the highest protocol that both
        the server and your installation of OpenSSL support.
    :param cert_reqs:
        Whether to require the certificate verification. This defaults to
        ``ssl.CERT_REQUIRED``.
    :param options:
        Specific OpenSSL options. These default to ``ssl.OP_NO_SSLv2``,
        ``ssl.OP_NO_SSLv3``, ``ssl.OP_NO_COMPRESSION``.
    :param ciphers:
        Which cipher suites to allow the server to select.
    :returns:
        Constructed SSLContext object with specified options
    :rtype: SSLContext
    """
    context = SSLContext(ssl_version or ssl.PROTOCOL_SSLv23)

    # Setting the default here, as we may have no ssl module on import
    cert_reqs = ssl.CERT_REQUIRED if cert_reqs is None else cert_reqs

    if options is None:
        # SSLv2 and SSLv3 are easily broken / dangerous protocols, and
        # compression enables the CRIME attack (issue #309) -- disable all
        # three unless the caller provided explicit options.
        options = OP_NO_SSLv2 | OP_NO_SSLv3 | OP_NO_COMPRESSION
    context.options |= options

    if getattr(context, "supports_set_ciphers", True):  # Platform-specific: Python 2.6
        context.set_ciphers(ciphers or DEFAULT_CIPHERS)

    context.verify_mode = cert_reqs
    if (
        getattr(context, "check_hostname", None) is not None
    ):  # Platform-specific: Python 3.2
        # We do our own verification, including fingerprints and alternative
        # hostnames. So disable it here
        context.check_hostname = False
    return context
def ssl_wrap_socket(
    sock,
    keyfile=None,
    certfile=None,
    cert_reqs=None,
    ca_certs=None,
    server_hostname=None,
    ssl_version=None,
    ciphers=None,
    ssl_context=None,
):
    """
    All arguments except for server_hostname and ssl_context have the same
    meaning as they do when using :func:`ssl.wrap_socket`.

    :param server_hostname:
        When SNI is supported, the expected hostname of the certificate
    :param ssl_context:
        A pre-made :class:`SSLContext` object. If none is provided, one will
        be created using :func:`create_urllib3_context`.
    :param ciphers:
        A string of ciphers we wish the client to support. This is not
        supported on Python 2.6 as the ssl module does not support it.
    """
    context = ssl_context
    if context is None:
        # NOTE: ssl_version, cert_reqs and ciphers are ignored when the
        # caller supplies a pre-made ssl_context.
        context = create_urllib3_context(ssl_version, cert_reqs, ciphers=ciphers)

    if ca_certs:
        try:
            context.load_verify_locations(ca_certs)
        except IOError as e:  # Platform-specific: Python 2.6, 2.7, 3.2
            raise SSLError(e)
        # Py33 raises FileNotFoundError which subclasses OSError
        # These are not equivalent unless we check the errno attribute
        except OSError as e:  # Platform-specific: Python 3.3 and beyond
            if e.errno == errno.ENOENT:
                raise SSLError(e)
            raise
    if certfile:
        context.load_cert_chain(certfile, keyfile)
    if HAS_SNI:  # Platform-specific: OpenSSL with enabled SNI
        return context.wrap_socket(sock, server_hostname=server_hostname)

    # Without SNI support the hostname cannot be sent in the handshake.
    return context.wrap_socket(sock)
|
tasks | notify_about_gaps_in_schedule | import datetime
import pytz
from apps.slack.utils import format_datetime_to_slack_with_time, post_message_to_channel
from celery.utils.log import get_task_logger
from common.custom_celery_tasks import shared_dedicated_queue_retry_task
from django.core.cache import cache
from django.utils import timezone
task_logger = get_task_logger(__name__)
@shared_dedicated_queue_retry_task()
def start_check_gaps_in_schedule():
    """Fan out a check_gaps_in_schedule task for every schedule."""
    from apps.schedules.models import OnCallSchedule

    task_logger.info("Start start_check_gaps_in_schedule")
    for schedule in OnCallSchedule.objects.all():
        check_gaps_in_schedule.apply_async((schedule.pk,))
    task_logger.info("Finish start_check_gaps_in_schedule")
@shared_dedicated_queue_retry_task()
def check_gaps_in_schedule(schedule_pk):
    """Recalculate next week's gaps for one schedule; no-op if it's gone."""
    from apps.schedules.models import OnCallSchedule

    task_logger.info(f"Start check_gaps_in_schedule {schedule_pk}")
    try:
        schedule = OnCallSchedule.objects.get(pk=schedule_pk)
    except OnCallSchedule.DoesNotExist:
        task_logger.info(
            f"Tried to check_gaps_in_schedule for non-existing schedule {schedule_pk}"
        )
        return
    schedule.check_gaps_for_next_week()
    task_logger.info(f"Finish check_gaps_in_schedule {schedule_pk}")
@shared_dedicated_queue_retry_task()
def start_notify_about_gaps_in_schedule():
    """Fan out gap notifications for schedules not reported on in the last week.

    Only schedules with a Slack channel configured are considered.
    """
    from apps.schedules.models import OnCallSchedule

    task_logger.info("Start start_notify_about_gaps_in_schedule")
    week_ago = timezone.now().date() - timezone.timedelta(days=7)
    stale_schedules = OnCallSchedule.objects.filter(
        gaps_report_sent_at__lte=week_ago,
        channel__isnull=False,
    )
    for schedule in stale_schedules:
        notify_about_gaps_in_schedule.apply_async((schedule.pk,))
    task_logger.info("Finish start_notify_about_gaps_in_schedule")
@shared_dedicated_queue_retry_task()
def notify_about_gaps_in_schedule(schedule_pk):
    # Posts a Slack message listing unassigned periods in the schedule's next
    # 7 days, and records the report state on the schedule.
    from apps.schedules.models import OnCallSchedule

    task_logger.info(f"Start notify_about_gaps_in_schedule {schedule_pk}")
    cache_key = get_cache_key_notify_about_gaps_in_schedule(schedule_pk)
    cached_task_id = cache.get(cache_key)
    current_task_id = notify_about_gaps_in_schedule.request.id
    # Debounce: if a different task id was cached (by
    # schedule_notify_about_gaps_in_schedule), a newer notification supersedes
    # this one. A missing cache entry (None) means a direct invocation, which
    # is always allowed to run.
    if current_task_id != cached_task_id and cached_task_id is not None:
        return
    try:
        schedule = OnCallSchedule.objects.get(pk=schedule_pk, channel__isnull=False)
    except OnCallSchedule.DoesNotExist:
        task_logger.info(
            f"Tried to notify_about_gaps_in_schedule for non-existing schedule {schedule_pk}"
        )
        return
    now = timezone.now()
    # Only real gaps count: unassigned periods within an otherwise non-empty
    # schedule window.
    events = schedule.final_events(now, now + datetime.timedelta(days=7))
    gaps = [event for event in events if event["is_gap"] and not event["is_empty"]]
    schedule.gaps_report_sent_at = now.date()
    if len(gaps) != 0:
        schedule.has_gaps = True
        text = f"There are time periods that are unassigned in *{schedule.name}* on-call schedule.\n"
        for idx, gap in enumerate(gaps):
            # Open-ended gaps have a falsy start/end and render as "...".
            if gap["start"]:
                start_verbal = format_datetime_to_slack_with_time(
                    gap["start"].astimezone(pytz.UTC).timestamp()
                )
            else:
                start_verbal = "..."
            if gap["end"]:
                end_verbal = format_datetime_to_slack_with_time(
                    gap["end"].astimezone(pytz.UTC).timestamp()
                )
            else:
                end_verbal = "..."
            text += f"From {start_verbal} to {end_verbal} (your TZ)\n"
            if idx != len(gaps) - 1:
                text += "\n\n"
        post_message_to_channel(schedule.organization, schedule.channel, text)
    else:
        schedule.has_gaps = False
    schedule.save(update_fields=["gaps_report_sent_at", "has_gaps"])
    task_logger.info(f"Finish notify_about_gaps_in_schedule {schedule_pk}")
def get_cache_key_notify_about_gaps_in_schedule(schedule_pk):
    """Build the cache key used to debounce gap notifications per schedule."""
    prefix = "notify_about_gaps_in_schedule"
    return f"{prefix}_{schedule_pk}"
# Consistency fix: every other task in this module applies the decorator
# factory as `@shared_dedicated_queue_retry_task()`; the bare form passed the
# function itself to the factory, which only works if the factory special-cases
# that call style.
@shared_dedicated_queue_retry_task()
def schedule_notify_about_gaps_in_schedule(schedule_pk):
    """Debounce-schedule a gap notification for one schedule.

    The notify task is delayed by START_TASK_DELAY seconds and its id is
    cached; notify_about_gaps_in_schedule only runs when its id still matches
    the cached one, so rapid repeated calls collapse into the latest task.
    """
    CACHE_LIFETIME = 600
    START_TASK_DELAY = 60
    task = notify_about_gaps_in_schedule.apply_async(
        args=[schedule_pk], countdown=START_TASK_DELAY
    )
    cache_key = get_cache_key_notify_about_gaps_in_schedule(schedule_pk)
    cache.set(cache_key, task.id, timeout=CACHE_LIFETIME)
|
disc | eaclog | # -*- coding: utf-8 -*-
#
# fix-header: nolicense
# MIT License
#
# Copyright(c) 2018 Konstantin Mochalov
# Copyright(c) 2022 Philipp Wolfer
# Copyright(c) 2022 Jeffrey Bosboom
#
# Original code from https://gist.github.com/kolen/765526
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files(the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and / or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import re
from picard.disc.utils import TocEntry, calculate_mb_toc_numbers
from picard.util import detect_unicode_encoding
RE_TOC_TABLE_HEADER = re.compile(
r""" \s*
\s*.+\s+ \| # track
\s+.+\s+ \| # start
\s+.+\s+ \| # length
\s+.+\s+ \| # start sector
\s+.+\s*$ # end sector
""",
re.VERBOSE,
)
RE_TOC_TABLE_LINE = re.compile(
r"""
\s*
(?P<num>\d+)
\s*\|\s*
(?P<start_time>[0-9:.]+)
\s*\|\s*
(?P<length_time>[0-9:.]+)
\s*\|\s*
(?P<start_sector>\d+)
\s*\|\s*
(?P<end_sector>\d+)
\s*$""",
re.VERBOSE,
)
def filter_toc_entries(lines):
    """
    Take iterator of lines, return iterator of toc entries.

    First locates the TOC table header, then yields a TocEntry for each
    following table row until the first non-matching line.
    """
    # Search the TOC table header. Matching on the column separators rather
    # than the column names allows internationalized EAC output where the
    # column headings may differ.
    for line in lines:
        if RE_TOC_TABLE_HEADER.match(line):
            # Skip over the table header separator. Use a default so that an
            # EOF right after the header does not raise StopIteration inside
            # this generator — PEP 479 turns that into a RuntimeError on
            # Python 3.7+.
            next(lines, None)
            break
    for line in lines:
        m = RE_TOC_TABLE_LINE.search(line)
        if not m:
            break
        yield TocEntry(int(m["num"]), int(m["start_sector"]), int(m["end_sector"]))
def toc_from_file(path):
    """Reads EAC / XLD / fre:ac log files, generates MusicBrainz disc TOC listing for use as discid.

    Warning: may work wrong for discs having data tracks. May generate wrong
    results on other non-standard cases.
    """
    # The log encoding varies between rippers; sniff it before opening.
    encoding = detect_unicode_encoding(path)
    with open(path, "r", encoding=encoding) as logfile:
        entries = filter_toc_entries(logfile)
        return calculate_mb_toc_numbers(entries)
|
navigation | mtms | import random
import time
import invesalius.data.coregistration as dcr
import numpy as np
import pandas as pd
import win32com.client
class mTMS:
    """Driver for the mTMS LabVIEW ActiveX server used during navigation.

    Windows-only: requires the ``win32com`` package and the mTMS LabVIEW
    ActiveX server. Stimulation targets are matched against a pulse-parameter
    file exported by the mTMS software, and every delivered stimulus is
    recorded in ``self.df`` for later export via ``SaveSequence()``.
    """

    def __init__(self):
        # TODO: create dialog to input mtms_path and vi
        mtms_path = "C:\\mTMS\\Labview\\Builds\\mTMS 3.1 hack"
        vipath = (
            mtms_path
            + "\\mTMS ActiveX Server\\mTMS ActiveX Server.exe\\mTMS ActiveX Server.vi"
        )
        # Connect to the ActiveX server
        mtms_app = win32com.client.Dispatch("MTMSActiveXServer.Application")
        self.vi = mtms_app.getvireference(vipath)
        # Log name used when saving stimulation sequences
        self.log_name = "mtms_subject_00_run_0"
        # self.vi.SetControlValue(self.log_name, 'Experiment 1a')
        self.intensity = self.vi.GetControlValue("Get Intensity")
        # self.intensity = 20
        # One row per delivered stimulus; flushed to CSV by SaveSequence().
        self.df = pd.DataFrame(
            [],
            columns=["mTMS_target", "brain_target(nav)", "coil_pose(nav)", "intensity"],
        )

    def CheckTargets(self, coil_pose, brain_target_list):
        """Return True only if every brain target maps to a valid mTMS entry.

        NOTE(review): unlike UpdateTarget, this uses the unflipped
        coordinates and swapped target/img arguments — confirm this
        asymmetry is intentional.
        """
        for brain_target in brain_target_list:
            distance = dcr.ComputeRelativeDistanceToTarget(
                target_coord=brain_target, img_coord=coil_pose
            )
            offset = self.GetOffset(distance)
            mTMS_target, mTMS_index_target = self.FindmTMSParameters(offset)
            if not len(mTMS_index_target[0]):
                print("Not possible to stimulate the target: ", offset)
                return False
        return True

    def UpdateTargetSequence(self, coil_pose, brain_target_list):
        """Stimulate each target several times, in randomized order."""
        if brain_target_list:
            # Do I really need to check this? Or I can apply only the possible stimuli?
            if self.CheckTargets(coil_pose, brain_target_list):
                number_of_stim = 3
                randomized_brain_target_list = brain_target_list.copy()
                random.shuffle(randomized_brain_target_list)
                for brain_target in randomized_brain_target_list:
                    for x in range(number_of_stim):
                        self.UpdateTarget(coil_pose, brain_target)
                        # Jittered inter-stimulus interval of 3.00–4.99 s
                        time.sleep(random.randrange(300, 500, 1) / 100)
                self.SaveSequence()

    def UpdateTarget(self, coil_pose, brain_target):
        """Send one brain target to the mTMS device and log the stimulus."""
        coil_pose_flip = coil_pose.copy()
        brain_target_flip = brain_target.copy()
        # Flip the Y axis of both poses before computing the relative offset
        coil_pose_flip[1] = -coil_pose_flip[1]
        brain_target_flip[1] = -brain_target_flip[1]
        distance = dcr.ComputeRelativeDistanceToTarget(
            target_coord=coil_pose_flip, img_coord=brain_target_flip
        )
        offset = self.GetOffset(distance)
        mTMS_target, mTMS_index_target = self.FindmTMSParameters(offset)
        if len(mTMS_index_target[0]):
            # +1: the pulse-parameter file rows are 1-based on the device side
            self.SendToMTMS(mTMS_index_target[0] + 1)
            new_row = {
                "mTMS_target": mTMS_target,
                "brain_target(nav)": brain_target_flip,
                "coil_pose(nav)": coil_pose_flip,
                "intensity": self.intensity,
            }
            # DataFrame.append() was removed in pandas 2.0; pd.concat is the
            # supported way to add a row.
            self.df = pd.concat(
                [self.df, pd.DataFrame([new_row], columns=self.df.columns)]
            )
        else:
            print("Target is not valid. The offset is: ", offset)

    def GetOffset(self, distance):
        """Quantize a relative pose to the mTMS offset grid.

        X/Y are rounded to integers (mm, presumably — confirm with caller),
        rotation is rounded to the nearest 15 degrees; axes are remapped to
        the device's [-y, x, rz] convention.
        """
        offset_xy = [int(np.round(x)) for x in distance[:2]]
        offset_rz = int(np.round(distance[-1] / 15) * 15)
        offset = [-int(offset_xy[1]), int(offset_xy[0]), int(offset_rz)]
        return offset

    def FindmTMSParameters(self, offset):
        """Look up the row of the pulse-parameter file matching `offset`.

        Returns (target_string, index_array) where index_array is empty when
        the offset has no corresponding entry.
        """
        # fname = "C:\\mTMS\\mTMS parameters\\PP\\PP31 mikael 1mm 15deg 5-coil grid.txt"
        fname = self.vi.GetControlValue("Get Pulse-parameters file")
        with open(fname, "r") as the_file:
            all_data = [line.strip() for line in the_file.readlines()]
        # The first 18 lines of the parameter file are header material
        data = all_data[18:]
        data = np.array([line.split("\t") for line in data])
        separator = "_"
        target = separator.join(["{}".format(x) for x in offset])
        target_index = np.where(data[:, 0] == target)
        return target, target_index

    def SendToMTMS(self, target):
        """Select a pulse-parameter row on the device, wait, and stimulate."""
        # Manipulate intensity
        self.intensity = self.vi.GetControlValue("Get Intensity")
        print("Intensity: ", str(self.intensity))
        # self.vi.SetControlValue('New Intensity', 40)
        # self.vi.SetControlValue('Set Intensity', True)
        # Update the Pulse - parameters row and wait until the change has been processed
        self.vi.SetControlValue("New Pulse-parameters row", int(target))
        self.vi.SetControlValue("Set Pulse-parameters row", True)
        print("Updating brain target: ", int(target))
        # Busy-wait until the device acknowledges the new row
        while self.vi.GetControlValue("Set Pulse-parameters row"):
            pass
        time.sleep(0.3)
        print("Charging capacitors...")
        while not self.vi.GetControlValue("Get Ready to stimulate"):
            pass
        # TODO: remove stimulation from here. The user should use the mtms interface to perform the stimuli
        # Stimulate
        print("Stimulating")
        self.vi.SetControlValue("Stimulate", True)

    def SaveSequence(self):
        """Write the accumulated stimulus log to a timestamped CSV file."""
        timestamp = time.localtime(time.time())
        stamp_date = "{:0>4d}{:0>2d}{:0>2d}".format(
            timestamp.tm_year, timestamp.tm_mon, timestamp.tm_mday
        )
        stamp_time = "{:0>2d}{:0>2d}{:0>2d}".format(
            timestamp.tm_hour, timestamp.tm_min, timestamp.tm_sec
        )
        sep = "_"
        parts = [stamp_date, stamp_time, self.log_name, "sequence"]
        default_filename = sep.join(parts) + ".csv"
        self.df.to_csv(default_filename, sep="\t", encoding="utf-8", index=False)
|
digital | generic_mod_demod | #
# Copyright 2005,2006,2007,2009,2011 Free Software Foundation, Inc.
#
# This file is part of GNU Radio
#
# SPDX-License-Identifier: GPL-3.0-or-later
#
#
"""
Generic modulation and demodulation.
"""
# See gnuradio-examples/python/digital for examples
import math
from gnuradio import analog, blocks, filter, gr
from . import digital_python as digital
from .modulation_utils import extract_kwargs_from_options_for_class
from .utils import mod_codes
# default values (used in __init__ and add_options)
# Module-private defaults shared by generic_mod and generic_demod below.
_def_samples_per_symbol = 2
_def_excess_bw = 0.35
_def_verbose = False
_def_log = False
_def_truncate = False
# Frequency correction
# NOTE(review): the 2*pi/100 loop bandwidths appear to be normalized
# (radians/sample) loop bandwidths — confirm against the GNU Radio docs.
_def_freq_bw = 2 * math.pi / 100.0
# Symbol timing recovery
_def_timing_bw = 2 * math.pi / 100.0
_def_timing_max_dev = 1.5
# Fine frequency / Phase correction
_def_phase_bw = 2 * math.pi / 100.0
# Number of points in constellation
_def_constellation_points = 16
# Whether differential coding is used.
_def_differential = False
def add_common_options(parser):
    """
    Sets options common to both modulator and demodulator.
    """
    # Declarative table of (flags, kwargs); registered in order below so the
    # generated --help output is identical to spelling out each call.
    option_specs = [
        (
            ("-p", "--constellation-points"),
            dict(
                type="int",
                default=_def_constellation_points,
                help="set the number of constellation points (must be a power of 2 for psk, power of 4 for QAM) [default=%default]",
            ),
        ),
        (
            ("", "--non-differential"),
            dict(
                action="store_false",
                dest="differential",
                help="do not use differential encoding [default=False]",
            ),
        ),
        (
            ("", "--differential"),
            dict(
                action="store_true",
                dest="differential",
                default=True,
                help="use differential encoding [default=%default]",
            ),
        ),
        (
            ("", "--mod-code"),
            dict(
                type="choice",
                choices=mod_codes.codes,
                default=mod_codes.NO_CODE,
                help="Select modulation code from: %s [default=%%default]"
                % (", ".join(mod_codes.codes),),
            ),
        ),
        (
            ("", "--excess-bw"),
            dict(
                type="float",
                default=_def_excess_bw,
                help="set RRC excess bandwidth factor [default=%default]",
            ),
        ),
    ]
    for flags, kwargs in option_specs:
        parser.add_option(*flags, **kwargs)
# /////////////////////////////////////////////////////////////////////////////
# Generic modulator
# /////////////////////////////////////////////////////////////////////////////
class generic_mod(gr.hier_block2):
    """
    Hierarchical block for RRC-filtered differential generic modulation.
    The input is a byte stream (unsigned char) and the
    output is the complex modulated signal at baseband.
    Args:
        constellation: determines the modulation type (gnuradio.digital.digital_constellation)
        samples_per_symbol: samples per baud >= 2 (float)
        differential: whether to use differential encoding (boolean)
        pre_diff_code: whether to use apply a pre-differential mapping (boolean)
        excess_bw: Root-raised cosine filter excess bandwidth (float)
        verbose: Print information about modulator? (boolean)
        log: Log modulation data to files? (boolean)
        truncate: Truncate the modulated output to account for the RRC filter response (boolean)
    """

    def __init__(
        self,
        constellation,
        differential=_def_differential,
        samples_per_symbol=_def_samples_per_symbol,
        pre_diff_code=True,
        excess_bw=_def_excess_bw,
        verbose=_def_verbose,
        log=_def_log,
        truncate=_def_truncate,
    ):
        gr.hier_block2.__init__(
            self,
            "generic_mod",
            # Input signature
            gr.io_signature(1, 1, gr.sizeof_char),
            gr.io_signature(1, 1, gr.sizeof_gr_complex),
        )  # Output signature
        self._constellation = constellation
        self._samples_per_symbol = samples_per_symbol
        self._excess_bw = excess_bw
        self._differential = differential
        # Only apply a predifferential coding if the constellation also supports it.
        self.pre_diff_code = pre_diff_code and self._constellation.apply_pre_diff_code()
        if self._samples_per_symbol < 2:
            raise TypeError("sps must be >= 2, is %f" % self._samples_per_symbol)
        # Number of distinct symbols for the differential encoder
        arity = pow(2, self.bits_per_symbol())
        # turn bytes into k-bit vectors
        self.bytes2chunks = blocks.packed_to_unpacked_bb(
            self.bits_per_symbol(), gr.GR_MSB_FIRST
        )
        if self.pre_diff_code:
            self.symbol_mapper = digital.map_bb(self._constellation.pre_diff_code())
        if differential:
            self.diffenc = digital.diff_encoder_bb(arity)
        self.chunks2symbols = digital.chunks_to_symbols_bc(self._constellation.points())
        # pulse shaping filter
        nfilts = 32
        ntaps_per_filt = 11
        # make nfilts filters of ntaps each
        ntaps = nfilts * ntaps_per_filt * int(self._samples_per_symbol)
        self.rrc_taps = filter.firdes.root_raised_cosine(
            nfilts,  # gain
            nfilts,  # sampling rate based on 32 filters in resampler
            1.0,  # symbol rate
            self._excess_bw,  # excess bandwidth (roll-off factor)
            ntaps,
        )
        self.rrc_filter = filter.pfb_arb_resampler_ccf(
            self._samples_per_symbol, self.rrc_taps
        )
        # Remove the filter transient at the beginning of the transmission
        if truncate:
            fsps = float(self._samples_per_symbol)
            # Length of delay through rrc filter
            len_filt_delay = int((ntaps_per_filt * fsps * fsps - fsps) / 2.0)
            self.skiphead = blocks.skiphead(gr.sizeof_gr_complex * 1, len_filt_delay)
        # Connect
        # Build the flowgraph chain in order; optional stages are inserted
        # only when enabled, and the list starts/ends with self (the hier
        # block's own input/output ports).
        self._blocks = [self, self.bytes2chunks]
        if self.pre_diff_code:
            self._blocks.append(self.symbol_mapper)
        if differential:
            self._blocks.append(self.diffenc)
        self._blocks += [self.chunks2symbols, self.rrc_filter]
        if truncate:
            self._blocks.append(self.skiphead)
        self._blocks.append(self)
        self.connect(*self._blocks)
        if verbose:
            self._print_verbage()
        if log:
            self._setup_logging()

    def samples_per_symbol(self):
        """Return the configured samples-per-symbol value."""
        return self._samples_per_symbol

    def bits_per_symbol(self):  # static method that's also callable on an instance
        return self._constellation.bits_per_symbol()

    @staticmethod
    def add_options(parser):
        """
        Adds generic modulation options to the standard parser
        """
        add_common_options(parser)

    def extract_kwargs_from_options(cls, options):
        """
        Given command line options, create dictionary suitable for passing to __init__
        """
        return extract_kwargs_from_options_for_class(cls, options)

    # Legacy (pre-decorator) spelling of @classmethod, kept as-is.
    extract_kwargs_from_options = classmethod(extract_kwargs_from_options)

    def _print_verbage(self):
        """Print a human-readable summary of the modulator configuration."""
        print("\nModulator:")
        print("bits per symbol: %d" % self.bits_per_symbol())
        print("RRC roll-off factor: %.2f" % self._excess_bw)

    def _setup_logging(self):
        """Attach file sinks that dump each stage's output for debugging."""
        print("Modulation logging turned on.")
        self.connect(
            self.bytes2chunks, blocks.file_sink(gr.sizeof_char, "tx_bytes2chunks.8b")
        )
        if self.pre_diff_code:
            self.connect(
                self.symbol_mapper,
                blocks.file_sink(gr.sizeof_char, "tx_symbol_mapper.8b"),
            )
        if self._differential:
            self.connect(
                self.diffenc, blocks.file_sink(gr.sizeof_char, "tx_diffenc.8b")
            )
        self.connect(
            self.chunks2symbols,
            blocks.file_sink(gr.sizeof_gr_complex, "tx_chunks2symbols.32fc"),
        )
        self.connect(
            self.rrc_filter,
            blocks.file_sink(gr.sizeof_gr_complex, "tx_rrc_filter.32fc"),
        )
# /////////////////////////////////////////////////////////////////////////////
# Generic demodulator
#
# Differentially coherent detection of differentially encoded generically
# modulated signal.
# /////////////////////////////////////////////////////////////////////////////
class generic_demod(gr.hier_block2):
    """
    Hierarchical block for RRC-filtered differential generic demodulation.
    The input is the complex modulated signal at baseband.
    The output is a stream of bits packed 1 bit per byte (LSB)
    Args:
        constellation: determines the modulation type (gnuradio.digital.digital_constellation)
        samples_per_symbol: samples per baud >= 2 (float)
        differential: whether to use differential encoding (boolean)
        pre_diff_code: whether to use apply a pre-differential mapping (boolean)
        excess_bw: Root-raised cosine filter excess bandwidth (float)
        freq_bw: loop filter lock-in bandwidth (float)
        timing_bw: timing recovery loop lock-in bandwidth (float)
        phase_bw: phase recovery loop bandwidth (float)
        verbose: Print information about modulator? (boolean)
        log: Log modulation data to files? (boolean)
    """

    def __init__(
        self,
        constellation,
        differential=_def_differential,
        samples_per_symbol=_def_samples_per_symbol,
        pre_diff_code=True,
        excess_bw=_def_excess_bw,
        freq_bw=_def_freq_bw,
        timing_bw=_def_timing_bw,
        phase_bw=_def_phase_bw,
        verbose=_def_verbose,
        log=_def_log,
    ):
        gr.hier_block2.__init__(
            self,
            "generic_demod",
            # Input signature
            gr.io_signature(1, 1, gr.sizeof_gr_complex),
            gr.io_signature(1, 1, gr.sizeof_char),
        )  # Output signature
        self._constellation = constellation
        self._samples_per_symbol = samples_per_symbol
        self._excess_bw = excess_bw
        self._phase_bw = phase_bw
        self._freq_bw = freq_bw
        self._timing_bw = timing_bw
        self._timing_max_dev = _def_timing_max_dev
        self._differential = differential
        if self._samples_per_symbol < 2:
            raise TypeError("sps must be >= 2, is %d" % self._samples_per_symbol)
        # Only apply a predifferential coding if the constellation also supports it.
        self.pre_diff_code = pre_diff_code and self._constellation.apply_pre_diff_code()
        # Number of distinct symbols for the differential decoder
        arity = pow(2, self.bits_per_symbol())
        nfilts = 32
        ntaps = 11 * int(self._samples_per_symbol * nfilts)
        # Automatic gain control
        self.agc = analog.agc2_cc(0.6e-1, 1e-3, 1, 1)
        # Frequency correction
        fll_ntaps = 55
        self.freq_recov = digital.fll_band_edge_cc(
            self._samples_per_symbol, self._excess_bw, fll_ntaps, self._freq_bw
        )
        # symbol timing recovery with RRC data filter
        taps = filter.firdes.root_raised_cosine(
            nfilts, nfilts * self._samples_per_symbol, 1.0, self._excess_bw, ntaps
        )
        self.time_recov = digital.pfb_clock_sync_ccf(
            self._samples_per_symbol,
            self._timing_bw,
            taps,
            nfilts,
            nfilts // 2,
            self._timing_max_dev,
        )
        # Normalized frequency search range for the constellation receiver
        fmin = -0.25
        fmax = 0.25
        self.receiver = digital.constellation_receiver_cb(
            self._constellation.base(), self._phase_bw, fmin, fmax
        )
        # Do differential decoding based on phase change of symbols
        if differential:
            self.diffdec = digital.diff_decoder_bb(arity)
        if self.pre_diff_code:
            # Invert the mapping applied by the modulator's symbol_mapper
            self.symbol_mapper = digital.map_bb(
                mod_codes.invert_code(self._constellation.pre_diff_code())
            )
        # unpack the k bit vector into a stream of bits
        self.unpack = blocks.unpack_k_bits_bb(self.bits_per_symbol())
        if verbose:
            self._print_verbage()
        if log:
            self._setup_logging()
        # Connect and Initialize base class
        # Chain order mirrors generic_mod in reverse; optional stages are
        # inserted only when enabled.
        self._blocks = [self, self.agc, self.freq_recov, self.time_recov, self.receiver]
        if differential:
            self._blocks.append(self.diffdec)
        if self.pre_diff_code:
            self._blocks.append(self.symbol_mapper)
        self._blocks += [self.unpack, self]
        self.connect(*self._blocks)

    def samples_per_symbol(self):
        """Return the configured samples-per-symbol value."""
        return self._samples_per_symbol

    def bits_per_symbol(self):
        """Return the number of bits per symbol of the constellation."""
        return self._constellation.bits_per_symbol()

    def _print_verbage(self):
        """Print a human-readable summary of the demodulator configuration."""
        print("\nDemodulator:")
        print("bits per symbol: %d" % self.bits_per_symbol())
        print("RRC roll-off factor: %.2f" % self._excess_bw)
        print("FLL bandwidth: %.2e" % self._freq_bw)
        print("Timing bandwidth: %.2e" % self._timing_bw)
        print("Phase bandwidth: %.2e" % self._phase_bw)

    def _setup_logging(self):
        """Attach file sinks that dump each stage's outputs for debugging."""
        print("Modulation logging turned on.")
        self.connect(self.agc, blocks.file_sink(gr.sizeof_gr_complex, "rx_agc.32fc"))
        self.connect(
            (self.freq_recov, 0),
            blocks.file_sink(gr.sizeof_gr_complex, "rx_freq_recov.32fc"),
        )
        self.connect(
            (self.freq_recov, 1),
            blocks.file_sink(gr.sizeof_float, "rx_freq_recov_freq.32f"),
        )
        self.connect(
            (self.freq_recov, 2),
            blocks.file_sink(gr.sizeof_float, "rx_freq_recov_phase.32f"),
        )
        self.connect(
            (self.freq_recov, 3),
            blocks.file_sink(gr.sizeof_float, "rx_freq_recov_error.32f"),
        )
        self.connect(
            (self.time_recov, 0),
            blocks.file_sink(gr.sizeof_gr_complex, "rx_time_recov.32fc"),
        )
        self.connect(
            (self.time_recov, 1),
            blocks.file_sink(gr.sizeof_float, "rx_time_recov_error.32f"),
        )
        self.connect(
            (self.time_recov, 2),
            blocks.file_sink(gr.sizeof_float, "rx_time_recov_rate.32f"),
        )
        self.connect(
            (self.time_recov, 3),
            blocks.file_sink(gr.sizeof_float, "rx_time_recov_phase.32f"),
        )
        self.connect(
            (self.receiver, 0), blocks.file_sink(gr.sizeof_char, "rx_receiver.8b")
        )
        self.connect(
            (self.receiver, 1),
            blocks.file_sink(gr.sizeof_float, "rx_receiver_error.32f"),
        )
        self.connect(
            (self.receiver, 2),
            blocks.file_sink(gr.sizeof_float, "rx_receiver_phase.32f"),
        )
        self.connect(
            (self.receiver, 3),
            blocks.file_sink(gr.sizeof_float, "rx_receiver_freq.32f"),
        )
        if self._differential:
            self.connect(
                self.diffdec, blocks.file_sink(gr.sizeof_char, "rx_diffdec.8b")
            )
        if self.pre_diff_code:
            self.connect(
                self.symbol_mapper,
                blocks.file_sink(gr.sizeof_char, "rx_symbol_mapper.8b"),
            )
        self.connect(self.unpack, blocks.file_sink(gr.sizeof_char, "rx_unpack.8b"))

    @staticmethod
    def add_options(parser):
        """
        Adds generic demodulation options to the standard parser
        """
        # Add options shared with modulator.
        add_common_options(parser)
        # Add options specific to demodulator.
        parser.add_option(
            "",
            "--freq-bw",
            type="float",
            default=_def_freq_bw,
            help="set frequency lock loop lock-in bandwidth [default=%default]",
        )
        parser.add_option(
            "",
            "--phase-bw",
            type="float",
            default=_def_phase_bw,
            help="set phase tracking loop lock-in bandwidth [default=%default]",
        )
        parser.add_option(
            "",
            "--timing-bw",
            type="float",
            default=_def_timing_bw,
            help="set timing symbol sync loop gain lock-in bandwidth [default=%default]",
        )

    def extract_kwargs_from_options(cls, options):
        """
        Given command line options, create dictionary suitable for passing to __init__
        """
        return extract_kwargs_from_options_for_class(cls, options)

    # Legacy (pre-decorator) spelling of @classmethod, kept as-is.
    extract_kwargs_from_options = classmethod(extract_kwargs_from_options)
shared_demod_args = """ samples_per_symbol: samples per baud >= 2 (float)
excess_bw: Root-raised cosine filter excess bandwidth (float)
freq_bw: loop filter lock-in bandwidth (float)
timing_bw: timing recovery loop lock-in bandwidth (float)
phase_bw: phase recovery loop bandwidth (float)
verbose: Print information about modulator? (boolean)
log: Log modulation data to files? (boolean)
"""
shared_mod_args = """ samples_per_symbol: samples per baud >= 2 (float)
excess_bw: Root-raised cosine filter excess bandwidth (float)
verbose: Print information about modulator? (boolean)
log: Log modulation data to files? (boolean)
"""
|
sk1-wx | setup | #!/usr/bin/env python
#
# Setup script for sK1 2.x
#
# Copyright (C) 2013-2018 by Ihor E. Novikov
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
"""
Usage:
--------------------------------------------------------------------------
to build package: python setup.py build
to install package: python setup.py install
to remove installation: python setup.py uninstall
--------------------------------------------------------------------------
to create source distribution: python setup.py sdist
--------------------------------------------------------------------------
to create binary RPM distribution: python setup.py bdist_rpm
--------------------------------------------------------------------------
to create binary DEB distribution: python setup.py bdist_deb
--------------------------------------------------------------------------.
Help on available distribution formats: --help-formats
"""
# NOTE: the usage string must be the module's FIRST statement; when it
# followed the __future__ import, __doc__ was None and the no-args help
# below printed "None" instead of this text. Docstrings are allowed to
# precede __future__ imports.
from __future__ import print_function
import datetime
import os
import shutil
import sys
from distutils.core import setup
############################################################
# Subprojects resolving
# Each subproject (build-utils, uniconvertor, wal) is resolved in order of
# preference: already present -> sibling checkout (symlinked) -> fresh git
# clone under ./subproj (symlinked). CLEAR_* flags mark symlinks created
# here so the cleanup section at the end of the script can remove them.
CLEAR_UTILS = False
if not os.path.exists("./utils"):
    if os.path.exists("../build-utils/src/utils"):
        os.system("ln -s ../build-utils/src/utils utils")
    else:
        if not os.path.exists("./subproj/build-utils/src/utils"):
            if not os.path.exists("./subproj"):
                os.makedirs("./subproj")
            os.system(
                "git clone https://github.com/sk1project/build-utils "
                "subproj/build-utils"
            )
        os.system("ln -s ./subproj/build-utils/src/utils utils")
        CLEAR_UTILS = True
CLEAR_UC2 = False
if not os.path.exists("./src/uc2"):
    if os.path.exists("../uniconvertor/src/uc2"):
        os.system("ln -s ../../uniconvertor/src/uc2 src/uc2")
    else:
        if not os.path.exists("./subproj/uniconvertor/src/uc2"):
            if not os.path.exists("./subproj"):
                os.makedirs("./subproj")
            os.system(
                "git clone https://github.com/sk1project/uniconvertor "
                "subproj/uniconvertor"
            )
        os.system("ln -s ../subproj/uniconvertor/src/uc2 src/uc2")
        CLEAR_UC2 = True
CLEAR_WAL = False
if not os.path.exists("./src/wal"):
    if os.path.exists("../wal/src/wal"):
        os.system("ln -s ../../wal/src/wal src/wal")
    else:
        if not os.path.exists("./subproj/wal/src/wal"):
            if not os.path.exists("./subproj"):
                os.makedirs("./subproj")
            os.system("git clone https://github.com/sk1project/wal " "subproj/wal")
        os.system("ln -s ../subproj/wal/src/wal src/wal")
        CLEAR_WAL = True
############################################################
import utils.deb
import utils.rpm
from utils import build, dependencies, fsutils, po
from utils.native_mods import make_modules
# Make the in-tree sources importable so appconst can be read below.
sys.path.insert(1, os.path.abspath("./src"))
CURRENT_PATH = os.path.dirname(os.path.abspath(__file__))
from sk1 import appconst

############################################################
# Flags
############################################################
UPDATE_MODULES = False
DEB_PACKAGE = False
RPM_PACKAGE = False
CLEAR_BUILD = False
############################################################
# Package description
############################################################
NAME = appconst.APPNAME
VERSION = appconst.VERSION + appconst.REVISION
DESCRIPTION = "Vector graphics editor for prepress"
AUTHOR = "Ihor E. Novikov"
AUTHOR_EMAIL = "sk1.project.org@gmail.com"
MAINTAINER = AUTHOR
MAINTAINER_EMAIL = AUTHOR_EMAIL
LICENSE = "GPL v3"
URL = "https://sk1project.net"
DOWNLOAD_URL = URL
CLASSIFIERS = [
    "Development Status :: 5 - Stable",
    "Environment :: Desktop",
    "Intended Audience :: End Users/Desktop",
    "License :: OSI Approved :: GPL v3",
    "Operating System :: POSIX",
    "Operating System :: MacOS :: MacOS X",
    "Operating System :: Microsoft :: Windows",
    "Programming Language :: Python",
    "Programming Language :: C",
    "Topic :: Multimedia :: Graphics :: Editors :: Vector-Based",
]
LONG_DESCRIPTION = """
sK1 is an open source vector graphics editor similar to CorelDRAW,
Adobe Illustrator, or Freehand. First of all sK1 is oriented for prepress
industry, therefore works with CMYK color space and produces CMYK-based PDF
and postscript output.
sK1 Project (https://sk1project.net),
Copyright (C) 2004-%s sK1 Project Team
""" % str(datetime.date.today().year)
# Debian control files require each paragraph to be separated by a "." line.
LONG_DEB_DESCRIPTION = """ .
sK1 is an open source vector graphics editor similar to CorelDRAW,
Adobe Illustrator, or Freehand. First of all sK1 is oriented for prepress
industry, therefore works with CMYK color space and produces CMYK-based PDF
and postscript output.
.
sK1 Project (https://sk1project.net),
Copyright (C) 2004-%s sK1 Project Team
.
""" % str(datetime.date.today().year)
############################################################
# Build data
############################################################
install_path = "/usr/lib/%s-wx-%s" % (NAME, VERSION)
# Exported for the native-module build helpers invoked later.
os.environ["APP_INSTALL_PATH"] = "%s" % (install_path,)
src_path = "src"
include_path = "/usr/include"
modules = []
scripts = [
    "src/script/sk1",
]
deb_scripts = []
data_files = [
    (
        "/usr/share/applications",
        [
            "src/sk1.desktop",
        ],
    ),
    (
        "/usr/share/pixmaps",
        [
            "src/sk1.png",
            "src/sk1.xpm",
        ],
    ),
    (
        "/usr/share/icons/hicolor/scalable/apps",
        [
            "src/sk1.svg",
        ],
    ),
    (
        install_path,
        [
            "LICENSE",
        ],
    ),
]
LOCALES_PATH = "src/sk1/share/locales"
EXCLUDES = [
    "sword",
]
############################################################
deb_depends = ""
rpm_depends = ""
############################################################
# Collect every directory under share/ as package data glob patterns.
dirs = fsutils.get_dirs_tree("src/sk1/share")
share_dirs = []
for item in dirs:
    # item[8:] strips the leading "src/sk1/" package prefix
    share_dirs.append(os.path.join(item[8:], "*.*"))
package_data = {
    "sk1": share_dirs,
}
def build_locales():
    """Compile the sK1 .po translations into the bundled locales tree."""
    po.build_locales("po-sk1", LOCALES_PATH, "sk1")
############################################################
# Main build procedure
############################################################
# Without arguments just print the usage text and exit.
if len(sys.argv) == 1:
    print("Please specify build options!")
    print(__doc__)
    sys.exit(0)
# Custom commands are translated into stock distutils commands plus flags
# that the post-setup() sections below act on.
if len(sys.argv) > 1:
    if sys.argv[1] == "bdist_rpm":
        CLEAR_BUILD = True
        RPM_PACKAGE = True
        sys.argv[1] = "sdist"
        rpm_depends = dependencies.get_sk1_rpm_depend()
    elif sys.argv[1] == "bdist_deb":
        DEB_PACKAGE = True
        CLEAR_BUILD = True
        sys.argv[1] = "build"
        deb_depends = dependencies.get_sk1_deb_depend()
    elif sys.argv[1] == "uninstall":
        if os.path.isdir(install_path):
            # removing sk1 folder
            print("REMOVE: " + install_path)
            os.system("rm -rf " + install_path)
            # removing scripts
            for item in scripts:
                filename = os.path.basename(item)
                print("REMOVE: /usr/bin/" + filename)
                os.system("rm -rf /usr/bin/" + filename)
            # removing data files
            for item in data_files:
                location = item[0]
                file_list = item[1]
                for file_item in file_list:
                    filename = os.path.basename(file_item)
                    filepath = os.path.join(location, filename)
                    if not os.path.isfile(filepath):
                        continue
                    print("REMOVE: " + filepath)
                    os.system("rm -rf " + filepath)
            print("Desktop database update: ", end=" ")
            os.system("update-desktop-database")
            print("DONE!")
        else:
            print("sK1 installation is not found!")
        sys.exit(0)
    elif sys.argv[1] == "update_pot":
        paths = ["src/sk1", "src/uc2"]
        po.build_pot(paths, "po-sk1/sk1.pot", False)
        sys.exit(0)
    elif sys.argv[1] == "build_locales":
        build_locales()
        sys.exit(0)
# Preparing start script
# Substitute the install path into the launcher template. Open in TEXT mode:
# the previous binary-mode version compared bytes against the str "" (never
# true on Python 3, so the loop never terminated) and searched a bytes line
# for a str substring (TypeError). Text mode behaves the same on Python 2.
src_script = "src/script/sk1.tmpl"
dst_script = "src/script/sk1"
with open(src_script, "r") as fileptr:
    with open(dst_script, "w") as fileptr2:
        for line in fileptr:
            if "$APP_INSTALL_PATH" in line:
                line = line.replace("$APP_INSTALL_PATH", install_path)
            fileptr2.write(line)
# Preparing setup.cfg
############################################################
# Generate setup.cfg from the template, appending RPM requirements when
# building an RPM. Open in TEXT mode: the previous "rb"/"wb" version read
# bytes and then appended the str `rpm_depends`, a TypeError on Python 3.
with open("setup.cfg.in", "r") as fileptr:
    content = fileptr.read()
if rpm_depends:
    content += "\nrequires = " + rpm_depends
with open("setup.cfg", "w") as fileptr:
    fileptr.write(content)
# Preparing locales
############################################################
# Compile translations only if they are not already present.
if not os.path.exists(LOCALES_PATH):
    build_locales()
############################################################
# Native extensions
############################################################
modules += make_modules(src_path, include_path)
############################################################
# Setup routine
############################################################
setup(
    name=NAME,
    version=VERSION,
    description=DESCRIPTION,
    author=AUTHOR,
    author_email=AUTHOR_EMAIL,
    maintainer=MAINTAINER,
    maintainer_email=MAINTAINER_EMAIL,
    license=LICENSE,
    url=URL,
    download_url=DOWNLOAD_URL,
    long_description=LONG_DESCRIPTION,
    classifiers=CLASSIFIERS,
    packages=build.get_source_structure(excludes=EXCLUDES),
    package_dir=build.get_package_dirs(excludes=EXCLUDES),
    package_data=package_data,
    data_files=data_files,
    scripts=scripts,
    ext_modules=modules,
)
if sys.argv[1] == "build":
dependencies.install_sk1_pip_deps()
############################################################
# .py source compiling
############################################################
if not UPDATE_MODULES:
build.compile_sources()
############################################################
# This section for developing purpose only
# Command 'python setup.py build_update' allows
# automating build and copying of native extensions
# into package directory
############################################################
if UPDATE_MODULES:
build.copy_modules(modules)
############################################################
# Implementation of bdist_deb command
############################################################
if DEB_PACKAGE:
utils.deb.DebBuilder(
name=NAME,
version=VERSION,
maintainer="%s <%s>" % (AUTHOR, AUTHOR_EMAIL),
depends=deb_depends,
homepage=URL,
description=DESCRIPTION,
long_description=LONG_DEB_DESCRIPTION,
section="graphics",
package_dirs=build.get_package_dirs(excludes=EXCLUDES),
package_data=package_data,
scripts=scripts,
data_files=data_files,
deb_scripts=deb_scripts,
dst=install_path,
)
############################################################
# Implementation of bdist_rpm command
############################################################
if RPM_PACKAGE:
utils.rpm.RpmBuilder(
name=NAME,
version=VERSION,
release="0",
arch="",
maintainer="%s <%s>" % (AUTHOR, AUTHOR_EMAIL),
summary=DESCRIPTION,
description=LONG_DESCRIPTION,
license=LICENSE,
url=URL,
depends=rpm_depends.split(" "),
build_script="setup.py",
install_path=install_path,
data_files=data_files,
)
os.chdir(CURRENT_PATH)
if CLEAR_BUILD:
build.clear_build()
FOR_CLEAR = ["MANIFEST", "src/script/sk1", "setup.cfg"]
FOR_CLEAR += ["utils"] if CLEAR_UTILS else []
FOR_CLEAR += ["src/uc2"] if CLEAR_UC2 else []
FOR_CLEAR += ["src/wal"] if CLEAR_WAL else []
for item in FOR_CLEAR:
if os.path.lexists(item):
os.remove(item)
|
migrations | 0243_unpack_plugin_source_files | # Generated by Django 3.2.13 on 2022-06-15 15:28
import structlog
from django.core import exceptions
from django.db import migrations
from posthog.plugins.utils import extract_plugin_code
logger = structlog.get_logger(__name__)
def forwards_func(apps, schema_editor):
    """Populate PluginSourceFile rows from every archive-based plugin.

    Plugins whose archive cannot be parsed are skipped with a warning
    instead of aborting the whole migration.
    """
    logger.info("Migration 0243 - started")
    Plugin = apps.get_model("posthog", "Plugin")
    PluginSourceFile = apps.get_model("posthog", "PluginSourceFile")

    # PluginSourceFile.objects.sync_from_plugin_archive() inlined
    # 5 changes from the original method:
    # - Plugin and PluginSourceFiles have been stripped from types
    #   (they have to be vars in this scope, but vars cannot be used as types)
    # - plugin_json cannot be provided as an arg
    # - records are `create()`d instead of `update_or_create()`d
    # - `filenames_to_delete` mechanism is removed
    # - there's no return value
    def sync_from_plugin_archive(plugin):
        """Create PluginSourceFile objects from a plugin that has an archive."""
        try:
            plugin_json, index_ts, frontend_tsx, site_ts = extract_plugin_code(
                plugin.archive
            )
        except ValueError as e:
            raise exceptions.ValidationError(f"{e} in plugin {plugin}")
        # Save plugin.json
        PluginSourceFile.objects.create(
            plugin=plugin, filename="plugin.json", source=plugin_json
        )
        # Save frontend.tsx
        if frontend_tsx is not None:
            PluginSourceFile.objects.create(
                plugin=plugin, filename="frontend.tsx", source=frontend_tsx
            )
        # Save site.ts
        if site_ts is not None:
            PluginSourceFile.objects.create(
                plugin=plugin, filename="site.ts", source=site_ts
            )
        # Save index.ts
        if index_ts is not None:
            # The original name of the file is not preserved, but this greatly
            # simplifies the rest of the code, and we don't need to model the
            # whole filesystem (at this point)
            PluginSourceFile.objects.create(
                plugin=plugin, filename="index.ts", source=index_ts
            )

    # Source plugins have already been migrated in 0233_plugin_source_file,
    # while local ones don't store code in the DB
    for plugin in Plugin.objects.exclude(plugin_type__in=("source", "local")):
        try:
            sync_from_plugin_archive(plugin)
        except exceptions.ValidationError as e:
            # Fix: logger.warn() is a deprecated alias of logger.warning();
            # also dropped the stray f-prefix from the placeholder-less string.
            logger.warning(
                "Migration 0243 - skipping plugin, failed to extract or save its code.",
                plugin=plugin.name,
                plugin_id=plugin.id,
                error=e,
            )
        else:
            logger.debug(
                "Migration 0243 - extracted and saved code of plugin.",
                plugin=plugin.name,
                plugin_id=plugin.id,
            )
    logger.info("Migration 0243 - finished")
def reverse_func(apps, schema_editor):
    """Revert migration 0243: delete the source files extracted from plugin archives."""
    logger.info("Migration 0243 - revert started")
    source_file_model = apps.get_model("posthog", "PluginSourceFile")
    # "source" and "local" plugins were never touched by the forward migration,
    # so only rows belonging to archive-backed plugins are removed here.
    non_archive_types = ("source", "local")
    source_file_model.objects.exclude(
        plugin__plugin_type__in=non_archive_types
    ).delete()
    logger.info("Migration 0243 - revert finished")
class Migration(migrations.Migration):
    # Data migration: copies plugin source code out of plugin archives into
    # PluginSourceFile rows (see forwards_func / reverse_func above).
    dependencies = [
        ("posthog", "0242_team_live_events_columns"),
    ]
    operations = [
        # elidable=True: safe to drop this data migration when squashing.
        migrations.RunPython(forwards_func, reverse_func, elidable=True),
    ]
|
nyaa | bencode | from io import BytesIO
def _pairwise(iterable):
"""Returns items from an iterable two at a time, ala
[0, 1, 2, 3, ...] -> [(0, 1), (2, 3), ...]"""
iterable = iter(iterable)
return zip(iterable, iterable)
__all__ = ["encode", "decode", "BencodeException", "MalformedBencodeException"]
# https://wiki.theory.org/BitTorrentSpecification#Bencoding
class BencodeException(Exception):
    """Base class for all bencode encode/decode errors."""

    pass
class MalformedBencodeException(BencodeException):
    """Raised when input being decoded is not valid bencode."""

    pass
# bencode types
_DIGITS = b"0123456789"  # digits valid in integers and bytestring lengths
_B_INT = b"i"  # integer marker, e.g. i64e
_B_LIST = b"l"  # list marker, e.g. li1ei2ee
_B_DICT = b"d"  # dict marker, e.g. d3:keyi1ee
_B_END = b"e"  # terminator for int/list/dict
# Decoding of bencoded data
def _bencode_decode(file_object, decode_keys_as_utf8=True):
    """Decode one bencoded value from *file_object*.

    file_object may be a str, bytes, or a binary file-like object.
    decode_keys_as_utf8 controls decoding dict keys as utf8 (which they
    almost always are).

    Raises MalformedBencodeException on invalid input.
    """
    if isinstance(file_object, str):
        file_object = file_object.encode("utf8")
    if isinstance(file_object, bytes):
        file_object = BytesIO(file_object)

    def create_ex(msg):
        # Annotate the error with the current stream position for debugging.
        return MalformedBencodeException(
            "{0} at position {1} (0x{1:02X} hex)".format(msg, file_object.tell())
        )

    def _read_list():
        """Decodes values from stream until a None is returned ('e')"""
        items = []
        while True:
            value = _bencode_decode(
                file_object, decode_keys_as_utf8=decode_keys_as_utf8
            )
            if value is None:
                break
            items.append(value)
        return items

    kind = file_object.read(1)
    if not kind:
        raise create_ex("EOF, expecting kind")
    if kind == _B_INT:  # Integer
        int_bytes = b""
        while True:
            c = file_object.read(1)
            if not c:
                raise create_ex("EOF, expecting more integer")
            elif c == _B_END:
                try:
                    return int(int_bytes.decode("utf8"))
                except Exception:
                    raise create_ex("Unable to parse int")
            # not a digit OR '-' in the middle of the int
            if (c not in _DIGITS + b"-") or (c == b"-" and int_bytes):
                raise create_ex("Unexpected input while reading an integer: " + repr(c))
            else:
                int_bytes += c
    elif kind == _B_LIST:  # List
        return _read_list()
    elif kind == _B_DICT:  # Dictionary
        keys_and_values = _read_list()
        if len(keys_and_values) % 2 != 0:
            raise MalformedBencodeException("Uneven amount of key/value pairs")
        # "Technically" the bencode dictionary keys are bytestrings,
        # but real-world they're always(?) UTF-8.
        decoded_dict = dict(
            (decode_keys_as_utf8 and k.decode("utf8") or k, v)
            for k, v in _pairwise(keys_and_values)
        )
        return decoded_dict
    # List/dict end, but make sure input is not just 'e'.
    # Bugfix: after read(1), tell() is 1 when 'e' was the very first byte of
    # the stream, so the previous `> 0` check was always true and a bare 'e'
    # input decoded to None instead of raising. Terminators of nested values
    # are always read at tell() > 1, so requiring > 1 rejects only bare 'e'
    # (which now falls through to the "Unexpected data type" error below).
    elif kind == _B_END and file_object.tell() > 1:
        return None
    elif kind in _DIGITS:  # Bytestring
        str_len_bytes = kind  # keep first digit
        # Read string length until a ':'
        while True:
            c = file_object.read(1)
            if not c:
                raise create_ex("EOF, expecting more string len")
            if c in _DIGITS:
                str_len_bytes += c
            elif c == b":":
                break
            else:
                raise create_ex(
                    "Unexpected input while reading string length: " + repr(c)
                )
        try:
            str_len = int(str_len_bytes.decode())
        except Exception:
            raise create_ex("Unable to parse bytestring length")
        bytestring = file_object.read(str_len)
        if len(bytestring) != str_len:
            raise create_ex(
                "Read only {} bytes, {} wanted".format(len(bytestring), str_len)
            )
        return bytestring
    else:
        raise create_ex("Unexpected data type ({})".format(repr(kind)))
# Bencoding
def _bencode_int(value):
    """Encode an integer, eg 64 -> i64e"""
    digits = str(value).encode("utf8")
    return b"".join((_B_INT, digits, _B_END))
def _bencode_bytes(value):
"""Encode a bytestring (strings as UTF-8), eg 'hello' -> 5:hello"""
if isinstance(value, str):
value = value.encode("utf8")
return str(len(value)).encode("utf8") + b":" + value
def _bencode_list(value):
    """Encode a list, eg [64, "hello"] -> li64e5:helloe"""
    encoded_items = [_bencode(item) for item in value]
    return _B_LIST + b"".join(encoded_items) + _B_END
def _bencode_dict(value):
    """Encode a dict, which is keys and values interleaved as a list,
    eg {"hello":123}-> d5:helloi123ee"""
    parts = [_B_DICT]
    for key in sorted(value.keys()):  # Sort keys as per spec
        parts.append(_bencode_bytes(key))
        parts.append(_bencode(value[key]))
    parts.append(_B_END)
    return b"".join(parts)
def _bencode(value):
    """Bencode any supported value (int, bytes, str, list, dict)"""
    # Checks are performed in the same order as before: int first
    # (so bool, an int subclass, encodes as an integer), then strings.
    if isinstance(value, int):
        return _bencode_int(value)
    if isinstance(value, (str, bytes)):
        return _bencode_bytes(value)
    if isinstance(value, list):
        return _bencode_list(value)
    if isinstance(value, dict):
        return _bencode_dict(value)
    raise BencodeException("Unsupported type " + str(type(value)))
# Public API aliases for the recursive implementations above
encode = _bencode
decode = _bencode_decode
|
extractors | title | __package__ = "archivebox.extractors"
import re
from html.parser import HTMLParser
from pathlib import Path
from typing import Optional
from ..config import (
CHECK_SSL_VALIDITY,
CURL_ARGS,
CURL_BINARY,
CURL_USER_AGENT,
CURL_VERSION,
SAVE_TITLE,
TIMEOUT,
)
from ..index.schema import ArchiveError, ArchiveOutput, ArchiveResult, Link
from ..logging_util import TimedProgress
from ..util import download_url, enforce_types, htmldecode
# Fallback pattern used when the strict HTML parser finds no title
HTML_TITLE_REGEX = re.compile(
    r"<title.*?>"  # start matching text after <title> tag
    r"(.[^<>]+)",  # get everything up to these symbols
    re.IGNORECASE | re.MULTILINE | re.DOTALL | re.UNICODE,
)
class TitleParser(HTMLParser):
    """Collect the first <title> text and the og:title meta from an HTML page."""

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.title_tag = ""
        self.title_og = ""
        self.inside_title_tag = False

    @property
    def title(self):
        """Prefer the <title> tag text, fall back to og:title, else None."""
        return self.title_tag or self.title_og or None

    def handle_starttag(self, tag, attrs):
        tag = tag.lower()
        if tag == "title" and not self.title_tag:
            # Only the first <title> element is captured
            self.inside_title_tag = True
            return
        if tag == "meta" and not self.title_og:
            attr_map = dict(attrs)
            content = attr_map.get("content")
            if attr_map.get("property") == "og:title" and content:
                self.title_og = content

    def handle_data(self, data):
        if self.inside_title_tag and data:
            self.title_tag += data.strip()

    def handle_endtag(self, tag):
        if tag.lower() == "title":
            self.inside_title_tag = False
@enforce_types
def get_html(link: Link, path: Path, timeout: int = TIMEOUT) -> str:
    """
    Try to find wget, singlefile and then dom files.
    If none is found, download the url again.
    """
    canonical = link.canonical_outputs()
    abs_path = path.absolute()
    # Preference order: singlefile snapshot, then wget, then dom dump.
    # TypeError covers a canonical path of None.
    for source in (
        canonical["singlefile_path"],
        canonical["wget_path"],
        canonical["dom_path"],
    ):
        try:
            with open(abs_path / source, "r", encoding="utf-8") as f:
                return f.read()
        except (FileNotFoundError, TypeError):
            continue
    # No local snapshot was readable; fetch the page again
    return download_url(link.url, timeout=timeout)
@enforce_types
def should_save_title(
    link: Link, out_dir: Optional[str] = None, overwrite: Optional[bool] = False
) -> bool:
    """Decide whether the title extractor should run for this link."""
    # A title that starts with "http" is just the URL used as a placeholder,
    # so it does not count as a real title.
    has_real_title = bool(link.title) and not link.title.lower().startswith("http")
    if has_real_title and not overwrite:
        return False
    return SAVE_TITLE
def extract_title_with_regex(html):
    """Regex-based title extraction fallback for broken/malformed HTML."""
    match = HTML_TITLE_REGEX.search(html)
    if not match:
        return None
    return htmldecode(match.group(1).strip())
@enforce_types
def save_title(
    link: Link, out_dir: Optional[Path] = None, timeout: int = TIMEOUT
) -> ArchiveResult:
    """try to guess the page's title from its content"""
    from core.models import Snapshot
    output: ArchiveOutput = None
    # cmd is recorded in the ArchiveResult for provenance; get_html below may
    # serve the page from a local snapshot instead of actually running curl.
    cmd = [
        CURL_BINARY,
        *CURL_ARGS,
        "--max-time",
        str(timeout),
        *(["--user-agent", "{}".format(CURL_USER_AGENT)] if CURL_USER_AGENT else []),
        *([] if CHECK_SSL_VALIDITY else ["--insecure"]),
        link.url,
    ]
    status = "succeeded"
    timer = TimedProgress(timeout, prefix=" ")
    try:
        html = get_html(link, out_dir, timeout=timeout)
        try:
            # try using relatively strict html parser first
            parser = TitleParser()
            parser.feed(html)
            output = parser.title
            if output is None:
                # bare raise with no active exception raises RuntimeError,
                # which the handler below catches -> regex fallback
                raise
        except Exception:
            # fallback to regex that can handle broken/malformed html
            output = extract_title_with_regex(html)
        # if title is better than the one in the db, update db with new title
        if isinstance(output, str) and output:
            if not link.title or len(output) >= len(link.title):
                Snapshot.objects.filter(url=link.url, timestamp=link.timestamp).update(
                    title=output
                )
        else:
            # if no content was returned, dont save a title (because it might be a temporary error)
            if not html:
                raise ArchiveError("Unable to detect page title")
            # output = html[:128] # use first bit of content as the title
            output = link.base_url  # use the filename as the title (better UX)
    except Exception as err:
        status = "failed"
        output = err
    finally:
        timer.end()
    return ArchiveResult(
        cmd=cmd,
        pwd=str(out_dir),
        cmd_version=CURL_VERSION,
        output=output,
        status=status,
        **timer.stats,
    )
|
isbn | isbn | """ Use the range message from isbn-international to hyphenate ISBNs """
import os
from typing import Optional
from xml.etree import ElementTree
from xml.etree.ElementTree import Element
import requests
from bookwyrm import settings
def _get_rules(element: Element) -> list[Element]:
if (rules_el := element.find("Rules")) is not None:
return rules_el.findall("Rule")
return []
class IsbnHyphenator:
    """Class to manage the range message xml file and use it to hyphenate ISBNs"""
    # Source of the official ISBN range data published by isbn-international.org
    __range_message_url = "https://www.isbn-international.org/export_rangemessage.xml"
    # Local cache of the downloaded range message
    __range_file_path = os.path.join(
        settings.BASE_DIR, "bookwyrm", "isbn", "RangeMessage.xml"
    )
    # Parsed XML tree, loaded lazily from __range_file_path
    __element_tree = None
    def update_range_message(self) -> None:
        """Download the range message xml file and save it locally"""
        # NOTE(review): no timeout is passed to requests.get - a hung download
        # blocks the caller indefinitely; consider adding one.
        response = requests.get(self.__range_message_url)
        with open(self.__range_file_path, "w", encoding="utf-8") as file:
            file.write(response.text)
        # Drop the cached tree so the new file is parsed on next use
        self.__element_tree = None
    def hyphenate(self, isbn_13: Optional[str]) -> Optional[str]:
        """hyphenate the given ISBN-13 number using the range message.

        Returns the input unchanged when no matching range is found,
        and None when given None.
        """
        if isbn_13 is None:
            return None
        if self.__element_tree is None:
            self.__element_tree = ElementTree.parse(self.__range_file_path)
        # An ISBN-13 splits into: GS1 prefix (3) + registration group +
        # registrant + publication + check digit (1)
        gs1_prefix = isbn_13[:3]
        reg_group = self.__find_reg_group(isbn_13, gs1_prefix)
        if reg_group is None:
            return isbn_13  # failed to hyphenate
        registrant = self.__find_registrant(isbn_13, gs1_prefix, reg_group)
        if registrant is None:
            return isbn_13  # failed to hyphenate
        publication = isbn_13[len(gs1_prefix) + len(reg_group) + len(registrant) : -1]
        check_digit = isbn_13[-1:]
        return "-".join((gs1_prefix, reg_group, registrant, publication, check_digit))
    def __find_reg_group(self, isbn_13: str, gs1_prefix: str) -> Optional[str]:
        """Find the registration-group part of isbn_13 via the EAN.UCC rules."""
        if self.__element_tree is None:
            self.__element_tree = ElementTree.parse(self.__range_file_path)
        ucc_prefixes_el = self.__element_tree.find("EAN.UCCPrefixes")
        if ucc_prefixes_el is None:
            return None
        for ean_ucc_el in ucc_prefixes_el.findall("EAN.UCC"):
            if (
                prefix_el := ean_ucc_el.find("Prefix")
            ) is not None and prefix_el.text == gs1_prefix:
                for rule_el in _get_rules(ean_ucc_el):
                    length_el = rule_el.find("Length")
                    if length_el is None:
                        continue
                    length = int(text) if (text := length_el.text) else 0
                    # Length 0 marks a range with no assigned group length
                    if length == 0:
                        continue
                    range_el = rule_el.find("Range")
                    if range_el is None or range_el.text is None:
                        continue
                    # Range text looks like "0000000-5999999"; compare only
                    # the first `length` digits of each bound
                    reg_grp_range = [int(x[:length]) for x in range_el.text.split("-")]
                    reg_group = isbn_13[len(gs1_prefix) : len(gs1_prefix) + length]
                    if reg_grp_range[0] <= int(reg_group) <= reg_grp_range[1]:
                        return reg_group
                return None
        return None
    def __find_registrant(
        self, isbn_13: str, gs1_prefix: str, reg_group: str
    ) -> Optional[str]:
        """Find the registrant part of isbn_13 via the RegistrationGroups rules."""
        from_ind = len(gs1_prefix) + len(reg_group)
        if self.__element_tree is None:
            self.__element_tree = ElementTree.parse(self.__range_file_path)
        reg_groups_el = self.__element_tree.find("RegistrationGroups")
        if reg_groups_el is None:
            return None
        for group_el in reg_groups_el.findall("Group"):
            if (
                prefix_el := group_el.find("Prefix")
            ) is not None and prefix_el.text == "-".join((gs1_prefix, reg_group)):
                for rule_el in _get_rules(group_el):
                    length_el = rule_el.find("Length")
                    if length_el is None:
                        continue
                    length = int(text) if (text := length_el.text) else 0
                    if length == 0:
                        continue
                    range_el = rule_el.find("Range")
                    if range_el is None or range_el.text is None:
                        continue
                    registrant_range = [
                        int(x[:length]) for x in range_el.text.split("-")
                    ]
                    registrant = isbn_13[from_ind : from_ind + length]
                    if registrant_range[0] <= int(registrant) <= registrant_range[1]:
                        return registrant
                return None
        return None
# Shared module-level instance; callers use this rather than constructing their own
hyphenator_singleton = IsbnHyphenator()
|
migrations | 0001_initial | # Generated by Django 3.2.19 on 2023-07-01 12:28
import django.db.models.deletion
from django.db import migrations, models
class Migration(migrations.Migration):
    # Initial schema for the Zvonok phone-call provider integration:
    # one model linking a Zvonok call to the generic PhoneCallRecord.
    initial = True
    dependencies = [
        ("phone_notifications", "0001_initial"),
    ]
    operations = [
        migrations.CreateModel(
            name="ZvonokPhoneCall",
            fields=[
                (
                    "id",
                    models.BigAutoField(
                        auto_created=True,
                        primary_key=True,
                        serialize=False,
                        verbose_name="ID",
                    ),
                ),
                ("created_at", models.DateTimeField(auto_now_add=True)),
                (
                    # Call status codes as reported by the Zvonok API
                    "status",
                    models.PositiveSmallIntegerField(
                        blank=True,
                        choices=[
                            (10, "attempts_exc"),
                            (20, "compl_finished"),
                            (30, "compl_nofinished"),
                            (40, "deleted"),
                            (50, "duration_error"),
                            (60, "expires"),
                            (70, "novalid_button"),
                            (80, "no_provider"),
                            (90, "interrupted"),
                            (100, "in_process"),
                            (110, "pincode_nook"),
                            (130, "synth_error"),
                            (140, "user"),
                        ],
                        null=True,
                    ),
                ),
                ("call_id", models.CharField(blank=True, max_length=50)),
                ("campaign_id", models.CharField(max_length=50)),
                (
                    "phone_call_record",
                    models.OneToOneField(
                        on_delete=django.db.models.deletion.CASCADE,
                        related_name="zvonok_zvonokphonecall_related",
                        related_query_name="zvonok_zvonokphonecalls",
                        to="phone_notifications.phonecallrecord",
                    ),
                ),
            ],
            options={
                "abstract": False,
            },
        ),
    ]
|
Gui | Util | # -*- coding: utf-8 -*-
# ***************************************************************************
# * Copyright (c) 2017 sliptonic <shopinthewoods@gmail.com> *
# * *
# * This program is free software; you can redistribute it and/or modify *
# * it under the terms of the GNU Lesser General Public License (LGPL) *
# * as published by the Free Software Foundation; either version 2 of *
# * the License, or (at your option) any later version. *
# * for detail see the LICENCE text file. *
# * *
# * This program is distributed in the hope that it will be useful, *
# * but WITHOUT ANY WARRANTY; without even the implied warranty of *
# * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *
# * GNU Library General Public License for more details. *
# * *
# * You should have received a copy of the GNU Library General Public *
# * License along with this program; if not, write to the Free Software *
# * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 *
# * USA *
# * *
# ***************************************************************************
import FreeCAD
import FreeCADGui
import Path
import Path.Base.Util as PathUtil
from PySide import QtCore, QtGui
__title__ = "Path UI helper and utility functions"
__author__ = "sliptonic (Brad Collette)"
__url__ = "https://www.freecad.org"
__doc__ = "A collection of helper and utility functions for the Path GUI."
# Developer switch: flip to True for verbose debug logging of this module.
if False:
    Path.Log.setLevel(Path.Log.Level.DEBUG, Path.Log.thisModule())
    Path.Log.trackModule(Path.Log.thisModule())
else:
    Path.Log.setLevel(Path.Log.Level.INFO, Path.Log.thisModule())
def populateCombobox(form, enumTups, comboBoxesPropertyMap):
    """populateCombobox(form, enumTups, comboBoxesPropertyMap) ... populate comboboxes with translated enumerations
    ** comboBoxesPropertyMap will be unnecessary if UI files use strict combobox naming protocol.
    Args:
        form = UI form
        enumTups = mapping of property name -> list of (translated_text, data_string) tuples
        comboBoxesPropertyMap = list of (combobox_attribute_name, property_name) tuples
    """
    Path.Log.track(enumTups)
    # Load appropriate enumerations in each combobox
    for cb, prop in comboBoxesPropertyMap:
        box = getattr(form, cb)  # Get the combobox
        box.clear()  # clear the combobox
        for text, data in enumTups[prop]:  # load enumerations
            box.addItem(text, data)
def updateInputField(obj, prop, widget, onBeforeChange=None):
    """updateInputField(obj, prop, widget) ... update obj's property prop with the value of widget.
    The property's value is only assigned if the new value differs from the current value.
    This prevents onChanged notifications where the value didn't actually change.
    Gui::InputField and Gui::QuantitySpinBox widgets are supported - and the property can
    be of type Quantity or Float.
    If onBeforeChange is specified it is called before a new value is assigned to the property.
    Returns True if a new value was assigned, False otherwise (new value is the same as the current).
    """
    value = widget.property("rawValue")
    Path.Log.track("value: {}".format(value))
    attr = PathUtil.getProperty(obj, prop)
    # attr may be a Quantity; attrValue is the plain numeric value for comparison
    attrValue = attr.Value if hasattr(attr, "Value") else attr
    isDiff = False
    if not Path.Geom.isRoughly(attrValue, value):
        isDiff = True
    else:
        # Values match the widget - but if an expression drives this property,
        # the expression's result takes precedence over the widget's raw value.
        if hasattr(obj, "ExpressionEngine"):
            exprSet = False
            for prp, expr in obj.ExpressionEngine:
                if prp == prop:
                    exprSet = True
                    Path.Log.debug('prop = "expression": {} = "{}"'.format(prp, expr))
                    value = FreeCAD.Units.Quantity(obj.evalExpression(expr)).Value
                    if not Path.Geom.isRoughly(attrValue, value):
                        isDiff = True
                    break
            # Reflect expression binding in the widget: read-only + style hint
            if exprSet:
                widget.setReadOnly(True)
                widget.setProperty("exprSet", "true")
                widget.style().unpolish(widget)
                widget.ensurePolished()
            else:
                widget.setReadOnly(False)
                widget.setProperty("exprSet", "false")
                widget.style().unpolish(widget)
                widget.ensurePolished()
            widget.update()
    if isDiff:
        # Bugfix: format attrValue (plain float) instead of attr - attr can be
        # a Quantity object, which %.2f cannot necessarily format directly.
        Path.Log.debug(
            "updateInputField(%s, %s): %.2f -> %.2f"
            % (obj.Label, prop, attrValue, value)
        )
        if onBeforeChange:
            onBeforeChange(obj)
        PathUtil.setProperty(obj, prop, value)
        return True
    return False
class QuantitySpinBox(QtCore.QObject):
    """Controller class to interface a Gui::QuantitySpinBox.
    The spin box gets bound to a given property and supports update in both directions.
    QuantitySpinBox(widget, obj, prop, onBeforeChange=None)
      widget ... expected to be reference to a Gui::QuantitySpinBox
      obj ... document object
      prop ... canonical name of the (sub-) property
      onBeforeChange ... an optional callback being executed before the value of the property is changed
    """
    def __init__(self, widget, obj, prop, onBeforeChange=None):
        super().__init__()
        Path.Log.track(widget)
        self.widget = widget
        self.onBeforeChange = onBeforeChange
        self.prop = None
        self.obj = obj
        # Remember the last displayed text to detect expression-driven changes
        self.lastWidgetText = self.widget.text()
        self.attachTo(obj, prop)
        # Event filter refreshes the display whenever the widget gains focus
        self.widget.installEventFilter(self)
        # Connect local class method as slot
        self.widget.textChanged.connect(self.onWidgetValueChanged)
    def eventFilter(self, obj, event):
        # Refresh displayed value on focus-in; never consume the event
        if event.type() == QtCore.QEvent.Type.FocusIn:
            self.updateSpinBox()
        return False
    def onWidgetValueChanged(self):
        """onWidgetValueChanged()... Slot method for determining if a change
        in widget value is a result of an expression edit, or a simple spinbox change.
        If the former, emit a manual `editingFinished` signal because the Formula Editor
        window returned a value to the base widget, leaving it in read-only mode,
        and finishing the editing of the value. Otherwise, do nothing if the value
        has not changed, or there is no active expression for the property.
        If the user closes the Formula Editor to cancel the edit, the value will not
        be changed, and this manual signal will not be emitted."""
        if self._hasExpression() and self.widget.text() != self.lastWidgetText:
            self.widget.editingFinished.emit()
    def attachTo(self, obj, prop=None):
        """attachTo(obj, prop=None) ... use an existing editor for the given object and property"""
        Path.Log.track(self.prop, prop)
        self.obj = obj
        self.prop = prop
        if obj and prop:
            attr = PathUtil.getProperty(obj, prop)
            if attr is not None:
                # Quantity properties carry a unit the widget should display
                if hasattr(attr, "Value"):
                    self.widget.setProperty("unit", attr.getUserPreferred()[2])
                self.widget.setProperty("binding", "%s.%s" % (obj.Name, prop))
                self.valid = True
            else:
                Path.Log.warning(
                    "Cannot find property {} of {}".format(prop, obj.Label)
                )
                self.valid = False
        else:
            self.valid = False
    def expression(self):
        """expression() ... returns the expression if one is bound to the property"""
        Path.Log.track(self.prop, self.valid)
        if self.valid:
            return self.widget.property("expression")
        return ""
    def setMinimum(self, quantity):
        """setMinimum(quantity) ... set the minimum"""
        Path.Log.track(self.prop, self.valid)
        if self.valid:
            value = quantity.Value if hasattr(quantity, "Value") else quantity
            self.widget.setProperty("setMinimum", value)
    def updateSpinBox(self, quantity=None):
        """updateSpinBox(quantity=None) ... update the display value of the spin box.
        If no value is provided the value of the bound property is used.
        quantity can be of type Quantity or Float."""
        Path.Log.track(self.prop, self.valid, quantity)
        if self.valid:
            expr = self._hasExpression()
            if quantity is None:
                # Expression result wins over the stored property value
                if expr:
                    quantity = FreeCAD.Units.Quantity(self.obj.evalExpression(expr))
                else:
                    quantity = PathUtil.getProperty(self.obj, self.prop)
            value = quantity.Value if hasattr(quantity, "Value") else quantity
            self.widget.setProperty("rawValue", value)
            self.lastWidgetText = self.widget.text()  # update last widget value
            # Expression-bound widgets are read-only and styled via "exprSet"
            if expr:
                self.widget.setReadOnly(True)
                self.widget.setProperty("exprSet", "true")
                self.widget.style().unpolish(self.widget)
                self.widget.ensurePolished()
            else:
                self.widget.setReadOnly(False)
                self.widget.setProperty("exprSet", "false")
                self.widget.style().unpolish(self.widget)
                self.widget.ensurePolished()
    def updateProperty(self):
        """updateProperty() ... update the bound property with the value from the spin box"""
        Path.Log.track(self.prop, self.valid)
        if self.valid:
            return updateInputField(
                self.obj, self.prop, self.widget, self.onBeforeChange
            )
        return None
    def _hasExpression(self):
        # Return the expression string bound to self.prop, or None
        for prop, exp in self.obj.ExpressionEngine:
            if prop == self.prop:
                return exp
        return None
def getDocNode():
    """Return the tree-view item of the active document, or None if not found."""
    doc_name = FreeCADGui.ActiveDocument.Document.Name
    for tree in FreeCADGui.getMainWindow().findChildren(QtGui.QTreeWidget):
        # The model tree has a single top-level "Application" item
        if tree.topLevelItemCount() != 1:
            continue
        app_item = tree.topLevelItem(0)
        if app_item.text(0) != "Application":
            continue
        for idx in range(app_item.childCount()):
            candidate = app_item.child(idx)
            if candidate.text(0) == doc_name:
                return candidate
    return None
def disableItem(item):
    """Recursively clear the drag and drop flags on item and all its children."""
    drag_flag = QtCore.Qt.ItemFlag.ItemIsDragEnabled
    drop_flag = QtCore.Qt.ItemFlag.ItemIsDropEnabled
    item.setFlags(item.flags() & ~drag_flag)
    item.setFlags(item.flags() & ~drop_flag)
    for child_index in range(item.childCount()):
        disableItem(item.child(child_index))
def findItem(docitem, objname):
    """Depth-first search of docitem's subtree for a child named objname.

    Returns the matching tree item, or None if no descendant matches.
    """
    # Removed stray debug print() that spammed stdout on every recursion.
    for i in range(0, docitem.childCount()):
        child = docitem.child(i)
        if child.text(0) == objname:
            return child
        # Recurse into this child's subtree before moving to the next sibling
        res = findItem(child, objname)
        if res:
            return res
    return None
|
models | admintools | # The contents of this file are subject to the Common Public Attribution
# License Version 1.0. (the "License"); you may not use this file except in
# compliance with the License. You may obtain a copy of the License at
# http://code.reddit.com/LICENSE. The License is based on the Mozilla Public
# License Version 1.1, but Sections 14 and 15 have been added to cover use of
# software over a computer network and provide for limited attribution for the
# Original Developer. In addition, Exhibit A has been modified to be consistent
# with Exhibit B.
#
# Software distributed under the License is distributed on an "AS IS" basis,
# WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License for
# the specific language governing rights and limitations under the License.
#
# The Original Code is reddit.
#
# The Original Developer is the Initial Developer. The Initial Developer of
# the Original Code is reddit Inc.
#
# All portions of the code written by reddit are Copyright (c) 2006-2015 reddit
# Inc. All Rights Reserved.
###############################################################################
from copy import copy
from datetime import datetime, timedelta
from _pylibmc import MemcachedError
from pylons import app_globals as g
from pylons import config
from pylons import tmpl_context as c
from pylons.i18n import _
from r2.lib import amqp
from r2.lib.db import tdb_cassandra
from r2.lib.db.thing import NotFound
from r2.lib.errors import MessageError
from r2.lib.filters import websafe
from r2.lib.hooks import HookRegistrar
from r2.lib.utils import fetch_things2, tup
from r2.models import Account, Comment, Link, Message, NotFound, Report, Subreddit
from r2.models.award import Award
from r2.models.gold import append_random_bottlecap_phrase, creddits_lock
from r2.models.token import AwardClaimToken
from r2.models.wiki import WikiPage
admintools_hooks = HookRegistrar()
class AdminTools(object):
    """Moderation/admin helpers: spam state transitions and gold bookkeeping."""
    def spam(
        self,
        things,
        auto=True,
        moderator_banned=False,
        banner=None,
        date=None,
        train_spam=True,
        **kw,
    ):
        """Mark things as spam/removed and queue the associated events.

        auto: removal was automatic (filter) rather than human-initiated.
        moderator_banned: a subreddit moderator (vs an admin) removed it.
        banner: name of who banned, or a dict of fullname -> banner name.
        train_spam: whether this action should train the spam filter.
        """
        from r2.lib.db import queries
        all_things = tup(things)
        new_things = [x for x in all_things if not x._spam]
        Report.accept(all_things, True)
        for t in all_things:
            # Promoted links are never marked as spam
            if getattr(t, "promoted", None) is not None:
                g.log.debug("Refusing to mark promotion %r as spam" % t)
                continue
            # Note recorded in ban_info, derived from prior state + training flag
            if not t._spam and train_spam:
                note = "spam"
            elif not t._spam and not train_spam:
                note = "remove not spam"
            elif t._spam and not train_spam:
                note = "confirm spam"
            elif t._spam and train_spam:
                note = "reinforce spam"
            t._spam = True
            if moderator_banned:
                t.verdict = "mod-removed"
            elif not auto:
                t.verdict = "admin-removed"
            ban_info = copy(getattr(t, "ban_info", {}))
            if isinstance(banner, dict):
                ban_info["banner"] = banner[t._fullname]
            else:
                ban_info["banner"] = banner
            ban_info.update(
                auto=auto,
                moderator_banned=moderator_banned,
                banned_at=date or datetime.now(g.tz),
                **kw,
            )
            ban_info["note"] = note
            t.ban_info = ban_info
            t._commit()
            # NOTE(review): "auto_removed" is also queued in the loop below,
            # so each fullname is emitted twice when auto=True - confirm intended.
            if auto:
                amqp.add_item("auto_removed", t._fullname)
        if not auto:
            self.author_spammer(new_things, True)
            self.set_last_sr_ban(new_things)
        queries.ban(all_things, filtered=auto)
        for t in all_things:
            if auto:
                amqp.add_item("auto_removed", t._fullname)
            if isinstance(t, Comment):
                amqp.add_item("removed_comment", t._fullname)
            elif isinstance(t, Link):
                amqp.add_item("removed_link", t._fullname)
    def unspam(
        self,
        things,
        moderator_unbanned=True,
        unbanner=None,
        train_spam=True,
        insert=True,
    ):
        """Approve things (clear spam state) and queue the approval events."""
        from r2.lib.db import queries
        things = tup(things)
        # We want to make unban-all moderately efficient, so when
        # mass-unbanning, we're going to skip the code below on links that
        # are already not banned. However, when someone manually clicks
        # "approve" on an unbanned link, and there's just one, we do
        # want to run the code below. That way, the little green checkmark
        # will have the right mouseover details, the reports will be
        # cleared, etc.
        if len(things) > 1:
            things = [x for x in things if x._spam]
        Report.accept(things, False)
        for t in things:
            ban_info = copy(getattr(t, "ban_info", {}))
            ban_info["unbanned_at"] = datetime.now(g.tz)
            if unbanner:
                ban_info["unbanner"] = unbanner
            # First unban sets reset_used=False; later unbans flip it to True
            if ban_info.get("reset_used", None) == None:
                ban_info["reset_used"] = False
            else:
                ban_info["reset_used"] = True
            t.ban_info = ban_info
            t._spam = False
            if moderator_unbanned:
                t.verdict = "mod-approved"
            else:
                t.verdict = "admin-approved"
            t._commit()
            if isinstance(t, Comment):
                amqp.add_item("approved_comment", t._fullname)
            elif isinstance(t, Link):
                amqp.add_item("approved_link", t._fullname)
        self.author_spammer(things, False)
        self.set_last_sr_ban(things)
        queries.unban(things, insert)
    def report(self, thing):
        # Intentionally a no-op in open source; may be overridden (see EOF import)
        pass
    def author_spammer(self, things, spam):
        """incr/decr the 'spammer' field for the author of every
        passed thing"""
        by_aid = {}
        for thing in things:
            if hasattr(thing, "author_id") and not getattr(thing, "ban_info", {}).get(
                "auto", True
            ):
                # only decrement 'spammer' for items that were not
                # autobanned
                by_aid.setdefault(thing.author_id, []).append(thing)
        if by_aid:
            authors = Account._byID(by_aid.keys(), data=True, return_dict=True)
            for aid, author_things in by_aid.iteritems():
                author = authors[aid]
                author._incr(
                    "spammer", len(author_things) if spam else -len(author_things)
                )
    def set_last_sr_ban(self, things):
        # Record the moderation action timestamp/counter on each affected subreddit
        by_srid = {}
        for thing in things:
            if getattr(thing, "sr_id", None) is not None:
                by_srid.setdefault(thing.sr_id, []).append(thing)
        if by_srid:
            srs = Subreddit._byID(by_srid.keys(), data=True, return_dict=True)
            for sr_id, sr_things in by_srid.iteritems():
                sr = srs[sr_id]
                sr.last_mod_action = datetime.now(g.tz)
                sr._commit()
                sr._incr("mod_actions", len(sr_things))
    def adjust_gold_expiration(self, account, days=0, months=0, years=0):
        """Extend (or shrink) an account's gold expiration and sync gold status."""
        now = datetime.now(g.display_tz)
        # Whole years are added as 366 days; partial months as 31 days each
        if months % 12 == 0:
            years += months / 12
        else:
            days += months * 31
        days += years * 366
        existing_expiration = getattr(account, "gold_expiration", None)
        if existing_expiration is None or existing_expiration < now:
            existing_expiration = now
        account.gold_expiration = existing_expiration + timedelta(days)
        # Flip gold membership on/off to match the new expiration
        if account.gold_expiration > now and not account.gold:
            self.engolden(account)
        elif account.gold_expiration <= now and account.gold:
            self.degolden(account)
        account._commit()
    def engolden(self, account):
        """Grant gold status and the reddit_gold trophy."""
        now = datetime.now(g.display_tz)
        account.gold = True
        description = "Since " + now.strftime("%B %Y")
        trophy = Award.give_if_needed(
            "reddit_gold", account, description=description, url="/gold/about"
        )
        # Returning members get their "Member Emeritus" trophy re-dated
        if trophy and trophy.description.endswith("Member Emeritus"):
            trophy.description = description
            trophy._commit()
        account._commit()
        account.friend_rels_cache(_update=True)
    def degolden(self, account):
        """Revoke gold status and the reddit_gold trophy."""
        Award.take_away("reddit_gold", account)
        account.gold = False
        account._commit()
    def admin_list(self):
        # Snapshot of the configured admin usernames
        return list(g.admins)
    def create_award_claim_code(
        self, unique_award_id, award_codename, description, url
    ):
        """Create a one-time-use claim URL for a user to claim a trophy.
        `unique_award_id` - A string that uniquely identifies the kind of
            Trophy the user would be claiming.
            See: token.py:AwardClaimToken.uid
        `award_codename` - The codename of the Award the user will claim
        `description` - The description the Trophy will receive
        `url` - The URL the Trophy will receive
        """
        award = Award._by_codename(award_codename)
        token = AwardClaimToken._new(unique_award_id, award, description, url)
        return token.confirm_url()
# Module-level singleton; may be replaced by r2admin's version (see EOF import)
admintools = AdminTools()
def cancel_subscription(subscr_id):
    """Clear the gold subscription id from the account(s) matching subscr_id."""
    matches = list(Account._query(Account.c.gold_subscr_id == subscr_id, data=True))
    # Exactly one account should own a subscription id; log anything else
    if len(matches) != 1:
        g.log.warning(
            "Found %d matches for canceled subscription %s" % (len(matches), subscr_id)
        )
    for account in matches:
        account.gold_subscr_id = None
        account._commit()
        g.log.info(
            "%s canceled their recurring subscription %s" % (account.name, subscr_id)
        )
def all_gold_users():
    """Iterate over every gold account (spam or not), ordered by id."""
    query = Account._query(
        Account.c.gold == True,
        Account.c._spam == (True, False),
        data=True,
        sort="_id",
    )
    return fetch_things2(query)
def accountid_from_subscription(subscr_id):
    """Return the _id of the account owning subscr_id, or None."""
    if subscr_id is None:
        return None
    # Include spammed and deleted accounts in the lookup
    matches = list(
        Account._query(
            Account.c.gold_subscr_id == subscr_id,
            Account.c._spam == (True, False),
            Account.c._deleted == (True, False),
            data=False,
        )
    )
    return matches[0]._id if matches else None
def update_gold_users():
    """Daily sweep over gold users: expire lapsed gold and warn soon-to-expire."""
    now = datetime.now(g.display_tz)
    warning_days = 3
    renew_msg = _(
        "[Click here for details on how to set up an "
        "automatically-renewing subscription or to renew.]"
        "(/gold) If you have any thoughts, complaints, "
        "rants, suggestions about reddit gold, please write "
        "to us at %(gold_email)s. Your feedback would be "
        "much appreciated.\n\nThank you for your past "
        "patronage."
    ) % {"gold_email": g.goldsupport_email}
    for account in all_gold_users():
        days_left = (account.gold_expiration - now).days
        if days_left < 0:
            # Expired: auto-renew from creddits when opted in, else degolden
            if account.pref_creddit_autorenew:
                with creddits_lock(account):
                    if account.gold_creddits > 0:
                        admintools.adjust_gold_expiration(account, days=31)
                        account.gold_creddits -= 1
                        account._commit()
                        continue
            admintools.degolden(account)
            subject = _("Your reddit gold subscription has expired.")
            message = _("Your subscription to reddit gold has expired.")
            message += "\n\n" + renew_msg
            message = append_random_bottlecap_phrase(message)
            send_system_message(account, subject, message, distinguished="gold-auto")
        elif days_left <= warning_days and not account.gold_will_autorenew:
            # Expiring soon: warn at most once per day via a hardcache flag
            hc_key = "gold_expiration_notice-" + account.name
            already_warned = g.hardcache.get(hc_key)
            if not already_warned:
                g.hardcache.set(hc_key, True, 86400 * (warning_days + 1))
                subject = _("Your reddit gold subscription is about to " "expire!")
                message = _(
                    "Your subscription to reddit gold will be " "expiring soon."
                )
                message += "\n\n" + renew_msg
                message = append_random_bottlecap_phrase(message)
                send_system_message(
                    account, subject, message, distinguished="gold-auto"
                )
def is_banned_domain(dom):
    # Open-source stub: never reports a domain as banned.  NOTE(review): the
    # conditional r2admin wildcard import at the bottom of this module
    # presumably replaces this in private installs — confirm.
    return None
def is_shamed_domain(dom):
    # Open-source stub: (is_shamed, reason, when) with nothing ever shamed.
    # May be overridden by the conditional r2admin import below.
    return False, None, None
def bans_for_domain_parts(dom):
    # Open-source stub: no ban records for any domain part.
    # May be overridden by the conditional r2admin import below.
    return []
def apply_updates(user, timer):
    # Open-source no-op hook; private installs may override it via the
    # conditional r2admin import at the end of this module.
    pass
def ip_span(ip):
    """Embed an escaped IP address in an HTML comment.

    The address is only visible when viewing the page source.
    """
    return "<!-- %s -->" % websafe(ip)
def wiki_template(template_slug, sr=None):
    """Pull content from a subreddit's wiki page for internal use.

    Falls back to the site-wide default subreddit when no subreddit is
    given; returns None if the subreddit or wiki page doesn't exist.
    """
    if not sr:
        try:
            sr = Subreddit._by_name(g.default_sr)
        except NotFound:
            return None
    page_name = "templates/%s" % template_slug
    try:
        page = WikiPage.get(sr, page_name)
    except tdb_cassandra.NotFound:
        return None
    return page._get("content")
@admintools_hooks.on("account.registered")
def send_welcome_message(user):
    """Send the wiki-configured welcome PM to a newly registered account."""
    welcome_title = wiki_template("welcome_title")
    welcome_message = wiki_template("welcome_message")
    if not welcome_title or not welcome_message:
        # Missing/empty wiki templates: skip silently apart from a log line.
        g.log.warning("Unable to send welcome message: invalid wiki templates.")
        return
    # NOTE(review): str.format raises KeyError/IndexError if a template
    # contains braces other than {username} — presumably the wiki templates
    # are curated; confirm before editing them.
    welcome_title = welcome_title.format(username=user.name)
    welcome_message = welcome_message.format(username=user.name)
    return send_system_message(user, welcome_title, welcome_message)
def send_system_message(
    user,
    subject,
    body,
    system_user=None,
    distinguished="admin",
    repliable=False,
    add_to_sent=True,
    author=None,
    signed=False,
):
    """Send a private message to `user` from the system account.

    Args:
        user: recipient Account.
        subject, body: message contents.
        system_user: sending account; defaults to Account.system_user().
        distinguished: distinguish style attached to the message.
        repliable: whether the recipient may reply.
        add_to_sent: whether the message shows in the sender's sent box.
        author: actual author if different from the displayed system user.
        signed: whether the message is marked signed.

    Raises:
        MessageError: when the inbox query update hits a MemcachedError.
    """
    # Imported here to avoid a circular import at module load time —
    # NOTE(review): presumed reason, confirm.
    from r2.lib.db import queries
    if system_user is None:
        system_user = Account.system_user()
    if not system_user:
        g.log.warning(
            "Can't send system message "
            "- invalid system_user or g.system_user setting"
        )
        return
    if not author:
        author = system_user
    item, inbox_rel = Message._new(author, user, subject, body, ip="0.0.0.0")
    item.distinguished = distinguished
    item.repliable = repliable
    # The system user is shown as author even when `author` differs.
    item.display_author = system_user._id
    item.signed = signed
    item._commit()
    try:
        queries.new_message(item, inbox_rel, add_to_sent=add_to_sent)
    except MemcachedError:
        raise MessageError("reddit_inbox")
# Private installs layer additional admin tooling on top of this module; the
# wildcard import deliberately shadows the stub hooks defined above
# (is_banned_domain, is_shamed_domain, bans_for_domain_parts, apply_updates).
if config["r2.import_private"]:
    from r2admin.models.admintools import *
|
extractor | trilulilu | # coding: utf-8
from __future__ import unicode_literals
from ..utils import ExtractorError, int_or_none, parse_iso8601
from .common import InfoExtractor
class TriluliluIE(InfoExtractor):
    """Extractor for video/audio pages on trilulilu.ro."""

    _VALID_URL = r"https?://(?:(?:www|m)\.)?trilulilu\.ro/(?:[^/]+/)?(?P<id>[^/#\?]+)"
    _TESTS = [
        {
            "url": "http://www.trilulilu.ro/big-buck-bunny-1",
            "md5": "68da087b676a6196a413549212f60cc6",
            "info_dict": {
                "id": "ae2899e124140b",
                "ext": "mp4",
                "title": "Big Buck Bunny",
                "description": ":) pentru copilul din noi",
                "uploader_id": "chipy",
                "upload_date": "20120304",
                "timestamp": 1330830647,
                "uploader": "chipy",
                "view_count": int,
                "like_count": int,
                "comment_count": int,
            },
        },
        {
            "url": "http://www.trilulilu.ro/adena-ft-morreti-inocenta",
            "md5": "929dfb8729dc71750463af88bbbbf4a4",
            "info_dict": {
                "id": "f299710e3c91c5",
                "ext": "mp4",
                "title": "Adena ft. Morreti - Inocenta",
                "description": "pop music",
                "uploader_id": "VEVOmixt",
                "upload_date": "20151204",
                "uploader": "VEVOmixt",
                "timestamp": 1449187937,
                "view_count": int,
                "like_count": int,
                "comment_count": int,
            },
        },
    ]

    def _real_extract(self, url):
        """Extract media metadata and format list from the mobile JSON API."""
        display_id = self._match_id(url)
        media_info = self._download_json(
            "http://m.trilulilu.ro/%s?format=json" % display_id, display_id
        )
        age_limit = 0
        errors = media_info.get("errors", {})
        if errors.get("friends"):
            raise ExtractorError("This video is private.", expected=True)
        elif errors.get("geoblock"):
            raise ExtractorError(
                "This video is not available in your country.", expected=True
            )
        elif errors.get("xxx_unlogged"):
            # Adult content is hidden from logged-out users.
            age_limit = 18
        media_class = media_info.get("class")
        if media_class not in ("video", "audio"):
            raise ExtractorError("not a video or an audio")
        user = media_info.get("user", {})
        thumbnail = media_info.get("cover_url")
        if thumbnail:
            # Fix: str.format returns a new string; the previous code
            # discarded the result, leaving any {width}/{height} placeholders
            # in the thumbnail URL unsubstituted.
            thumbnail = thumbnail.format(width="1600", height="1200")
        # TODO: get correct ext for audio files
        stream_type = media_info.get("stream_type")
        formats = [
            {
                "url": media_info["href"],
                "ext": stream_type,
            }
        ]
        if media_info.get("is_hd"):
            formats.append(
                {
                    "format_id": "hd",
                    "url": media_info["hrefhd"],
                    "ext": stream_type,
                }
            )
        if media_class == "audio":
            # Audio-only stream: mark the lone format as having no video.
            formats[0]["vcodec"] = "none"
        else:
            formats[0]["format_id"] = "sd"
        return {
            # identifier looks like "<something>|<id>"; the id is part two.
            "id": media_info["identifier"].split("|")[1],
            "display_id": display_id,
            "formats": formats,
            "title": media_info["title"],
            "description": media_info.get("description"),
            "thumbnail": thumbnail,
            "uploader_id": user.get("username"),
            "uploader": user.get("fullname"),
            "timestamp": parse_iso8601(media_info.get("published"), " "),
            "duration": int_or_none(media_info.get("duration")),
            "view_count": int_or_none(media_info.get("count_views")),
            "like_count": int_or_none(media_info.get("count_likes")),
            "comment_count": int_or_none(media_info.get("count_comments")),
            "age_limit": age_limit,
        }
|
mainwindow | tabbedbrowser | # SPDX-FileCopyrightText: Florian Bruhin (The Compiler) <mail@qutebrowser.org>
#
# SPDX-License-Identifier: GPL-3.0-or-later
"""The main tabbed browser widget."""
import collections
import dataclasses
import datetime
import functools
import weakref
from typing import (
Any,
Deque,
List,
Mapping,
MutableMapping,
MutableSequence,
Optional,
Tuple,
)
from qutebrowser.browser import browsertab, history, signalfilter
from qutebrowser.config import config
from qutebrowser.keyinput import modeman
from qutebrowser.mainwindow import mainwindow, tabwidget
from qutebrowser.misc import objects, quitter
from qutebrowser.qt.core import QPoint, QTimer, QUrl, pyqtSignal, pyqtSlot
from qutebrowser.qt.widgets import QApplication, QSizePolicy, QWidget
from qutebrowser.utils import (
jinja,
log,
message,
qtutils,
urlutils,
usertypes,
utils,
version,
)
@dataclasses.dataclass
class _UndoEntry:
    """Information needed for :undo."""
    url: QUrl  # the tab's URL at close time
    history: bytes  # serialized per-tab history
    index: int  # tab index, so :undo restores the original position
    pinned: bool  # whether the tab was pinned
    created_at: datetime.datetime = dataclasses.field(
        default_factory=datetime.datetime.now
    )
# Outer sequence: one entry per close operation; inner sequence: the tabs
# closed by that operation (several tabs can be closed at once).
UndoStackType = MutableSequence[MutableSequence[_UndoEntry]]
class TabDeque:
    """Class which manages the 'last visited' tab stack.
    Instead of handling deletions by clearing old entries, they are handled by
    checking if they exist on access. This allows us to save an iteration on
    every tab delete.
    Currently, we assume we will switch to the tab returned by any of the
    getter functions. This is done because the on_switch functions will be
    called upon switch, and we don't want to duplicate entries in the stack
    for a single switch.
    """
    def __init__(self) -> None:
        size = config.val.tabs.focus_stack_size
        if size < 0:
            # A negative configured size means "unbounded".
            size = None
        # Tabs are stored as weakrefs so this stack never keeps a closed tab
        # alive; dead refs are skipped lazily on access.
        self._stack: Deque[
            weakref.ReferenceType[browsertab.AbstractTab]
        ] = collections.deque(maxlen=size)
        # Items that have been removed from the primary stack.
        self._stack_deleted: List[weakref.ReferenceType[browsertab.AbstractTab]] = []
        # Set by prev()/next(): don't record the switch they cause.
        self._ignore_next = False
        # Set by next(): keep the overflow stack across the next on_switch.
        self._keep_deleted_next = False
    def on_switch(self, old_tab: browsertab.AbstractTab) -> None:
        """Record tab switch events."""
        if self._ignore_next:
            # This switch was triggered by prev()/next(); don't re-record it.
            self._ignore_next = False
            self._keep_deleted_next = False
            return
        tab = weakref.ref(old_tab)
        if self._stack_deleted and not self._keep_deleted_next:
            # A normal switch invalidates the "forward" history.
            self._stack_deleted = []
        self._keep_deleted_next = False
        self._stack.append(tab)
    def prev(self, cur_tab: browsertab.AbstractTab) -> browsertab.AbstractTab:
        """Get the 'previous' tab in the stack.
        Throws IndexError on failure.
        """
        tab: Optional[browsertab.AbstractTab] = None
        # Dereference weakrefs, skipping dead tabs, tabs pending removal and
        # the current tab; .pop() raises IndexError when exhausted.
        while tab is None or tab.pending_removal or tab is cur_tab:
            tab = self._stack.pop()()
        self._stack_deleted.append(weakref.ref(cur_tab))
        self._ignore_next = True
        return tab
    def next(
        self,
        cur_tab: browsertab.AbstractTab,
        *,
        keep_overflow: bool = True,
    ) -> browsertab.AbstractTab:
        """Get the 'next' tab in the stack.
        Throws IndexError on failure.
        """
        tab: Optional[browsertab.AbstractTab] = None
        # Same skip rules as prev(), but walking the deleted (forward) stack.
        while tab is None or tab.pending_removal or tab is cur_tab:
            tab = self._stack_deleted.pop()()
        # On next tab-switch, current tab will be added to stack as normal.
        # However, we shouldn't wipe the overflow stack as normal.
        if keep_overflow:
            self._keep_deleted_next = True
        return tab
    def last(self, cur_tab: browsertab.AbstractTab) -> browsertab.AbstractTab:
        """Get the last tab.
        Throws IndexError on failure.
        """
        try:
            return self.next(cur_tab, keep_overflow=False)
        except IndexError:
            return self.prev(cur_tab)
    def update_size(self) -> None:
        """Update the maxsize of this TabDeque."""
        newsize = config.val.tabs.focus_stack_size
        if newsize < 0:
            newsize = None
        # We can't resize a collections.deque so just recreate it >:(
        self._stack = collections.deque(self._stack, maxlen=newsize)
# Raised and caught internally by TabbedBrowser slots; never user-visible.
class TabDeletedError(Exception):
    """Exception raised when _tab_index is called for a deleted tab."""
class TabbedBrowser(QWidget):
    """A TabWidget with QWebViews inside.
    Provides methods to manage tabs, convenience methods to interact with the
    current tab (cur_*) and filters signals to re-emit them when they occurred
    in the currently visible tab.
    For all tab-specific signals (cur_*) emitted by a tab, this happens:
       - the signal gets filtered with _filter_signals and self.cur_* gets
         emitted if the signal occurred in the current tab.
    Attributes:
        search_text/search_options: Search parameters which are shared between
                                    all tabs.
        _win_id: The window ID this tabbedbrowser is associated with.
        _filter: A SignalFilter instance.
        _now_focused: The tab which is focused now.
        _tab_insert_idx_left: Where to insert a new tab with
                              tabs.new_tab_position set to 'prev'.
        _tab_insert_idx_right: Same as above, for 'next'.
        undo_stack: List of lists of _UndoEntry objects of closed tabs.
        is_shutting_down: Whether we're currently shutting down.
        _local_marks: Jump markers local to each page
        _global_marks: Jump markers used across all pages
        default_window_icon: The qutebrowser window icon
        is_private: Whether private browsing is on for this window.
    Signals:
        cur_progress: Progress of the current tab changed (load_progress).
        cur_load_started: Current tab started loading (load_started)
        cur_load_finished: Current tab finished loading (load_finished)
        cur_url_changed: Current URL changed.
        cur_link_hovered: Link hovered in current tab (link_hovered)
        cur_scroll_perc_changed: Scroll percentage of current tab changed.
                                 arg 1: x-position in %.
                                 arg 2: y-position in %.
        cur_load_status_changed: Loading status of current tab changed.
        cur_search_match_changed: The active search match changed.
        close_window: The last tab was closed, close this window.
        resized: Emitted when the browser window has resized, so the completion
                 widget can adjust its size to it.
                 arg: The new size.
        current_tab_changed: The current tab changed to the emitted tab.
        new_tab: Emits the new WebView and its index when a new tab is opened.
        shutting_down: This TabbedBrowser will be deleted soon.
    """
    cur_progress = pyqtSignal(int)
    cur_load_started = pyqtSignal()
    cur_load_finished = pyqtSignal(bool)
    cur_url_changed = pyqtSignal(QUrl)
    cur_link_hovered = pyqtSignal(str)
    cur_scroll_perc_changed = pyqtSignal(int, int)
    cur_load_status_changed = pyqtSignal(usertypes.LoadStatus)
    cur_search_match_changed = pyqtSignal(browsertab.SearchMatch)
    # Fullscreen requested/left in the current tab (not in class docstring).
    cur_fullscreen_requested = pyqtSignal(bool)
    # Caret selection state toggled in the current tab (not in docstring).
    cur_caret_selection_toggled = pyqtSignal(browsertab.SelectionState)
    close_window = pyqtSignal()
    resized = pyqtSignal("QRect")
    current_tab_changed = pyqtSignal(browsertab.AbstractTab)
    new_tab = pyqtSignal(browsertab.AbstractTab, int)
    shutting_down = pyqtSignal()
    def __init__(self, *, win_id, private, parent=None):
        """Initialize the tab container for one window.

        Args:
            win_id: The window ID this browser belongs to.
            private: Whether this window uses private browsing.
            parent: The parent QWidget, if any.
        """
        if private:
            # NOTE(review): private windows appear to be unsupported in
            # single-process mode — confirm rationale.
            assert not qtutils.is_single_process()
        super().__init__(parent)
        self.widget = tabwidget.TabWidget(win_id, parent=self)
        self._win_id = win_id
        self._tab_insert_idx_left = 0
        self._tab_insert_idx_right = -1
        self.is_shutting_down = False
        self.widget.tabCloseRequested.connect(self.on_tab_close_requested)
        self.widget.new_tab_requested.connect(self.tabopen)  # type: ignore[arg-type,unused-ignore]
        self.widget.currentChanged.connect(self._on_current_changed)
        self.cur_fullscreen_requested.connect(self.widget.tab_bar().maybe_hide)
        self.widget.setSizePolicy(
            QSizePolicy.Policy.Expanding, QSizePolicy.Policy.Expanding
        )
        if (
            objects.backend == usertypes.Backend.QtWebEngine
            and version.qtwebengine_versions().webengine < utils.VersionNumber(5, 15, 5)
        ):
            # WORKAROUND for https://bugreports.qt.io/browse/QTBUG-65223
            self.cur_load_finished.connect(self._leave_modes_on_load)
        else:
            self.cur_load_started.connect(self._leave_modes_on_load)
        # handle mode_override
        self.current_tab_changed.connect(lambda tab: self._mode_override(tab.url()))
        self.cur_url_changed.connect(self._mode_override)
        # This init is never used, it is immediately thrown away in the next
        # line.
        self.undo_stack: UndoStackType = collections.deque()
        self._update_stack_size()
        self._filter = signalfilter.SignalFilter(win_id, self)
        self._now_focused = None
        self.search_text = None
        self.search_options: Mapping[str, Any] = {}
        self._local_marks: MutableMapping[QUrl, MutableMapping[str, QPoint]] = {}
        self._global_marks: MutableMapping[str, Tuple[QPoint, QUrl]] = {}
        self.default_window_icon = self._window().windowIcon()
        self.is_private = private
        self.tab_deque = TabDeque()
        config.instance.changed.connect(self._on_config_changed)
        quitter.instance.shutting_down.connect(self.shutdown)
def _update_stack_size(self):
newsize = config.instance.get("tabs.undo_stack_size")
if newsize < 0:
newsize = None
# We can't resize a collections.deque so just recreate it >:(
self.undo_stack = collections.deque(self.undo_stack, maxlen=newsize)
    def __repr__(self):
        # Debug repr including the tab count, e.g. "<TabbedBrowser count=3>"
        # — NOTE(review): exact shape depends on utils.get_repr.
        return utils.get_repr(self, count=self.widget.count())
@pyqtSlot(str)
def _on_config_changed(self, option):
if option == "tabs.favicons.show":
self._update_favicons()
elif option == "window.title_format":
self._update_window_title()
elif option == "tabs.undo_stack_size":
self._update_stack_size()
elif option in ["tabs.title.format", "tabs.title.format_pinned"]:
self.widget.update_tab_titles()
elif option == "tabs.focus_stack_size":
self.tab_deque.update_size()
def _tab_index(self, tab):
"""Get the index of a given tab.
Raises TabDeletedError if the tab doesn't exist anymore.
"""
try:
idx = self.widget.indexOf(tab)
except RuntimeError as e:
log.webview.debug("Got invalid tab ({})!".format(e))
raise TabDeletedError(e)
if idx == -1:
log.webview.debug("Got invalid tab (index is -1)!")
raise TabDeletedError("index is -1!")
return idx
def widgets(self):
"""Get a list of open tab widgets.
We don't implement this as generator so we can delete tabs while
iterating over the list.
"""
widgets = []
for i in range(self.widget.count()):
widget = qtutils.add_optional(self.widget.widget(i))
if widget is None:
log.webview.debug("Got None-widget in tabbedbrowser!")
else:
widgets.append(widget)
return widgets
def _update_window_title(self, field=None):
"""Change the window title to match the current tab.
Args:
idx: The tab index to update.
field: A field name which was updated. If given, the title
is only set if the given field is in the template.
"""
title_format = config.cache["window.title_format"]
if field is not None and ("{" + field + "}") not in title_format:
return
idx = self.widget.currentIndex()
if idx == -1:
# (e.g. last tab removed)
log.webview.debug("Not updating window title because index is -1")
return
fields = self.widget.get_tab_fields(idx)
fields["id"] = self._win_id
title = title_format.format(**fields)
# prevent hanging WMs and similar issues with giant URLs
title = utils.elide(title, 1024)
self._window().setWindowTitle(title)
    def _connect_tab_signals(self, tab):
        """Set up the needed signals for tab."""
        # filtered signals: re-emitted as the corresponding self.cur_* signal
        # only when `tab` is the currently visible tab.
        tab.link_hovered.connect(self._filter.create(self.cur_link_hovered, tab))
        tab.load_progress.connect(self._filter.create(self.cur_progress, tab))
        tab.load_finished.connect(self._filter.create(self.cur_load_finished, tab))
        tab.load_started.connect(self._filter.create(self.cur_load_started, tab))
        tab.scroller.perc_changed.connect(
            self._filter.create(self.cur_scroll_perc_changed, tab)
        )
        tab.url_changed.connect(self._filter.create(self.cur_url_changed, tab))
        tab.load_status_changed.connect(
            self._filter.create(self.cur_load_status_changed, tab)
        )
        tab.fullscreen_requested.connect(
            self._filter.create(self.cur_fullscreen_requested, tab)
        )
        tab.caret.selection_toggled.connect(
            self._filter.create(self.cur_caret_selection_toggled, tab)
        )
        tab.search.match_changed.connect(
            self._filter.create(self.cur_search_match_changed, tab)
        )
        # misc: per-tab handlers which always fire, with the tab bound via
        # functools.partial.
        tab.scroller.perc_changed.connect(self._on_scroll_pos_changed)
        tab.scroller.before_jump_requested.connect(lambda: self.set_mark("'"))
        tab.url_changed.connect(functools.partial(self._on_url_changed, tab))
        tab.title_changed.connect(functools.partial(self._on_title_changed, tab))
        tab.icon_changed.connect(functools.partial(self._on_icon_changed, tab))
        tab.pinned_changed.connect(functools.partial(self._on_pinned_changed, tab))
        tab.load_progress.connect(functools.partial(self._on_load_progress, tab))
        tab.load_finished.connect(functools.partial(self._on_load_finished, tab))
        tab.load_started.connect(functools.partial(self._on_load_started, tab))
        tab.load_status_changed.connect(
            functools.partial(self._on_load_status_changed, tab)
        )
        tab.window_close_requested.connect(
            functools.partial(self._on_window_close_requested, tab)
        )
        tab.renderer_process_terminated.connect(
            functools.partial(self._on_renderer_process_terminated, tab)
        )
        tab.audio.muted_changed.connect(functools.partial(self._on_audio_changed, tab))
        tab.audio.recently_audible_changed.connect(
            functools.partial(self._on_audio_changed, tab)
        )
        tab.new_tab_requested.connect(self.tabopen)
        if not self.is_private:
            # Private windows never record browsing history.
            tab.history_item_triggered.connect(history.web_history.add_from_tab)
    def _current_tab(self) -> browsertab.AbstractTab:
        """Get the current browser tab.
        Note: The assert ensures the current tab is never None.
        """
        # Only call this when a tab is known to exist; currentWidget()
        # returns None otherwise and the assert fires.
        tab = self.widget.currentWidget()
        assert isinstance(tab, browsertab.AbstractTab), tab
        return tab
    def _window(self) -> QWidget:
        """Get the current window widget.
        Note: This asserts if there is no window.
        """
        # Qt's window() walks up the parent chain to the top-level widget.
        window = self.widget.window()
        assert window is not None
        return window
def _tab_by_idx(self, idx: int) -> Optional[browsertab.AbstractTab]:
"""Get a browser tab by index.
If no tab was found at the given index, None is returned.
"""
tab = self.widget.widget(idx)
if tab is not None:
assert isinstance(tab, browsertab.AbstractTab), tab
return tab
def current_url(self):
"""Get the URL of the current tab.
Intended to be used from command handlers.
Return:
The current URL as QUrl.
"""
idx = self.widget.currentIndex()
return self.widget.tab_url(idx)
    def shutdown(self):
        """Try to shut down all tabs cleanly."""
        self.is_shutting_down = True
        # Reverse tabs so we don't have to recalculate tab titles over and over
        # Removing first causes [2..-1] to be recomputed
        # Removing the last causes nothing to be recomputed
        for idx, tab in enumerate(reversed(self.widgets())):
            # Only the first removal (idx == 0) starts a new undo group; the
            # rest join it, so one :undo could restore the whole set.
            self._remove_tab(tab, new_undo=idx == 0)
        self.shutting_down.emit()
def tab_close_prompt_if_pinned(
self,
tab,
force,
yes_action,
text="Are you sure you want to close a pinned tab?",
):
"""Helper method for tab_close.
If tab is pinned, prompt. If not, run yes_action.
If tab is destroyed, abort question.
"""
if tab.data.pinned and not force:
message.confirm_async(
title="Pinned Tab",
text=text,
yes_action=yes_action,
default=False,
abort_on=[tab.destroyed],
)
else:
yes_action()
def close_tab(self, tab, *, add_undo=True, new_undo=True, transfer=False):
"""Close a tab.
Args:
tab: The QWebView to be closed.
add_undo: Whether the tab close can be undone.
new_undo: Whether the undo entry should be a new item in the stack.
transfer: Whether the tab is closing because it is moving to a new window.
"""
if config.val.tabs.tabs_are_windows or transfer:
last_close = "close"
else:
last_close = config.val.tabs.last_close
count = self.widget.count()
if last_close == "ignore" and count == 1:
return
self._remove_tab(tab, add_undo=add_undo, new_undo=new_undo)
if count == 1: # We just closed the last tab above.
if last_close == "close":
self.close_window.emit()
elif last_close == "blank":
self.load_url(QUrl("about:blank"), newtab=True)
elif last_close == "startpage":
for url in config.val.url.start_pages:
self.load_url(url, newtab=True)
elif last_close == "default-page":
self.load_url(config.val.url.default_page, newtab=True)
    def _remove_tab(self, tab, *, add_undo=True, new_undo=True, crashed=False):
        """Remove a tab from the tab list and delete it properly.
        Args:
            tab: The QWebView to be closed.
            add_undo: Whether the tab close can be undone.
            new_undo: Whether the undo entry should be a new item in the stack.
            crashed: Whether we're closing a tab with crashed renderer process.
        """
        idx = self.widget.indexOf(tab)
        if idx == -1:
            if crashed:
                # A crashed tab may already be gone from the widget.
                return
            raise TabDeletedError(
                "tab {} is not contained in " "TabbedWidget!".format(tab)
            )
        if tab is self._now_focused:
            self._now_focused = None
        # Lets TabDeque and late signal handlers skip this tab.
        tab.pending_removal = True
        if tab.url().isEmpty():
            # There are some good reasons why a URL could be empty
            # (target="_blank" with a download, see [1]), so we silently ignore
            # this.
            # [1] https://github.com/qutebrowser/qutebrowser/issues/163
            pass
        elif not tab.url().isValid():
            # We display a warning for URLs which are not empty but invalid -
            # but we don't return here because we want the tab to close either
            # way.
            urlutils.invalid_url_error(tab.url(), "saving tab")
        elif add_undo:
            try:
                history_data = tab.history.private_api.serialize()
            except browsertab.WebTabError:
                pass  # special URL
            else:
                entry = _UndoEntry(
                    url=tab.url(),
                    history=history_data,
                    index=idx,
                    pinned=tab.data.pinned,
                )
                if new_undo or not self.undo_stack:
                    self.undo_stack.append([entry])
                else:
                    # Join the previous undo group so :undo restores both.
                    self.undo_stack[-1].append(entry)
        tab.private_api.shutdown()
        self.widget.removeTab(idx)
        # Defer C++-side deletion until control returns to the event loop.
        tab.deleteLater()
    def undo(self, depth=1):
        """Undo removing of a tab or tabs.

        Args:
            depth: How far back in the undo stack to go
                   (1 = the most recent close operation).
        """
        # Remove unused tab which may be created after the last tab is closed
        last_close = config.val.tabs.last_close
        use_current_tab = False
        last_close_replaces = last_close in ["blank", "startpage", "default-page"]
        only_one_tab_open = self.widget.count() == 1
        if only_one_tab_open and last_close_replaces:
            # If the only open tab is the untouched replacement page that
            # tabs.last_close created (no history, URL still matches), reuse
            # it instead of opening an extra tab.
            tab = self._tab_by_idx(0)
            assert tab is not None
            no_history = len(tab.history) == 1
            urls = {
                "blank": QUrl("about:blank"),
                "startpage": config.val.url.start_pages[0],
                "default-page": config.val.url.default_page,
            }
            first_tab_url = tab.url()
            last_close_urlstr = urls[last_close].toString().rstrip("/")
            first_tab_urlstr = first_tab_url.toString().rstrip("/")
            last_close_url_used = first_tab_urlstr == last_close_urlstr
            use_current_tab = no_history and last_close_url_used
        entries = self.undo_stack[-depth]
        del self.undo_stack[-depth]
        # Restore in reverse order so indices land where they originally were.
        for entry in reversed(entries):
            if use_current_tab:
                newtab = self._tab_by_idx(0)
                assert newtab is not None
                # Only the first restored entry may reuse the existing tab.
                use_current_tab = False
            else:
                newtab = self.tabopen(background=False, idx=entry.index)
            newtab.history.private_api.deserialize(entry.history)
            newtab.set_pinned(entry.pinned)
            newtab.setFocus()
@pyqtSlot("QUrl", bool)
def load_url(self, url, newtab):
"""Open a URL, used as a slot.
Args:
url: The URL to open as QUrl.
newtab: True to open URL in a new tab, False otherwise.
"""
qtutils.ensure_valid(url)
if newtab or self.widget.currentWidget() is None:
self.tabopen(url, background=False)
else:
self._current_tab().load_url(url)
@pyqtSlot(int)
def on_tab_close_requested(self, idx):
"""Close a tab via an index."""
tab = self._tab_by_idx(idx)
if tab is None:
log.webview.debug("Got invalid tab {} for index {}!".format(tab, idx))
return
self.tab_close_prompt_if_pinned(tab, False, lambda: self.close_tab(tab))
@pyqtSlot(browsertab.AbstractTab)
def _on_window_close_requested(self, widget):
"""Close a tab with a widget given."""
try:
self.close_tab(widget)
except TabDeletedError:
log.webview.debug(
"Requested to close {!r} which does not " "exist!".format(widget)
)
    @pyqtSlot("QUrl")
    @pyqtSlot("QUrl", bool)
    @pyqtSlot("QUrl", bool, bool)
    def tabopen(
        self,
        url: QUrl = None,
        background: bool = None,
        related: bool = True,
        idx: int = None,
    ) -> browsertab.AbstractTab:
        """Open a new tab with a given URL.
        Inner logic for open-tab and open-tab-bg.
        Also connect all the signals we need to _filter_signals.
        Args:
            url: The URL to open as QUrl or None for an empty tab.
            background: Whether to open the tab in the background.
                        if None, the `tabs.background` setting decides.
            related: Whether the tab was opened from another existing tab.
                     If this is set, the new position might be different. With
                     the default settings we handle it like Chromium does:
                         - Tabs from clicked links etc. are to the right of
                           the current (related=True).
                         - Explicitly opened tabs are at the very right
                           (related=False)
            idx: The index where the new tab should be opened.
        Return:
            The opened WebView instance.
        """
        if url is not None:
            qtutils.ensure_valid(url)
        log.webview.debug(
            "Creating new tab with URL {}, background {}, " "related {}, idx {}".format(
                url, background, related, idx
            )
        )
        # Remembered so a command/prompt keeps focus when opening a bg tab.
        prev_focus = QApplication.focusWidget()
        if config.val.tabs.tabs_are_windows and self.widget.count() > 0:
            # Every tab gets its own window: delegate to a fresh MainWindow.
            window = mainwindow.MainWindow(private=self.is_private)
            tab = window.tabbed_browser.tabopen(
                url=url, background=background, related=related
            )
            window.show()
            return tab
        tab = browsertab.create(
            win_id=self._win_id, private=self.is_private, parent=self.widget
        )
        self._connect_tab_signals(tab)
        if idx is None:
            idx = self._get_new_tab_idx(related)
        self.widget.insertTab(idx, tab, "")
        if url is not None:
            tab.load_url(url)
        if background is None:
            background = config.val.tabs.background
        if background:
            # Make sure the background tab has the correct initial size.
            # With a foreground tab, it's going to be resized correctly by the
            # layout anyways.
            current_widget = self._current_tab()
            tab.resize(current_widget.size())
            self.widget.tab_index_changed.emit(
                self.widget.currentIndex(), self.widget.count()
            )
            # Refocus webview in case we lost it by spawning a bg tab
            current_widget.setFocus()
        else:
            self.widget.setCurrentWidget(tab)
            mode = modeman.instance(self._win_id).mode
            if mode in [
                usertypes.KeyMode.command,
                usertypes.KeyMode.prompt,
                usertypes.KeyMode.yesno,
            ]:
                # If we were in a command prompt, restore old focus
                # The above commands need to be run to switch tabs
                if prev_focus is not None:
                    prev_focus.setFocus()
        tab.show()
        self.new_tab.emit(tab, idx)
        return tab
def _get_new_tab_idx(self, related):
"""Get the index of a tab to insert.
Args:
related: Whether the tab was opened from another tab (as a "child")
Return:
The index of the new tab.
"""
if related:
pos = config.val.tabs.new_position.related
else:
pos = config.val.tabs.new_position.unrelated
if pos == "prev":
if config.val.tabs.new_position.stacking:
idx = self._tab_insert_idx_left
# On first sight, we'd think we have to decrement
# self._tab_insert_idx_left here, as we want the next tab to be
# *before* the one we just opened. However, since we opened a
# tab *before* the currently focused tab, indices will shift by
# 1 automatically.
else:
idx = self.widget.currentIndex()
elif pos == "next":
if config.val.tabs.new_position.stacking:
idx = self._tab_insert_idx_right
else:
idx = self.widget.currentIndex() + 1
self._tab_insert_idx_right += 1
elif pos == "first":
idx = 0
elif pos == "last":
idx = -1
else:
raise ValueError("Invalid tabs.new_position '{}'.".format(pos))
log.webview.debug(
"tabs.new_position {} -> opening new tab at {}, "
"next left: {} / right: {}".format(
pos, idx, self._tab_insert_idx_left, self._tab_insert_idx_right
)
)
return idx
    def _update_favicons(self):
        """Update favicons when config was changed."""
        # Refresh every open tab so a tabs.favicons.show change applies
        # immediately.
        for tab in self.widgets():
            self.widget.update_tab_favicon(tab)
    @pyqtSlot()
    def _on_load_started(self, tab):
        """Clear icon and update title when a tab started loading.
        Args:
            tab: The tab where the signal belongs to.
        """
        if tab.data.keep_icon:
            # One-shot flag: preserve the favicon through this load only.
            tab.data.keep_icon = False
        elif config.cache["tabs.tabs_are_windows"] and tab.data.should_show_icon():
            # In one-tab-per-window mode, fall back to the default window icon
            # until the page provides a new favicon.
            self._window().setWindowIcon(self.default_window_icon)
@pyqtSlot()
def _on_load_status_changed(self, tab):
"""Update tab/window titles if the load status changed."""
try:
idx = self._tab_index(tab)
except TabDeletedError:
# We can get signals for tabs we already deleted...
return
self.widget.update_tab_title(idx)
if idx == self.widget.currentIndex():
self._update_window_title()
    @pyqtSlot()
    def _leave_modes_on_load(self):
        """Leave insert/hint mode when loading started."""
        try:
            url = self.current_url()
            if not url.isValid():
                url = None
        except qtutils.QtValueError:
            url = None
        # Insert mode: setting supports per-URL patterns, so pass the URL.
        if config.instance.get("input.insert_mode.leave_on_load", url=url):
            modeman.leave(
                self._win_id, usertypes.KeyMode.insert, "load started", maybe=True
            )
        else:
            log.modes.debug("Ignoring leave_on_load request due to setting.")
        # Hint mode: plain (non-URL-specific) cached setting.
        if config.cache["hints.leave_on_load"]:
            modeman.leave(
                self._win_id, usertypes.KeyMode.hint, "load started", maybe=True
            )
        else:
            log.modes.debug("Ignoring leave_on_load request due to setting.")
@pyqtSlot(browsertab.AbstractTab, str)
def _on_title_changed(self, tab, text):
"""Set the title of a tab.
Slot for the title_changed signal of any tab.
Args:
tab: The WebView where the title was changed.
text: The text to set.
"""
if not text:
log.webview.debug("Ignoring title change to '{}'.".format(text))
return
try:
idx = self._tab_index(tab)
except TabDeletedError:
# We can get signals for tabs we already deleted...
return
log.webview.debug("Changing title for idx {} to '{}'".format(idx, text))
self.widget.set_page_title(idx, text)
if idx == self.widget.currentIndex():
self._update_window_title()
@pyqtSlot(browsertab.AbstractTab, QUrl)
def _on_url_changed(self, tab, url):
"""Set the new URL as title if there's no title yet.
Args:
tab: The WebView where the title was changed.
url: The new URL.
"""
try:
idx = self._tab_index(tab)
except TabDeletedError:
# We can get signals for tabs we already deleted...
return
if not self.widget.page_title(idx):
self.widget.set_page_title(idx, url.toDisplayString())
def _mode_override(self, url: QUrl) -> None:
"""Override mode if url matches pattern.
Args:
url: The QUrl to match for
"""
if not url.isValid():
return
mode = config.instance.get("input.mode_override", url=url)
if mode:
log.modes.debug(f"Mode change to {mode} triggered for url {url}")
modeman.enter(
self._win_id,
usertypes.KeyMode[mode],
reason="mode_override",
)
@pyqtSlot(browsertab.AbstractTab)
def _on_icon_changed(self, tab):
"""Set the icon of a tab.
Slot for the iconChanged signal of any tab.
Args:
tab: The WebView where the title was changed.
"""
try:
self._tab_index(tab)
except TabDeletedError:
# We can get signals for tabs we already deleted...
return
self.widget.update_tab_favicon(tab)
@pyqtSlot(usertypes.KeyMode)
def on_mode_entered(self, mode):
"""Save input mode when tabs.mode_on_change = restore."""
if config.val.tabs.mode_on_change == "restore" and mode in modeman.INPUT_MODES:
tab = self.widget.currentWidget()
if tab is not None:
assert isinstance(tab, browsertab.AbstractTab), tab
tab.data.input_mode = mode
@pyqtSlot(usertypes.KeyMode)
def on_mode_left(self, mode):
"""Give focus to current tab if command mode was left."""
widget = qtutils.add_optional(self.widget.currentWidget())
if widget is None:
return
if mode in [usertypes.KeyMode.command] + modeman.PROMPT_MODES:
log.modes.debug("Left status-input mode, focusing {!r}".format(widget))
widget.setFocus()
if config.val.tabs.mode_on_change == "restore":
assert isinstance(widget, browsertab.AbstractTab), widget
widget.data.input_mode = usertypes.KeyMode.normal
    @pyqtSlot(int)
    def _on_current_changed(self, idx):
        """Add prev tab to stack and leave hinting mode when focus changed."""
        mode_on_change = config.val.tabs.mode_on_change
        if idx == -1 or self.is_shutting_down:
            # closing the last tab (before quitting) or shutting down
            return
        tab = self._tab_by_idx(idx)
        if tab is None:
            log.webview.debug(
                "on_current_changed got called with invalid index {}".format(idx)
            )
            return
        log.modes.debug("Current tab changed, focusing {!r}".format(tab))
        tab.setFocus()
        # Hint/caret mode never survive a tab switch.
        modes_to_leave = [usertypes.KeyMode.hint, usertypes.KeyMode.caret]
        mm_instance = modeman.instance(self._win_id)
        current_mode = mm_instance.mode
        log.modes.debug(
            "Mode before tab change: {} (mode_on_change = {})".format(
                current_mode.name, mode_on_change
            )
        )
        if mode_on_change == "normal":
            # Also drop input modes (e.g. insert) on switch in "normal" mode.
            modes_to_leave += modeman.INPUT_MODES
        for mode in modes_to_leave:
            modeman.leave(self._win_id, mode, "tab changed", maybe=True)
        if mode_on_change == "restore" and current_mode not in modeman.PROMPT_MODES:
            # Re-enter the input mode previously saved for this tab.
            modeman.enter(self._win_id, tab.data.input_mode, "restore")
        if self._now_focused is not None:
            # Record the previously focused tab in the focus stack.
            self.tab_deque.on_switch(self._now_focused)
        log.modes.debug(
            "Mode after tab change: {} (mode_on_change = {})".format(
                current_mode.name, mode_on_change
            )
        )
        self._now_focused = tab
        self.current_tab_changed.emit(tab)
        self.cur_search_match_changed.emit(tab.search.match)
        # Defer the title update until the switch has fully settled.
        QTimer.singleShot(0, self._update_window_title)
        self._tab_insert_idx_left = self.widget.currentIndex()
        self._tab_insert_idx_right = self.widget.currentIndex() + 1
@pyqtSlot()
def on_cmd_return_pressed(self):
    """Set focus when the commandline closes."""
    debug_message = "Commandline closed, focusing {!r}".format(self)
    log.modes.debug(debug_message)
def _on_load_progress(self, tab, perc):
    """Adjust tab indicator on load progress."""
    try:
        tab_idx = self._tab_index(tab)
    except TabDeletedError:
        return  # signals may still arrive for tabs that were already deleted
    colors = config.cache
    indicator_color = qtutils.interpolate_color(
        colors["colors.tabs.indicator.start"],
        colors["colors.tabs.indicator.stop"],
        perc,
        colors["colors.tabs.indicator.system"],
    )
    self.widget.set_tab_indicator_color(tab_idx, indicator_color)
    self.widget.update_tab_title(tab_idx)
    if tab_idx == self.widget.currentIndex():
        self._update_window_title()
def _on_load_finished(self, tab, ok):
    """Adjust tab indicator when loading finished."""
    try:
        tab_idx = self._tab_index(tab)
    except TabDeletedError:
        return  # signals may still arrive for tabs that were already deleted
    if not ok:
        indicator_color = config.cache["colors.tabs.indicator.error"]
    else:
        colors = config.cache
        indicator_color = qtutils.interpolate_color(
            colors["colors.tabs.indicator.start"],
            colors["colors.tabs.indicator.stop"],
            100,
            colors["colors.tabs.indicator.system"],
        )
    self.widget.set_tab_indicator_color(tab_idx, indicator_color)
    if tab_idx == self.widget.currentIndex():
        tab.private_api.handle_auto_insert_mode(ok)
@pyqtSlot()
def _on_scroll_pos_changed(self):
    """Update tab and window title when scroll position changed."""
    current_idx = self.widget.currentIndex()
    if current_idx < 0:
        # No current tab, e.g. because the last one was just removed.
        log.webview.debug("Not updating scroll position because index is -1")
        return
    self._update_window_title("scroll_pos")
    self.widget.update_tab_title(current_idx, "scroll_pos")
def _on_pinned_changed(self, tab):
    """Refresh favicon and title after a tab's pinned status changed."""
    tab_idx = self.widget.indexOf(tab)
    self.widget.update_tab_favicon(tab)
    self.widget.update_tab_title(tab_idx)
def _on_audio_changed(self, tab, _muted):
    """Update audio field in tab when mute or recentlyAudible changed."""
    try:
        tab_idx = self._tab_index(tab)
    except TabDeletedError:
        return  # signals may still arrive for tabs that were already deleted
    self.widget.update_tab_title(tab_idx, "audio")
    if tab_idx != self.widget.currentIndex():
        return
    self._update_window_title("audio")
def _on_renderer_process_terminated(self, tab, status, code):
    """Show an error when a renderer process terminated.

    Args:
        tab: The tab whose renderer process died.
        status: A browsertab.TerminationStatus value.
        code: The renderer's numeric exit status.
    """
    if status == browsertab.TerminationStatus.normal:
        return
    messages = {
        browsertab.TerminationStatus.abnormal: "Renderer process exited",
        browsertab.TerminationStatus.crashed: "Renderer process crashed",
        browsertab.TerminationStatus.killed: "Renderer process was killed",
        browsertab.TerminationStatus.unknown: "Renderer process did not start",
    }
    msg = messages[status] + f" (status {code})"
    # WORKAROUND for https://bugreports.qt.io/browse/QTBUG-91715
    versions = version.qtwebengine_versions()
    is_qtbug_91715 = (
        status == browsertab.TerminationStatus.unknown
        and code == 1002
        and versions.webengine == utils.VersionNumber(5, 15, 3)
    )

    def show_error_page(html):
        # Replace the dead tab's contents with the rendered error page.
        tab.set_html(html)
        log.webview.error(msg)

    if is_qtbug_91715:
        # Known Qt bug: no error page, just log pointers to the issue and
        # the workaround setting.
        log.webview.error(msg)
        log.webview.error("")
        log.webview.error(
            'NOTE: If you see this and "Network service crashed, restarting '
            'service.", please see:'
        )
        log.webview.error("https://github.com/qutebrowser/qutebrowser/issues/6235")
        log.webview.error(
            'You can set the "qt.workarounds.locale" setting in qutebrowser to '
            "work around the issue."
        )
        log.webview.error(
            "A proper fix is likely available in QtWebEngine soon (which is why "
            "the workaround is disabled by default)."
        )
        log.webview.error("")
    else:
        url_string = tab.url(requested=True).toDisplayString()
        error_page = jinja.render(
            "error.html",
            title="Error loading {}".format(url_string),
            url=url_string,
            error=msg,
        )
        # NOTE(review): delayed by 100ms, presumably so the error page does
        # not race the renderer teardown — confirm before changing.
        QTimer.singleShot(100, lambda: show_error_page(error_page))
def resizeEvent(self, e):
    """Extend QWidget.resizeEvent to emit a resized signal afterwards.

    Args:
        e: The QResizeEvent
    """
    super().resizeEvent(e)
    new_geometry = self.geometry()
    self.resized.emit(new_geometry)
def wheelEvent(self, e):
    """Forward wheel events to the currently focused tab, if any.

    Args:
        e: The QWheelEvent
    """
    if self._now_focused is None:
        e.ignore()
        return
    self._now_focused.wheelEvent(e)
def set_mark(self, key):
    """Set a mark at the current scroll position in the current tab.

    Args:
        key: mark identifier; capital indicates a global mark
    """
    # Strip the fragment, as it may interfere with scrolling.
    try:
        url = self.current_url().adjusted(QUrl.UrlFormattingOption.RemoveFragment)
    except qtutils.QtValueError:
        # Only complain when the mark was set explicitly, not for the
        # automatic ' mark.
        if key != "'":
            message.error("Failed to set mark: url invalid")
        return
    point = self._current_tab().scroller.pos_px()
    if key.isupper():
        # Global marks are shared across all pages.
        self._global_marks[key] = point, url
        return
    page_marks = self._local_marks.setdefault(url, {})
    page_marks[key] = point
def jump_mark(self, key):
    """Jump to the mark named by `key`.

    Args:
        key: mark identifier; capital indicates a global mark
    """
    try:
        # consider urls that differ only in fragment to be identical
        urlkey = self.current_url().adjusted(
            QUrl.UrlFormattingOption.RemoveFragment
        )
    except qtutils.QtValueError:
        urlkey = None
    tab = self._current_tab()
    if key.isupper():
        # Global mark: may point at a different page, so load its URL and
        # scroll only once loading has finished.
        if key in self._global_marks:
            point, url = self._global_marks[key]

            def callback(ok):
                """Scroll once loading finished."""
                if ok:
                    # One-shot: disconnect before scrolling.
                    self.cur_load_finished.disconnect(callback)
                    tab.scroller.to_point(point)

            self.load_url(url, newtab=False)
            self.cur_load_finished.connect(callback)
        else:
            message.error("Mark {} is not set".format(key))
    elif urlkey is None:
        message.error("Current URL is invalid!")
    elif urlkey in self._local_marks and key in self._local_marks[urlkey]:
        point = self._local_marks[urlkey][key]
        # save the pre-jump position in the special ' mark
        # this has to happen after we read the mark, otherwise jump_mark
        # "'" would just jump to the current position every time
        tab.scroller.before_jump_requested.emit()
        tab.scroller.to_point(point)
    else:
        message.error("Mark {} is not set".format(key))
import logging
from gaphor.storage.xmlwriter import XMLWriter
logger = logging.getLogger(__name__)
class XMIExport:
    """Export a Gaphor model as an XMI 2.1 document.

    Elements are dispatched to ``handle<ClassName>`` methods.  An element
    that has already been written in full is emitted again as an
    ``XMI:idref`` reference instead of a second full definition.
    """

    XMI_VERSION = "2.1"
    XMI_NAMESPACE = "http://schema.omg.org/spec/XMI/2.1"
    UML_NAMESPACE = "http://schema.omg.org/spec/UML/2.1"
    XMI_PREFIX = "XMI"
    UML_PREFIX = "UML"

    def __init__(self, element_factory):
        self.element_factory = element_factory
        # IDs of elements already written in full; later occurrences are
        # emitted as references.
        self.handled_ids = []

    def handle(self, xmi, element):
        """Dispatch *element* to its ``handle<ClassName>`` method.

        Missing handlers are logged as warnings; any other failure inside a
        handler is logged as an error so the export continues.
        """
        logger.debug(f"Handling {element.__class__.__name__}")
        handler_name = f"handle{element.__class__.__name__}"
        # Look the handler up explicitly.  The previous try/except
        # AttributeError also swallowed AttributeErrors raised *inside*
        # handlers, misreporting real bugs as a missing handler.
        handler = getattr(self, handler_name, None)
        if handler is None:
            logger.warning(f"Missing handler for {element.__class__.__name__}")
            return
        try:
            idref = element.id in self.handled_ids
            handler(xmi, element, idref=idref)
            if not idref:
                self.handled_ids.append(element.id)
        except Exception as e:
            logger.error(f"Failed to handle {element.__class__.__name__}:{e}")

    def handlePackage(self, xmi, element, idref=False):
        """Write a UML:Package and recurse into its owned members."""
        attributes = {
            f"{self.XMI_PREFIX}:id": element.id,
            "name": element.name,
            "visibility": element.visibility,
        }
        xmi.startElement(f"{self.UML_PREFIX}:Package", attrs=attributes)
        for ownedMember in element.ownedMember:
            xmi.startElement("ownedMember", attrs={})
            self.handle(xmi, ownedMember)
            xmi.endElement("ownedMember")
        xmi.endElement(f"{self.UML_PREFIX}:Package")

    def handleClass(self, xmi, element, idref=False):
        """Write a UML:Class, as a full definition or an idref reference."""
        attributes = {}
        if idref:
            attributes[f"{self.XMI_PREFIX}:idref"] = element.id
        else:
            attributes[f"{self.XMI_PREFIX}:id"] = element.id
        attributes["name"] = element.name
        attributes["isAbstract"] = str(element.isAbstract)
        xmi.startElement(f"{self.UML_PREFIX}:Class", attrs=attributes)
        if not idref:
            # Only a full definition carries attributes and operations.
            for ownedAttribute in element.ownedAttribute:
                xmi.startElement("ownedAttribute", attrs={})
                self.handle(xmi, ownedAttribute)
                xmi.endElement("ownedAttribute")
            for ownedOperation in element.ownedOperation:
                xmi.startElement("ownedOperation", attrs={})
                self.handle(xmi, ownedOperation)
                xmi.endElement("ownedOperation")
        xmi.endElement(f"{self.UML_PREFIX}:Class")

    def handleProperty(self, xmi, element, idref=False):
        """Write a UML:Property with its modifier flags and optional type."""
        attributes = {
            f"{self.XMI_PREFIX}:id": element.id,
            "isStatic": str(element.isStatic),
            "isOrdered": str(element.isOrdered),
            "isUnique": str(element.isUnique),
            "isDerived": str(element.isDerived),
            "isDerivedUnion": str(element.isDerivedUnion),
            "isReadOnly": str(element.isReadOnly),
        }
        if element.name is not None:
            attributes["name"] = element.name
        xmi.startElement(f"{self.UML_PREFIX}:Property", attrs=attributes)
        # TODO: This should be type, not typeValue.
        if element.typeValue is not None:
            xmi.startElement("type", attrs={})
            self.handle(xmi, element.typeValue)
            xmi.endElement("type")
        xmi.endElement(f"{self.UML_PREFIX}:Property")

    def handleOperation(self, xmi, element, idref=False):
        """Write a UML:Operation and its owned parameters."""
        attributes = {
            f"{self.XMI_PREFIX}:id": element.id,
            "isStatic": str(element.isStatic),
            "isQuery": str(element.isQuery),
            "name": element.name,
        }
        # BUG FIX: the element name was written with XMI_PREFIX
        # ("XMI:Operation") while every other UML element in this exporter
        # uses UML_PREFIX.
        xmi.startElement(f"{self.UML_PREFIX}:Operation", attrs=attributes)
        for ownedParameter in element.ownedParameter:
            xmi.startElement("ownedElement", attrs={})
            self.handle(xmi, ownedParameter)
            xmi.endElement("ownedElement")
        xmi.endElement(f"{self.UML_PREFIX}:Operation")

    def handleParameter(self, xmi, element, idref=False):
        """Write a UML:Parameter."""
        attributes = {
            f"{self.XMI_PREFIX}:id": element.id,
            "isOrdered": str(element.isOrdered),
            "isUnique": str(element.isUnique),
            "direction": element.direction,
            "name": element.name,
        }
        # BUG FIX: was written with XMI_PREFIX ("XMI:Parameter"); see
        # handleOperation.
        xmi.startElement(f"{self.UML_PREFIX}:Parameter", attrs=attributes)
        xmi.endElement(f"{self.UML_PREFIX}:Parameter")

    def handleLiteralSpecification(self, xmi, element, idref=False):
        """Write a UML:LiteralSpecification holding a literal value."""
        attributes = {f"{self.XMI_PREFIX}:id": element.id, "value": element.value}
        xmi.startElement(f"{self.UML_PREFIX}:LiteralSpecification", attrs=attributes)
        xmi.endElement(f"{self.UML_PREFIX}:LiteralSpecification")

    def handleAssociation(self, xmi, element, idref=False):
        """Write a UML:Association with its member and owned ends."""
        attributes = {
            f"{self.XMI_PREFIX}:id": element.id,
            "isDerived": str(element.isDerived),
        }
        xmi.startElement(f"{self.UML_PREFIX}:Association", attrs=attributes)
        for memberEnd in element.memberEnd:
            xmi.startElement("memberEnd", attrs={})
            self.handle(xmi, memberEnd)
            xmi.endElement("memberEnd")
        for ownedEnd in element.ownedEnd:
            xmi.startElement("ownedEnd", attrs={})
            self.handle(xmi, ownedEnd)
            xmi.endElement("ownedEnd")
        xmi.endElement(f"{self.UML_PREFIX}:Association")

    def handleDependency(self, xmi, element, idref=False, name="Dependency"):
        """Write a dependency-like element (*name*) with clients/suppliers."""
        attributes = {f"{self.XMI_PREFIX}:id": element.id}
        xmi.startElement(f"{self.UML_PREFIX}:{name}", attrs=attributes)
        for client in element.client:
            xmi.startElement("client", attrs={})
            self.handle(xmi, client)
            xmi.endElement("client")
        for supplier in element.supplier:
            xmi.startElement("supplier", attrs={})
            self.handle(xmi, supplier)
            xmi.endElement("supplier")
        xmi.endElement(f"{self.UML_PREFIX}:{name}")

    def handleGeneralization(self, xmi, element, idref=False):
        """Write a UML:Generalization with its general/specific ends."""
        attributes = {
            f"{self.XMI_PREFIX}:id": element.id,
            "isSubstitutable": str(element.isSubstitutable),
        }
        xmi.startElement(f"{self.UML_PREFIX}:Generalization", attrs=attributes)
        if element.general:
            xmi.startElement("general", attrs={})
            self.handle(xmi, element.general)
            xmi.endElement("general")
        if element.specific:
            xmi.startElement("specific", attrs={})
            self.handle(xmi, element.specific)
            xmi.endElement("specific")
        xmi.endElement(f"{self.UML_PREFIX}:Generalization")

    def handleRealization(self, xmi, element, idref=False):
        """A Realization is serialized as a Dependency named Realization."""
        self.handleDependency(xmi, element, idref, name="Realization")

    def handleInterface(self, xmi, element, idref=False):
        """Write a UML:Interface with its attributes and operations."""
        attributes = {f"{self.XMI_PREFIX}:id": element.id}
        xmi.startElement(f"{self.UML_PREFIX}:Interface", attrs=attributes)
        for ownedAttribute in element.ownedAttribute:
            xmi.startElement("ownedAttribute", attrs={})
            self.handle(xmi, ownedAttribute)
            xmi.endElement("ownedAttribute")
        for ownedOperation in element.ownedOperation:
            xmi.startElement("ownedOperation", attrs={})
            self.handle(xmi, ownedOperation)
            xmi.endElement("ownedOperation")
        xmi.endElement(f"{self.UML_PREFIX}:Interface")

    def handleDiagram(self, xmi, element, idref=False):
        """Diagrams are not part of the XMI model export; skip them."""
        pass

    def export(self, filename):
        """Write the whole model to *filename* as an XMI document."""
        with open(filename, "w", encoding="utf-8") as out:
            xmi = XMLWriter(out)
            attributes = {
                "xmi.version": self.XMI_VERSION,
                "xmlns:xmi": self.XMI_NAMESPACE,
                "xmlns:UML": self.UML_NAMESPACE,
            }
            xmi.startElement("XMI", attrs=attributes)
            # Packages first (they recurse into their members), then
            # relationships that may reference already-written elements.
            for package in self.element_factory.select(self.select_package):
                self.handle(xmi, package)
            for generalization in self.element_factory.select(
                self.select_generalization
            ):
                self.handle(xmi, generalization)
            for realization in self.element_factory.select(self.select_realization):
                self.handle(xmi, realization)
            xmi.endElement("XMI")
        logger.debug(self.handled_ids)

    def select_package(self, element):
        """Predicate: *element* is a Package."""
        return element.__class__.__name__ == "Package"

    def select_generalization(self, element):
        """Predicate: *element* is a Generalization."""
        return element.__class__.__name__ == "Generalization"

    def select_realization(self, element):
        """Predicate: *element* is an InterfaceRealization."""
        return element.__class__.__name__ == "InterfaceRealization"
# coding: utf-8
from __future__ import unicode_literals
import re
from ..compat import compat_HTTPError, compat_urlparse
from ..utils import (
ExtractorError,
determine_ext,
int_or_none,
parse_duration,
parse_iso8601,
qualities,
try_get,
update_url_query,
url_or_none,
urljoin,
)
from .common import InfoExtractor
class TVPlayIE(InfoExtractor):
    """Extractor for MTG's TVPlay family of sites.

    Covers the Latvian/Lithuanian/Estonian/Scandinavian/Bulgarian MTG
    portals, plus the internal ``mtg:<id>`` scheme.  Metadata and stream
    URLs come from the shared playapi.mtgx.tv JSON API.
    """

    IE_NAME = "mtg"
    IE_DESC = "MTG services"
    _VALID_URL = r"""(?x)
        (?:
            mtg:|
            https?://
                (?:www\.)?
                (?:
                    tvplay(?:\.skaties)?\.lv(?:/parraides)?|
                    (?:tv3play|play\.tv3)\.lt(?:/programos)?|
                    tv3play(?:\.tv3)?\.ee/sisu|
                    (?:tv(?:3|6|8|10)play|viafree)\.se/program|
                    (?:(?:tv3play|viasat4play|tv6play|viafree)\.no|(?:tv3play|viafree)\.dk)/programmer|
                    play\.nova(?:tv)?\.bg/programi
                )
                /(?:[^/]+/)+
        )
        (?P<id>\d+)
        """
    _TESTS = [
        {
            "url": "http://www.tvplay.lv/parraides/vinas-melo-labak/418113?autostart=true",
            "md5": "a1612fe0849455423ad8718fe049be21",
            "info_dict": {
                "id": "418113",
                "ext": "mp4",
                "title": "Kādi ir īri? - Viņas melo labāk",
                "description": "Baiba apsmej īrus, kādi tie ir un ko viņi dara.",
                "series": "Viņas melo labāk",
                "season": "2.sezona",
                "season_number": 2,
                "duration": 25,
                "timestamp": 1406097056,
                "upload_date": "20140723",
            },
        },
        {
            "url": "http://play.tv3.lt/programos/moterys-meluoja-geriau/409229?autostart=true",
            "info_dict": {
                "id": "409229",
                "ext": "flv",
                "title": "Moterys meluoja geriau",
                "description": "md5:9aec0fc68e2cbc992d2a140bd41fa89e",
                "series": "Moterys meluoja geriau",
                "episode_number": 47,
                "season": "1 sezonas",
                "season_number": 1,
                "duration": 1330,
                "timestamp": 1403769181,
                "upload_date": "20140626",
            },
            "params": {
                # rtmp download
                "skip_download": True,
            },
        },
        {
            "url": "http://www.tv3play.ee/sisu/kodu-keset-linna/238551?autostart=true",
            "info_dict": {
                "id": "238551",
                "ext": "flv",
                "title": "Kodu keset linna 398537",
                "description": "md5:7df175e3c94db9e47c0d81ffa5d68701",
                "duration": 1257,
                "timestamp": 1292449761,
                "upload_date": "20101215",
            },
            "params": {
                # rtmp download
                "skip_download": True,
            },
        },
        {
            "url": "http://www.tv3play.se/program/husraddarna/395385?autostart=true",
            "info_dict": {
                "id": "395385",
                "ext": "mp4",
                "title": "Husräddarna S02E07",
                "description": "md5:f210c6c89f42d4fc39faa551be813777",
                "duration": 2574,
                "timestamp": 1400596321,
                "upload_date": "20140520",
            },
            "params": {
                "skip_download": True,
            },
        },
        {
            "url": "http://www.tv6play.se/program/den-sista-dokusapan/266636?autostart=true",
            "info_dict": {
                "id": "266636",
                "ext": "mp4",
                "title": "Den sista dokusåpan S01E08",
                "description": "md5:295be39c872520221b933830f660b110",
                "duration": 1492,
                "timestamp": 1330522854,
                "upload_date": "20120229",
                "age_limit": 18,
            },
            "params": {
                "skip_download": True,
            },
        },
        {
            "url": "http://www.tv8play.se/program/antikjakten/282756?autostart=true",
            "info_dict": {
                "id": "282756",
                "ext": "mp4",
                "title": "Antikjakten S01E10",
                "description": "md5:1b201169beabd97e20c5ad0ad67b13b8",
                "duration": 2646,
                "timestamp": 1348575868,
                "upload_date": "20120925",
            },
            "params": {
                "skip_download": True,
            },
        },
        {
            "url": "http://www.tv3play.no/programmer/anna-anka-soker-assistent/230898?autostart=true",
            "info_dict": {
                "id": "230898",
                "ext": "mp4",
                "title": "Anna Anka søker assistent - Ep. 8",
                "description": "md5:f80916bf5bbe1c5f760d127f8dd71474",
                "duration": 2656,
                "timestamp": 1277720005,
                "upload_date": "20100628",
            },
            "params": {
                "skip_download": True,
            },
        },
        {
            "url": "http://www.viasat4play.no/programmer/budbringerne/21873?autostart=true",
            "info_dict": {
                "id": "21873",
                "ext": "mp4",
                "title": "Budbringerne program 10",
                "description": "md5:4db78dc4ec8a85bb04fd322a3ee5092d",
                "duration": 1297,
                "timestamp": 1254205102,
                "upload_date": "20090929",
            },
            "params": {
                "skip_download": True,
            },
        },
        {
            "url": "http://www.tv6play.no/programmer/hotelinspektor-alex-polizzi/361883?autostart=true",
            "info_dict": {
                "id": "361883",
                "ext": "mp4",
                "title": "Hotelinspektør Alex Polizzi - Ep. 10",
                "description": "md5:3ecf808db9ec96c862c8ecb3a7fdaf81",
                "duration": 2594,
                "timestamp": 1393236292,
                "upload_date": "20140224",
            },
            "params": {
                "skip_download": True,
            },
        },
        {
            "url": "http://play.novatv.bg/programi/zdravei-bulgariya/624952?autostart=true",
            "info_dict": {
                "id": "624952",
                "ext": "flv",
                "title": "Здравей, България (12.06.2015 г.) ",
                "description": "md5:99f3700451ac5bb71a260268b8daefd7",
                "duration": 8838,
                "timestamp": 1434100372,
                "upload_date": "20150612",
            },
            "params": {
                # rtmp download
                "skip_download": True,
            },
        },
        {
            "url": "https://play.nova.bg/programi/zdravei-bulgariya/764300?autostart=true",
            "only_matching": True,
        },
        {
            "url": "http://tvplay.skaties.lv/parraides/vinas-melo-labak/418113?autostart=true",
            "only_matching": True,
        },
        {
            "url": "https://tvplay.skaties.lv/vinas-melo-labak/418113/?autostart=true",
            "only_matching": True,
        },
        {
            # views is null
            "url": "http://tvplay.skaties.lv/parraides/tv3-zinas/760183",
            "only_matching": True,
        },
        {
            "url": "http://tv3play.tv3.ee/sisu/kodu-keset-linna/238551?autostart=true",
            "only_matching": True,
        },
        {
            "url": "http://www.viafree.se/program/underhallning/i-like-radio-live/sasong-1/676869",
            "only_matching": True,
        },
        {
            "url": "mtg:418113",
            "only_matching": True,
        },
    ]

    def _real_extract(self, url):
        video_id = self._match_id(url)
        # Derive the geo-bypass country from the site's two-letter TLD
        # (e.g. tvplay.lv -> LV); absent for mtg: URLs.
        geo_country = self._search_regex(
            r"https?://[^/]+\.([a-z]{2})", url, "geo country", default=None
        )
        if geo_country:
            self._initialize_geo_bypass({"countries": [geo_country.upper()]})
        video = self._download_json(
            "http://playapi.mtgx.tv/v3/videos/%s" % video_id,
            video_id,
            "Downloading video JSON",
        )
        title = video["title"]
        try:
            streams = self._download_json(
                "http://playapi.mtgx.tv/v3/videos/stream/%s" % video_id,
                video_id,
                "Downloading streams JSON",
            )
        except ExtractorError as e:
            # On 403 the API body is JSON with a human-readable "msg".
            if isinstance(e.cause, compat_HTTPError) and e.cause.code == 403:
                msg = self._parse_json(e.cause.read().decode("utf-8"), video_id)
                raise ExtractorError(msg["msg"], expected=True)
            raise
        quality = qualities(["hls", "medium", "high"])
        formats = []
        for format_id, video_url in streams.get("streams", {}).items():
            video_url = url_or_none(video_url)
            if not video_url:
                continue
            ext = determine_ext(video_url)
            if ext == "f4m":
                formats.extend(
                    self._extract_f4m_formats(
                        update_url_query(
                            video_url,
                            {"hdcore": "3.5.0", "plugin": "aasp-3.5.0.151.81"},
                        ),
                        video_id,
                        f4m_id="hds",
                        fatal=False,
                    )
                )
            elif ext == "m3u8":
                formats.extend(
                    self._extract_m3u8_formats(
                        video_url,
                        video_id,
                        "mp4",
                        "m3u8_native",
                        m3u8_id="hls",
                        fatal=False,
                    )
                )
            else:
                fmt = {
                    "format_id": format_id,
                    "quality": quality(format_id),
                    "ext": ext,
                }
                if video_url.startswith("rtmp"):
                    # Split an rtmp URL into server, app and play path.
                    m = re.search(
                        r"^(?P<url>rtmp://[^/]+/(?P<app>[^/]+))/(?P<playpath>.+)$",
                        video_url,
                    )
                    if not m:
                        continue
                    fmt.update(
                        {
                            "ext": "flv",
                            "url": m.group("url"),
                            "app": m.group("app"),
                            "play_path": m.group("playpath"),
                            "preference": -1,
                        }
                    )
                else:
                    fmt.update(
                        {
                            "url": video_url,
                        }
                    )
                formats.append(fmt)
        if not formats and video.get("is_geo_blocked"):
            self.raise_geo_restricted(
                "This content might not be available in your country due to copyright reasons"
            )
        self._sort_formats(formats)
        # TODO: webvtt in m3u8
        subtitles = {}
        sami_path = video.get("sami_path")
        if sami_path:
            # Guess the subtitle language from the file name, falling back
            # to the site TLD.
            lang = self._search_regex(
                r"_([a-z]{2})\.xml",
                sami_path,
                "lang",
                default=compat_urlparse.urlparse(url).netloc.rsplit(".", 1)[-1],
            )
            subtitles[lang] = [
                {
                    "url": sami_path,
                }
            ]
        series = video.get("format_title")
        episode_number = int_or_none(video.get("format_position", {}).get("episode"))
        season = video.get("_embedded", {}).get("season", {}).get("title")
        season_number = int_or_none(video.get("format_position", {}).get("season"))
        return {
            "id": video_id,
            "title": title,
            "description": video.get("description"),
            "series": series,
            "episode_number": episode_number,
            "season": season,
            "season_number": season_number,
            "duration": int_or_none(video.get("duration")),
            "timestamp": parse_iso8601(video.get("created_at")),
            "view_count": try_get(video, lambda x: x["views"]["total"], int),
            "age_limit": int_or_none(video.get("age_limit", 0)),
            "formats": formats,
            "subtitles": subtitles,
        }
class ViafreeIE(InfoExtractor):
    """Extractor for viafree.dk / viafree.no / viafree.se.

    Uses the viafree-content.mtg-api.com JSON API; the actual stream URL is
    resolved through the program's ``streamLink`` and extracted as HLS.
    """

    _VALID_URL = r"""(?x)
        https?://
            (?:www\.)?
            viafree\.(?P<country>dk|no|se)
            /(?P<id>program(?:mer)?/(?:[^/]+/)+[^/?#&]+)
        """
    _TESTS = [
        {
            "url": "http://www.viafree.no/programmer/underholdning/det-beste-vorspielet/sesong-2/episode-1",
            "info_dict": {
                "id": "757786",
                "ext": "mp4",
                "title": "Det beste vorspielet - Sesong 2 - Episode 1",
                "description": "md5:b632cb848331404ccacd8cd03e83b4c3",
                "series": "Det beste vorspielet",
                "season_number": 2,
                "duration": 1116,
                "timestamp": 1471200600,
                "upload_date": "20160814",
            },
            "params": {
                "skip_download": True,
            },
        },
        {
            # with relatedClips
            "url": "http://www.viafree.se/program/reality/sommaren-med-youtube-stjarnorna/sasong-1/avsnitt-1",
            "only_matching": True,
        },
        {
            # Different og:image URL schema
            "url": "http://www.viafree.se/program/reality/sommaren-med-youtube-stjarnorna/sasong-1/avsnitt-2",
            "only_matching": True,
        },
        {
            "url": "http://www.viafree.se/program/livsstil/husraddarna/sasong-2/avsnitt-2",
            "only_matching": True,
        },
        {
            "url": "http://www.viafree.dk/programmer/reality/paradise-hotel/saeson-7/episode-5",
            "only_matching": True,
        },
    ]
    # Geo errors are reported explicitly below instead of auto-bypassed.
    _GEO_BYPASS = False

    @classmethod
    def suitable(cls, url):
        # Defer to TVPlayIE for URLs both extractors would match.
        return False if TVPlayIE.suitable(url) else super(ViafreeIE, cls).suitable(url)

    def _real_extract(self, url):
        country, path = re.match(self._VALID_URL, url).groups()
        content = self._download_json(
            "https://viafree-content.mtg-api.com/viafree-content/v1/%s/path/%s"
            % (country, path),
            path,
        )
        program = content["_embedded"]["viafreeBlocks"][0]["_embedded"]["program"]
        guid = program["guid"]
        meta = content["meta"]
        title = meta["title"]
        try:
            # Resolve the prioritized stream URL through the stream link.
            stream_href = self._download_json(
                program["_links"]["streamLink"]["href"],
                guid,
                headers=self.geo_verification_headers(),
            )["embedded"]["prioritizedStreams"][0]["links"]["stream"]["href"]
        except ExtractorError as e:
            # A 403 here means the content is geo-blocked for this country.
            if isinstance(e.cause, compat_HTTPError) and e.cause.code == 403:
                self.raise_geo_restricted(countries=[country])
            raise
        formats = self._extract_m3u8_formats(stream_href, guid, "mp4")
        self._sort_formats(formats)
        episode = program.get("episode") or {}
        return {
            "id": guid,
            "title": title,
            "thumbnail": meta.get("image"),
            "description": meta.get("description"),
            "series": episode.get("seriesTitle"),
            "episode_number": int_or_none(episode.get("episodeNumber")),
            "season_number": int_or_none(episode.get("seasonNumber")),
            "duration": int_or_none(
                try_get(program, lambda x: x["video"]["duration"]["milliseconds"]), 1000
            ),
            "timestamp": parse_iso8601(
                try_get(program, lambda x: x["availability"]["start"])
            ),
            "formats": formats,
        }
class TVPlayHomeIE(InfoExtractor):
    """Extractor for the newer tvplay.tv3.lt / skaties.lv / tv3.ee sites.

    Asset metadata and the HLS manifest URL come from the site's own
    ``/sb/public/asset/<id>`` JSON endpoint.
    """

    _VALID_URL = r"https?://(?:tv3?)?play\.(?:tv3\.lt|skaties\.lv|tv3\.ee)/(?:[^/]+/)*[^/?#&]+-(?P<id>\d+)"
    _TESTS = [
        {
            "url": "https://tvplay.tv3.lt/aferistai-n-7/aferistai-10047125/",
            "info_dict": {
                "id": "366367",
                "ext": "mp4",
                "title": "Aferistai",
                "description": "Aferistai. Kalėdinė pasaka.",
                "series": "Aferistai [N-7]",
                "season": "1 sezonas",
                "season_number": 1,
                "duration": 464,
                "timestamp": 1394209658,
                "upload_date": "20140307",
                "age_limit": 18,
            },
            "params": {
                "skip_download": True,
            },
        },
        {
            "url": "https://tvplay.skaties.lv/vinas-melo-labak/vinas-melo-labak-10280317/",
            "only_matching": True,
        },
        {
            "url": "https://tvplay.tv3.ee/cool-d-ga-mehhikosse/cool-d-ga-mehhikosse-10044354/",
            "only_matching": True,
        },
        {
            "url": "https://play.tv3.lt/aferistai-10047125",
            "only_matching": True,
        },
        {
            "url": "https://tv3play.skaties.lv/vinas-melo-labak-10280317",
            "only_matching": True,
        },
        {
            "url": "https://play.tv3.ee/cool-d-ga-mehhikosse-10044354",
            "only_matching": True,
        },
    ]

    def _real_extract(self, url):
        video_id = self._match_id(url)
        asset = self._download_json(
            urljoin(url, "/sb/public/asset/" + video_id), video_id
        )
        m3u8_url = asset["movie"]["contentUrl"]
        # The asset carries its own canonical id, which may differ from the
        # id embedded in the URL.
        video_id = asset["assetId"]
        asset_title = asset["title"]
        title = asset_title["title"]
        formats = self._extract_m3u8_formats(
            m3u8_url, video_id, "mp4", "m3u8_native", m3u8_id="hls"
        )
        self._sort_formats(formats)
        thumbnails = None
        image_url = asset.get("imageUrl")
        if image_url:
            thumbnails = [
                {
                    "url": urljoin(url, image_url),
                    "ext": "jpg",
                }
            ]
        metadata = asset.get("metadata") or {}
        return {
            "id": video_id,
            "title": title,
            "description": asset_title.get("summaryLong")
            or asset_title.get("summaryShort"),
            "thumbnails": thumbnails,
            "duration": parse_duration(asset_title.get("runTime")),
            "series": asset.get("tvSeriesTitle"),
            "season": asset.get("tvSeasonTitle"),
            "season_number": int_or_none(metadata.get("seasonNumber")),
            "episode": asset_title.get("titleBrief"),
            "episode_number": int_or_none(metadata.get("episodeNumber")),
            "formats": formats,
        }
from __future__ import print_function
import json
import pickle
import re
import sys
import time
import traceback
from datetime import datetime
from hashlib import md5
import six
from lxml import etree
from twisted.internet import defer, endpoints, reactor
from twisted.web import resource, server
from twisted.web.client import (
Agent,
BrowserLikeRedirectAgent,
HTTPConnectionPool,
PartialDownloadError,
)
from twisted.web.html import escape
from twisted.web.http import INTERNAL_SERVER_ERROR
from twisted.web.http_headers import Headers
from twisted.web.server import NOT_DONE_YET
twisted_headers = Headers
from pol.log import LogHandler
from scrapy.core.downloader.contextfactory import ScrapyClientContextFactory
from scrapy.downloadermiddlewares.decompression import DecompressionMiddleware
from scrapy.downloadermiddlewares.httpcompression import HttpCompressionMiddleware
from scrapy.http import Headers
from scrapy.http.request import Request
from scrapy.http.response.text import TextResponse
from scrapy.responsetypes import responsetypes
from scrapy.selector import Selector
from twisted.logger import Logger
from .client import IGNORE_SIZE, ppReadBody
from .feed import Feed
log = Logger()
class Downloader(object):
def __init__(
    self,
    feed,
    debug,
    snapshot_dir,
    stat_tool,
    memon,
    request,
    url,
    feed_config,
    selector_defer,
    sanitize,
    max_size,
):
    """Collect everything needed to fetch one page and answer *request*.

    Args:
        feed: Feed builder used to produce RSS output in feed mode.
        debug: If true, error tracebacks are written into the HTTP reply.
        snapshot_dir: Directory for HTML snapshots, or falsy to disable.
        stat_tool: Optional statistics tracer (``trace`` is called on it).
        memon: Optional memory monitor, polled after a response is written.
        request: The twisted HTTP request being answered.
        url: The target URL to download.
        feed_config: Feed configuration dict, or None for plain page mode.
        selector_defer: If set, the parsed Scrapy response is delivered to
            this Deferred instead of being written to *request*.
        sanitize: Whether to strip scripts/anchors before building a feed.
        max_size: Maximum response body size to read.
    """
    self.feed = feed
    self.debug = debug
    self.snapshot_dir = snapshot_dir
    self.stat_tool = stat_tool
    self.memon = memon
    self.request = request
    self.url = url
    self.feed_config = feed_config
    self.selector_defer = selector_defer
    self.sanitize = sanitize
    self.max_size = max_size
def html2json(self, el):
    """Recursively serialize an lxml element into a JSON-able nested list.

    Each node becomes ``[tag, {kept attributes}, [children...]]``, keeping
    only the "tag-id" and "class" attributes.
    """
    kept_attributes = {}
    for attr_name, attr_value in el.items():
        if attr_name in ("tag-id", "class"):
            kept_attributes[attr_name] = attr_value
    children = []
    for child in el.getchildren():
        # Skip comments/processing instructions; keep real elements only.
        if isinstance(child, etree.ElementBase):
            children.append(self.html2json(child))
    return [el.tag, kept_attributes, children]
def _saveResponse(self, headers, url, tree):
    """Snapshot the downloaded page for later "extended selector" use.

    Writes a file named ``<timestamp>_<md5(url)>`` into
    ``self.snapshot_dir`` containing the URL on the first line, then the
    response headers, then the serialized HTML.  Returns the file name.
    """
    # save html for extended selectors
    if six.PY2:
        file_name = "%s_%s" % (time.time(), md5(url).hexdigest())
    elif six.PY3:
        # md5 needs bytes on py3.
        file_name = "%s_%s" % (time.time(), md5(url.encode("utf-8")).hexdigest())
    file_path = self.snapshot_dir + "/" + file_name
    with open(file_path, "w") as f:
        f.write(url + "\n")
        if six.PY2:
            for k, v in headers.iteritems():
                for vv in v:
                    f.write("%s: %s\n" % (k, vv))
        elif six.PY3:
            for k, v in headers.items():
                for vv in v:
                    f.write("%s: %s\n" % (k, vv))
        if six.PY2:
            f.write("\n\n" + etree.tostring(tree, encoding="utf-8", method="html"))
        elif six.PY3:
            # tostring returns bytes when an encoding is given; decode for
            # the text-mode file.
            f.write(
                "\n\n"
                + etree.tostring(tree, encoding="utf-8", method="html").decode(
                    "utf-8"
                )
            )
    return file_name
def sanitizeAndNumerate(self, selector, numerate=True, sanitize_anchors=True):
    """Strip active content from the document and optionally number tags.

    Removes <script>/<iframe> elements, optionally assigns a sequential
    "tag-id" attribute to every remaining element, disarms anchors (moving
    ``href`` to ``origin-href``), removes inline ``on*`` event handlers,
    adds an empty ``onclick`` so elements stay clickable on mobile, and
    disables form submission.  Mutates the tree in place.

    Args:
        selector: Scrapy selector wrapping the parsed document.
        numerate: Whether to assign "tag-id" attributes.
        sanitize_anchors: Whether to disarm <a href=...> elements.
    """
    tree = selector.root.getroottree()
    next_tag_id = 1
    for node in tree.xpath("//*"):
        # remove scripts and iframes
        if node.tag in ["script", "iframe"]:
            node.getparent().remove(node)
        elif numerate:
            # set tag-id attribute
            node.attrib["tag-id"] = str(next_tag_id)
            next_tag_id += 1
        # sanitize anchors
        if sanitize_anchors and node.tag == "a" and "href" in node.attrib:
            node.attrib["origin-href"] = node.attrib["href"]
            del node.attrib["href"]
        # Remove inline event handlers.  BUG FIX: iterate over a snapshot
        # of the keys — deleting from a mapping while iterating it is
        # undefined behaviour for dict-like mappings in general.
        for attr in list(node.attrib):
            if attr.startswith("on"):
                del node.attrib[attr]
        # make clickable for mobile
        node.attrib["onclick"] = ""
        # sanitize forms
        if node.tag == "form":
            node.attrib["onsubmit"] = "return false"
def setBaseAndRemoveScriptsAndMore(self, selector, headers, url):
    """Prepare a fetched HTML page for display in the picker UI.

    Removes namespaces, snapshots the page (when enabled), points a
    <base> tag at the original URL, sanitizes the tree, and injects a
    script exposing the html2json structure and snapshot name.  Returns
    the serialized HTML string.
    """
    selector.remove_namespaces()
    tree = selector.root.getroottree()
    if self.snapshot_dir:
        file_name = self._saveResponse(headers, url, tree)
    else:
        file_name = "DISABLED"
    # set base url to html document
    head = tree.xpath("//head")
    if head:
        head = head[0]
        base = head.xpath("./base")
        if base:
            base = base[0]
        else:
            base = etree.Element("base")
            head.insert(0, base)
        if six.PY2:
            base.set("href", url.decode("utf-8"))
        elif six.PY3:
            base.set("href", url)
    self.sanitizeAndNumerate(selector)
    body = tree.xpath("//body")
    if body:
        # append html2json js object
        jsobj = self.html2json(tree.getroot())
        script = etree.Element("script", {"type": "text/javascript"})
        script.text = "\n".join(
            (
                "var html2json = " + json.dumps(jsobj) + ";",
                'var snapshot_time = "' + file_name + '";',
            )
        )
        body[0].append(script)
    if six.PY2:
        return etree.tostring(tree, method="html")
    elif six.PY3:
        return etree.tostring(tree, method="html").decode("utf-8")
def buildScrapyResponse(self, response, body, url):
    """Convert a twisted response plus its body into a Scrapy response."""
    raw_headers = response.headers.getAllRawHeaders()
    headers = Headers({name: ",".join(values) for name, values in raw_headers})
    # Pick the right Response subclass (text, html, ...) from the headers.
    response_cls = responsetypes.from_args(headers=headers, url=url)
    return response_cls(url=url, status=response.code, headers=headers, body=body)
def error_html(self, msg):
    """Wrap *msg* in a minimal HTML page, turning newlines into <br/>."""
    html_body = msg.replace("\n", "<br/>\n")
    return "<html><body>{}</body></html>".format(html_body)
def downloadError(self, error):
    """Errback for the page download: recover, trace, and answer the client."""
    # read for details: https://stackoverflow.com/questions/29423986/twisted-giving-twisted-web-client-partialdownloaderror-200-ok
    if error.type is PartialDownloadError and error.value.status == "200":
        # A PartialDownloadError with status 200 still carries the full
        # body; re-dispatch it as a successful download on the next tick.
        d = defer.Deferred()
        reactor.callLater(
            0, d.callback, error.value.response
        )  # error.value.response is response_str
        d.addCallback(self.downloadDone)
        d.addErrback(self.downloadError)
        return
    if self.selector_defer:
        # Selector mode: propagate the failure to the waiting Deferred.
        self.selector_defer.errback(error)
    else:
        try:
            if self.stat_tool:
                feed_id = self.feed_config and self.feed_config["id"]
                s_url = None
                if not feed_id:
                    # Not a feed request: trace the raw URL instead.
                    feed_id = 0
                    s_url = self.url
                self.stat_tool.trace(
                    ip=self.request.getHeader("x-real-ip")
                    or self.request.client.host,
                    feed_id=feed_id,
                    post_cnt=0,
                    new_post_cnt=0,
                    url=s_url,
                    ex_msg=error.getErrorMessage(),
                    ex_callstack=error.getTraceback(),
                )
            else:
                sys.stderr.write(
                    "\n".join(
                        [
                            str(datetime.utcnow()),
                            self.request.uri,
                            self.url,
                            "Downloader error: " + error.getErrorMessage(),
                            "Traceback: " + error.getTraceback(),
                        ]
                    )
                )
        except:
            # NOTE(review): deliberately broad — error *reporting* must not
            # prevent the error response below from being sent.
            traceback.print_exc(file=sys.stdout)
        self.request.setResponseCode(INTERNAL_SERVER_ERROR)
        if self.debug:
            self.request.write("Downloader error: " + error.getErrorMessage())
            self.request.write("Traceback: " + error.getTraceback())
        else:
            err_message = self.error_html(
                '<h1>PolitePol says: "Something wrong"</h1> <p><b>Try to refresh page or contact us by email: <a href="mailto:politepol.com@gmail.com">politepol.com@gmail.com</a></b>\n(Help us to improve our service with your feedback)</p> <p><i>Scary mantra: %s</i></p>'
                % escape(error.getErrorMessage())
            )
            self.request.write(err_message)
        self.request.finish()
def downloadStarted(self, response):
    """Remember the response and start reading its body asynchronously."""
    self.response = response
    body_deferred = ppReadBody(response, self.max_size)
    body_deferred.addCallback(self.downloadDone)
    body_deferred.addErrback(self.downloadError)
    return response
def downloadDone(self, response_str):
    """Body fully read: wrap it in a Scrapy response, then either resolve the
    waiting selector Deferred (requestSelector path) or render it back to the
    HTTP client."""
    # self.response was stored by downloadStarted().
    url = self.response.request.absoluteURI
    print("Response <%s> ready (%s bytes)" % (url, len(response_str)))
    sresponse = self.buildScrapyResponse(self.response, response_str, url)
    if self.selector_defer:
        self.selector_defer.callback(sresponse)
    else:
        self.writeResponse(sresponse)
        # Optionally dump a memory-usage diff after serving the request.
        self.run_memon()
def writeResponse(
    self, sresponse
):  # , response_str='PolitePol: Local page processing is failed'
    """Render *sresponse* to the HTTP client.

    Decompresses the body via Scrapy middlewares, builds the payload (feed
    XML / processed HTML for text responses, raw bytes otherwise), applies
    prepared headers and finishes the request.
    """
    # Undo HTTP compression, then any archive/content decompression.
    sresponse = HttpCompressionMiddleware().process_response(
        Request(sresponse.url), sresponse, None
    )
    sresponse = DecompressionMiddleware().process_response(None, sresponse, None)
    response_headers = self.prepare_response_headers(sresponse.headers)
    if isinstance(sresponse, TextResponse):
        ip = self.request.getHeader("x-real-ip") or self.request.client.host
        response_str = self.prepare_response_str(
            sresponse.selector,
            sresponse.headers,
            sresponse.body_as_unicode(),
            sresponse.url,
            ip,
        )
        if self.feed_config:
            # Feed output is always XML regardless of the source page type.
            response_headers = {b"Content-Type": b"text/xml; charset=utf-8"}
    else:  # images and such
        response_str = sresponse.body
    for k, v in response_headers.items():
        self.request.setHeader(k, v)
    self.request.write(response_str)
    self.request.finish()
def prepare_response_headers(self, headers):
    """Hook for subclasses; the base implementation adds no extra headers."""
    return dict()
def prepare_response_str(self, selector, headers, page_unicode, url, ip=None):
    """Build the payload for a text response.

    With a feed config: optionally sanitize the DOM, then build the RSS feed
    and trace post counts. Without one: return the cleaned page HTML for the
    frontend and trace the access with feed_id 0.
    """
    if self.feed_config:
        if self.sanitize:
            # Strip dangerous markup but keep numbering/anchors untouched.
            self.sanitizeAndNumerate(
                selector, numerate=False, sanitize_anchors=False
            )
        [response_str, post_cnt, new_post_cnt] = self.feed.buildFeed(
            selector, page_unicode, self.feed_config
        )
        if self.stat_tool:
            self.stat_tool.trace(
                ip=ip,
                feed_id=self.feed_config["id"],
                post_cnt=post_cnt,
                new_post_cnt=new_post_cnt,
            )
    else:
        response_str = self.setBaseAndRemoveScriptsAndMore(selector, headers, url)
        if self.stat_tool:
            self.stat_tool.trace(
                ip=ip, feed_id=0, post_cnt=0, new_post_cnt=0, url=url
            )
    return response_str
def run_memon(self):
    """Schedule a memory-monitor diff on the reactor when monitoring is on."""
    if not self.memon:
        return
    deferred = defer.Deferred()
    reactor.callLater(0, deferred.callback, None)
    deferred.addCallback(self.memon.show_diff)
    deferred.addErrback(
        lambda failure: print(
            "Memory Monitor error: %s\nPGC traceback: %s"
            % (failure.getErrorMessage(), failure.getTraceback())
        )
    )
class Site(resource.Resource):
    """Twisted resource that downloads pages and serves them either as cleaned
    HTML for the frontend (?url=...) or as an RSS feed (/feed/<id>)."""

    isLeaf = True
    # Raw bytes pattern (request.uri is bytes); previously b"...\d..." relied
    # on an unrecognized escape being kept verbatim.
    feed_regexp = re.compile(br"^/feed/(\d{1,10})")

    def __init__(
        self,
        db_creds,
        snapshot_dir,
        user_agent,
        debug=False,
        limiter=None,
        memon=None,
        stat_tool=None,
        prefetch_dir=None,
        feed=None,
        downloadercls=None,
        max_size=IGNORE_SIZE,
    ):
        self.db_creds = db_creds
        self.snapshot_dir = snapshot_dir
        self.user_agent = user_agent
        self.limiter = limiter
        self.prefetch_dir = prefetch_dir
        self.feed = feed or Feed(db_creds)
        self.debug = debug
        self.stat_tool = stat_tool
        self.memon = memon
        self.max_size = max_size
        self.downloadercls = downloadercls or Downloader

    def startRequest(
        self, request, url, feed_config=None, selector_defer=None, sanitize=False
    ):
        """Serve *url* from the prefetch cache if possible, otherwise start an
        asynchronous download whose callbacks render the result."""
        downloader = self.downloadercls(
            self.feed,
            self.debug,
            self.snapshot_dir,
            self.stat_tool,
            self.memon,
            request=request,
            url=url,
            feed_config=feed_config,
            selector_defer=selector_defer,
            sanitize=sanitize,
            max_size=self.max_size,
        )
        sresponse = self.tryLocalPage(url)
        if sresponse:
            if selector_defer:
                reactor.callLater(0, selector_defer.callback, sresponse)
            else:
                # BUG FIX: Downloader.writeResponse() takes only the response
                # (request/feed_config are held on the downloader instance);
                # the old call writeResponse(request, sresponse, feed_config)
                # raised TypeError whenever a prefetched page was hit.
                downloader.writeResponse(sresponse)
        else:
            agent = BrowserLikeRedirectAgent(
                Agent(
                    reactor,
                    contextFactory=ScrapyClientContextFactory(),  # skip certificate verification
                    connectTimeout=10,
                ),
                # pool=pool),
                redirectLimit=5,
            )
            d = agent.request(
                "GET",
                url,
                twisted_headers(
                    {
                        "Accept": [
                            "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8"
                        ],
                        "Accept-Encoding": ["gzip, deflate, sdch"],
                        "User-Agent": [self.user_agent],
                    }
                ),
                None,
            )
            print("Request <GET %s> started" % (url,))
            d.addCallback(downloader.downloadStarted)
            d.addErrback(downloader.downloadError)

    def tryLocalPage(self, url):
        """Return a pickled prefetched response for *url*, or None.

        Cache files are named <md5(url)>.<domain> inside prefetch_dir.
        NOTE(review): the file is opened in text mode for pickle.load —
        works on Python 2 only; confirm target interpreter.
        """
        if self.prefetch_dir:
            m = md5(url).hexdigest()
            domain = urlparse(url).netloc
            try:
                with open(self.prefetch_dir + "/" + m + "." + domain) as f:
                    return pickle.load(f)
            except IOError:
                pass
        return None

    def render_GET(self, request):
        """
        Render page for frontend or RSS feed
        """
        # NOTE(review): basestring and the bytes-%-str formatting below are
        # Python 2 constructs; they must be revisited before a Python 3 port.
        if b"url" in request.args:  # page for frontend
            url = request.args[b"url"][0]
            self.startRequest(request, url, sanitize=True)
            return NOT_DONE_YET
        elif self.feed_regexp.match(request.uri) is not None:  # feed
            feed_id = self.feed_regexp.match(request.uri).groups()[0]
            sanitize = request.uri.endswith(b"?sanitize=Y")
            time_left = (
                self.limiter.check_request_time_limit(request.uri)
                if self.limiter
                else 0
            )
            if time_left:
                request.setResponseCode(429)
                request.setHeader("Retry-After", str(time_left) + " seconds")
                return b"Too Many Requests. Retry after %s seconds" % (str(time_left))
            else:
                res = self.feed.getFeedData(feed_id)
                if isinstance(res, basestring):  # error message
                    return res
                url, feed_config = res
                self.startRequest(request, url, feed_config, sanitize=sanitize)
                return NOT_DONE_YET
        else:  # neither page and feed
            return "Url is invalid"
class Server(object):
    """Wires configuration, the Site resource and a log handler together, and
    runs the Twisted reactor on the configured TCP port."""

    def __init__(
        self,
        port,
        db_creds,
        snapshot_dir,
        user_agent,
        debug=False,
        limiter=None,
        memon=None,
        stat_tool=None,
        prefetch_dir=None,
        feed=None,
        sitecls=None,
        downloadercls=None,
        max_size=IGNORE_SIZE,
    ):
        # Plain attribute copies of the configuration.
        self.port = port
        self.db_creds = db_creds
        self.snapshot_dir = snapshot_dir
        self.user_agent = user_agent
        self.debug = debug
        self.limiter = limiter
        self.memon = memon
        self.stat_tool = stat_tool
        self.prefetch_dir = prefetch_dir
        self.log_handler = LogHandler()
        # Allow the Site implementation to be swapped out (tests, subclasses).
        sitecls = sitecls or Site
        self.site = sitecls(
            self.db_creds,
            self.snapshot_dir,
            self.user_agent,
            self.debug,
            self.limiter,
            self.memon,
            self.stat_tool,
            self.prefetch_dir,
            feed,
            downloadercls=downloadercls,
            max_size=max_size,
        )

    def requestSelector(self, url=None, feed_config=None):
        """Download *url* and resolve the returned Deferred with a Scrapy
        selector-bearing response instead of writing to an HTTP client."""
        selector_deferred = defer.Deferred()
        self.site.startRequest(
            None, url, feed_config=feed_config, selector_defer=selector_deferred
        )
        return selector_deferred

    def run(self):
        """Listen on tcp:<port> and run the reactor (blocks forever)."""
        endpoint = endpoints.serverFromString(reactor, "tcp:%s" % self.port)
        endpoint.listen(server.Site(self.site))
        reactor.run()
|
util | OpensslFindPatch | import ctypes.util
import logging
import os
import sys
from Config import config
find_library_original = ctypes.util.find_library
def getOpensslPath():
    """Locate the OpenSSL crypto library for the current platform.

    Search order: explicit config override, platform-specific bundled
    locations, the Android app's lib directory, LD_LIBRARY_PATH entries,
    then ctypes' own find_library with "libeay32" as the final fallback.

    Returns:
        A file path, or a bare library name for the dynamic loader.
    """
    if config.openssl_lib_file:
        return config.openssl_lib_file
    if sys.platform.startswith("win"):
        lib_paths = [
            os.path.join(
                os.getcwd(), "tools/openssl/libeay32.dll"
            ),  # ZeroBundle Windows
            os.path.join(os.path.dirname(sys.executable), "DLLs/libcrypto-1_1-x64.dll"),
            os.path.join(os.path.dirname(sys.executable), "DLLs/libcrypto-1_1.dll"),
        ]
    elif sys.platform == "cygwin":
        lib_paths = ["/bin/cygcrypto-1.0.0.dll"]
    else:
        lib_paths = [
            "../runtime/lib/libcrypto.so.1.1",  # ZeroBundle Linux
            "../../Frameworks/libcrypto.1.1.dylib",  # ZeroBundle macOS
            "/opt/lib/libcrypto.so.1.0.0",  # For optware and entware
            "/usr/local/ssl/lib/libcrypto.so",
        ]
    for lib_path in lib_paths:
        if os.path.isfile(lib_path):
            return lib_path
    if "ANDROID_APP_PATH" in os.environ:
        try:
            lib_dir = os.environ["ANDROID_APP_PATH"] + "/../../lib"
            # NOTE(review): returns the bare file name, not joined with
            # lib_dir — presumably the loader resolves it; confirm.
            return [lib for lib in os.listdir(lib_dir) if "crypto" in lib][0]
        except Exception as err:
            logging.debug("OpenSSL lib not found in: %s (%s)" % (lib_dir, err))
    if "LD_LIBRARY_PATH" in os.environ:
        lib_dir_paths = os.environ["LD_LIBRARY_PATH"].split(":")
        for path in lib_dir_paths:
            try:
                # NOTE(review): likewise only the file name is returned; the
                # dynamic loader searches LD_LIBRARY_PATH itself — confirm.
                return [lib for lib in os.listdir(path) if "libcrypto.so" in lib][0]
            except Exception as err:
                logging.debug("OpenSSL lib not found in: %s (%s)" % (path, err))
    # Last resort: let ctypes search, then hard-code the legacy Windows name.
    lib_path = (
        find_library_original("ssl.so")
        or find_library_original("ssl")
        or find_library_original("crypto")
        or find_library_original("libcrypto")
        or "libeay32"
    )
    return lib_path
def patchCtypesOpensslFindLibrary():
    """Monkey-patch ctypes.util.find_library so that OpenSSL-related names
    resolve through getOpensslPath(); all other names use the original."""

    def findLibraryPatched(name):
        if name not in ("ssl", "crypto", "libeay32"):
            return find_library_original(name)
        return getOpensslPath()

    ctypes.util.find_library = findLibraryPatched
patchCtypesOpensslFindLibrary()
|
posthog | utils_cors | from urllib.parse import urlparse
# Tracing headers (W3C trace context, cloud/request tracing) that may be
# echoed back in Access-Control-Allow-Headers by cors_response().
CORS_ALLOWED_TRACING_HEADERS = (
    "traceparent",
    "request-id",
    "request-context",
    "x-amzn-trace-id",
    "x-cloud-trace-context",
)


def cors_response(request, response):
    """Attach permissive CORS headers for the request's Origin to *response*.

    Returns *response* unchanged when no Origin header is present; otherwise
    mirrors the origin back (scheme://netloc), allows credentials and
    GET/POST/OPTIONS, and whitelists any requested tracing headers found in
    CORS_ALLOWED_TRACING_HEADERS.
    """
    if not request.META.get("HTTP_ORIGIN"):
        return response
    url = urlparse(request.META["HTTP_ORIGIN"])
    response["Access-Control-Allow-Origin"] = f"{url.scheme}://{url.netloc}"
    response["Access-Control-Allow-Credentials"] = "true"
    response["Access-Control-Allow-Methods"] = "GET, POST, OPTIONS"
    # Handle headers that sentry randomly sends for every request.
    # Would cause a CORS failure otherwise.
    # specified here to override the default added by the cors headers package in web.py
    # BUG FIX: header names are case-insensitive and browsers send them
    # comma-separated with optional whitespace ("traceparent, request-id"),
    # so normalise each token before matching against the whitelist.
    requested = request.META.get("HTTP_ACCESS_CONTROL_REQUEST_HEADERS", "").split(",")
    allow_headers = [
        header.strip().lower()
        for header in requested
        if header.strip().lower() in CORS_ALLOWED_TRACING_HEADERS
    ]
    response["Access-Control-Allow-Headers"] = "X-Requested-With,Content-Type" + (
        "," + ",".join(allow_headers) if allow_headers else ""
    )
    return response
|
utils | organise | import os
import shutil
from hashlib import md5
from io import StringIO
from photonix.photos.models import LibraryPath
from photonix.photos.utils.db import record_photo
from photonix.photos.utils.fs import determine_destination, find_new_file_name, mkdir_p
from photonix.photos.utils.metadata import get_datetime
from PIL import Image
SYNOLOGY_THUMBNAILS_DIR_NAME = "/@eaDir"
class FileHashCache(object):
    """
    Used with determine_same_file() function. Can keep hold of the previously
    opened orig and dest file contents. Can keep hold of all file-based and
    image-based hashes per file.
    """

    def __init__(self):
        # BUG FIX: these were mutable *class* attributes, silently shared by
        # every instance (and outliving any single import run); they are now
        # per-instance state.
        self.file_hash_cache = {}
        self.file_data = {"orig": (None, None), "dest": (None, None)}

    def reset(self):
        """Drop all cached hashes (the raw file-content cache is kept)."""
        self.file_hash_cache = {}

    def get_file_hash(self, fn, hash_type):
        """Return the cached hash of *fn* for *hash_type* ('file'/'image'), or None."""
        if fn in self.file_hash_cache and hash_type in self.file_hash_cache[fn]:
            return self.file_hash_cache[fn][hash_type]
        return None

    def set_file_hash(self, fn, hash_type, hash_val):
        """Store *hash_val* for file *fn* under *hash_type*."""
        self.file_hash_cache.setdefault(fn, {})[hash_type] = hash_val

    def get_file(self, fn, file_type):
        """Return the bytes of *fn*, cached per slot ('orig' or 'dest')."""
        if self.file_data[file_type][0] != fn:
            # BUG FIX: close the handle deterministically instead of leaking
            # it until garbage collection.
            with open(fn, "rb") as f:
                self.file_data[file_type] = (fn, f.read())
        return self.file_data[file_type][1]
def determine_same_file(origpath, destpath, fhc=None):
    """
    First check if hashes of the two files match. If they don't match, they
    could still be the same image if metadata has changed so open the pixel
    data using PIL and compare hashes of that.
    """
    if not fhc:
        fhc = FileHashCache()
    # Keep the per-file hash cache bounded.
    if len(fhc.file_hash_cache) > 1000:
        fhc.reset()

    orig_hash = fhc.get_file_hash(origpath, "file")
    if not orig_hash:
        orig_hash = md5(fhc.get_file(origpath, "orig")).hexdigest()
        fhc.set_file_hash(origpath, "file", orig_hash)
    dest_hash = fhc.get_file_hash(destpath, "file")
    if not dest_hash:
        dest_hash = md5(fhc.get_file(destpath, "dest")).hexdigest()
        fhc.set_file_hash(destpath, "file", dest_hash)
    if orig_hash == dest_hash:
        return True

    # Try matching on image data (ignoring EXIF)
    if os.path.splitext(origpath)[1][1:].lower() in [
        "jpg",
        "jpeg",
        "png",
    ]:
        orig_hash = fhc.get_file_hash(origpath, "image")
        if not orig_hash:
            # BUG FIX: get_file() returns bytes, so the in-memory buffer must
            # be a BytesIO — StringIO(bytes) raises TypeError on Python 3.
            orig_hash = md5(
                Image.open(BytesIO(fhc.get_file(origpath, "orig"))).tobytes()
            ).hexdigest()
            fhc.set_file_hash(origpath, "image", orig_hash)
        dest_hash = fhc.get_file_hash(destpath, "image")
        if not dest_hash:
            dest_hash = md5(
                Image.open(BytesIO(fhc.get_file(destpath, "dest"))).tobytes()
            ).hexdigest()
            fhc.set_file_hash(destpath, "image", dest_hash)
        if orig_hash == dest_hash:
            return True
    # TODO: Convert raw photos into temp jpgs to do proper comparison
    return False
def blacklisted_type(file):
    """Return True for files that must never be imported: video/sidecar
    extensions and macOS Finder metadata files."""
    if file == ".DS_Store":
        return True
    extension = file.split(".")[-1].lower()
    return extension in {"mov", "mp4", "mkv", "xmp"}
def import_photos_from_dir(orig, move=False):
    """Walk *orig*, copy/move photos into dated destination folders and record
    them in the database.

    Args:
        orig: root directory to scan.
        move: when True, move files (and delete source duplicates); when
            False, copy them.
    """
    imported = 0
    were_duplicates = 0
    were_bad = 0

    for r, d, f in os.walk(orig):
        # Skip Synology thumbnail trees entirely.
        if SYNOLOGY_THUMBNAILS_DIR_NAME in r:
            continue
        for fn in sorted(f):
            filepath = os.path.join(r, fn)
            dest = determine_destination(filepath)
            if blacklisted_type(fn):
                # Blacklisted type
                were_bad += 1
            elif not dest:
                # No filters match this file type
                pass
            else:
                t = get_datetime(filepath)
                if t:
                    # Destination layout: <dest>/YYYY/MM/DD/<filename>.
                    destpath = "%02d/%02d/%02d" % (t.year, t.month, t.day)
                    destpath = os.path.join(dest, destpath)
                    mkdir_p(destpath)
                    destpath = os.path.join(destpath, fn)

                    if filepath == destpath:
                        # File is already in the right place so be very careful not to do anything like delete it
                        pass
                    elif not os.path.exists(destpath):
                        if move:
                            shutil.move(filepath, destpath)
                        else:
                            shutil.copyfile(filepath, destpath)
                        record_photo(destpath)
                        imported += 1
                        print("IMPORTED {} -> {}".format(filepath, destpath))
                    else:
                        print("PATH EXISTS {} -> {}".format(filepath, destpath))
                        same = determine_same_file(filepath, destpath)
                        # NOTE(review): this is printed before checking
                        # `same`, so it also appears for differing files.
                        print("PHOTO IS THE SAME")
                        if same:
                            if move:
                                os.remove(filepath)
                                were_duplicates += 1
                                print("DELETED FROM SOURCE")
                        else:
                            print("NEED TO IMPORT UNDER DIFFERENT NAME")
                            # NOTE(review): exit(1) aborts the whole process
                            # here, making the rename/import lines below
                            # unreachable — presumably a deliberate guard
                            # until renaming is trusted; confirm.
                            exit(1)
                            destpath = find_new_file_name(destpath)
                            shutil.move(filepath, destpath)
                            record_photo(destpath)
                            imported += 1
                            # print 'IMPORTED {} -> {}'.format(filepath, destpath)
                else:
                    print("ERROR READING DATE: {}".format(filepath))
                    were_bad += 1
    if imported or were_duplicates:
        print(
            "\n{} PHOTOS IMPORTED\n{} WERE DUPLICATES\n{} WERE BAD".format(
                imported, were_duplicates, were_bad
            )
        )
def import_photos_in_place(library_path):
    """Record every non-blacklisted file under *library_path* in the database
    without moving or copying anything on disk."""
    root_dir = library_path.path
    imported = 0
    were_bad = 0
    for dirpath, _dirs, filenames in os.walk(root_dir):
        # Skip Synology thumbnail trees entirely.
        if SYNOLOGY_THUMBNAILS_DIR_NAME in dirpath:
            continue
        for filename in sorted(filenames):
            full_path = os.path.join(dirpath, filename)
            if blacklisted_type(filename):
                # Blacklisted type
                were_bad += 1
                continue
            if record_photo(full_path, library_path.library):
                imported += 1
                print("IMPORTED {}".format(full_path))
    if imported:
        print("\n{} PHOTOS IMPORTED\n{} WERE BAD".format(imported, were_bad))
def rescan_photo_libraries(paths=None):
    """Rescan local-storage library paths for changes.

    Args:
        paths: optional list of path strings; when given, only matching
            LibraryPath rows are rescanned, otherwise all of them.
    """
    # BUG FIX: the default was a mutable list (paths=[]); None expresses
    # "all paths" without the shared-default pitfall and behaves identically
    # for every existing caller.
    library_paths = LibraryPath.objects.filter(type="St", backend_type="Lo")
    if paths:
        library_paths = library_paths.filter(path__in=paths)
    for library_path in library_paths:
        print(f"Searching path for changes {library_path.path}")
        library_path.rescan()
|
drawBot | drawBotPageDrawingTools | from .drawBotDrawingTools import DrawBotDrawingTool, _drawBotDrawingTool
class DummyDrawBotDrawingTool(DrawBotDrawingTool):
    """Throw-away drawing tool that replays an instruction set on construction.

    Replaying pushes the recorded graphics-state attributes into the dummy
    context, so after __init__ the tool mirrors the state captured in
    *instructionSet*. Used by DrawBotPage to load one page's state.
    """

    def __init__(self, instructionSet):
        super(DummyDrawBotDrawingTool, self).__init__()
        # Adopt the given instruction set as our own recording.
        self._instructionsStack.append(instructionSet)
        # Draw all instructions into ourselves just to set every attribute on
        # the dummy context — this establishes the "current state".
        self._drawInContext(self)

    def _addInstruction(self, callback, *args, **kwargs):
        # Recording is disabled: replaying must not append new instructions.
        pass
class DrawBotPage(object):
    """Context manager exposing a single recorded page.

    Entering swaps the shared drawing tool's state for the state captured in
    this page's instruction set; exiting restores the previous state.
    """

    def __init__(self, instructionSet):
        self._instructionSet = instructionSet

    def __enter__(self):
        # Snapshot the shared tool so it can be restored on exit.
        self._originalTool = _drawBotDrawingTool._copy()
        # Replay this page's instructions into a throw-away tool.
        replayTool = DummyDrawBotDrawingTool(self._instructionSet)
        # Make the global newPage/size behave as in single-page mode.
        _drawBotDrawingTool._isSinglePage = True
        # Load the replayed state into the shared tool.
        _drawBotDrawingTool._reset(replayTool)
        return self

    def __exit__(self, type, value, traceback):
        # Restore the shared tool's saved state and leave single-page mode.
        _drawBotDrawingTool._reset(self._originalTool)
        _drawBotDrawingTool._isSinglePage = False
|
core | data_parser | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Interface for data parsers.
Data parser parses input data and returns a dictionary of numpy arrays
keyed by the entries in standard_fields.py. Since the parser parses records
to numpy arrays (materialized tensors) directly, it is used to read data for
evaluation/visualization; to parse the data during training, DataDecoder should
be used.
"""
from abc import ABCMeta, abstractmethod
class DataToNumpyParser(object, metaclass=ABCMeta):
    """Abstract interface for parsers that materialize records as numpy arrays."""

    # BUG FIX: the class previously set the Python 2 style
    # "__metaclass__ = ABCMeta" class attribute, which Python 3 ignores —
    # the class was directly instantiable and @abstractmethod was not
    # enforced. The metaclass keyword restores the intended contract.

    @abstractmethod
    def parse(self, input_data):
        """Parses input and returns a numpy array or a dictionary of numpy arrays.

        Args:
          input_data: an input data

        Returns:
          A numpy array or a dictionary of numpy arrays or None, if input
          cannot be parsed.
        """
        pass
|
xlgui | cover | # Copyright (C) 2008-2010 Adam Olsen
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2, or (at your option)
# any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
#
# The developers of the Exaile media player hereby grant permission
# for non-GPL compatible GStreamer and Exaile plugins to be used and
# distributed together with GStreamer and Exaile. This permission is
# above and beyond the permissions granted by the GPL license by which
# Exaile is covered. If you modify this code, you may extend this
# exception to your version of the code, but you are not obligated to
# do so. If you do not wish to do so, delete this exception statement
# from your version.
import logging
import os
import os.path
import tempfile
import threading
import cairo
from gi.repository import Gdk, GdkPixbuf, Gio, GLib, GObject, Gtk
from xl import common, event, providers, settings, xdg
from xl.covers import MANAGER as COVER_MANAGER
from xl.nls import gettext as _
from xlgui import guiutil
from xlgui.guiutil import pixbuf_from_data
from xlgui.widgets import dialogs, menu
logger = logging.getLogger(__name__)
def save_pixbuf(pixbuf, path, type_):
    """Save a pixbuf to a local file.

    :param pixbuf: Pixbuf to save
    :type pixbuf: GdkPixbuf.Pixbuf
    :param path: Path of file to save to
    :type path: str
    :param type_: Type of image file. See GdkPixbuf.savev for valid values.
    :type type_: str
    :return: None
    """
    # Thin wrapper hiding the awkward GdkPixbuf.savev signature (no option
    # key/value pairs are passed). Could go away if PyGObject ever grows an
    # override for it.
    pixbuf.savev(path, type_, [None], [])
class CoverManager(GObject.GObject):
    """
    Cover manager window

    Collects albums from a collection on a background thread ("prefetch"),
    then fetches missing covers on demand ("fetch"). Worker threads emit
    GObject signals; the do_* default handlers are wrapped in
    common.idle_add() so all widget updates run on the GTK main loop.
    """

    __gsignals__ = {
        "prefetch-started": (GObject.SignalFlags.RUN_LAST, None, ()),
        "prefetch-progress": (GObject.SignalFlags.RUN_LAST, None, (GObject.TYPE_INT,)),
        "prefetch-completed": (GObject.SignalFlags.RUN_LAST, None, (GObject.TYPE_INT,)),
        "fetch-started": (GObject.SignalFlags.RUN_LAST, None, (GObject.TYPE_INT,)),
        "fetch-completed": (GObject.SignalFlags.RUN_LAST, None, (GObject.TYPE_INT,)),
        "fetch-progress": (GObject.SignalFlags.RUN_LAST, None, (GObject.TYPE_INT,)),
        "cover-fetched": (
            GObject.SignalFlags.RUN_LAST,
            None,
            (GObject.TYPE_PYOBJECT, GdkPixbuf.Pixbuf),
        ),
    }

    def __init__(self, parent, collection):
        """
        Initializes the window

        :param parent: transient parent window
        :param collection: iterable of tracks to collect albums from
        """
        GObject.GObject.__init__(self)
        # List of identifiers of albums without covers
        self.outstanding = []
        # Map of album identifiers and their tracks
        self.album_tracks = {}
        self.outstanding_text = _("{outstanding} covers left to fetch")
        self.completed_text = _("All covers fetched")
        self.cover_size = (90, 90)
        self.default_cover_pixbuf = pixbuf_from_data(
            COVER_MANAGER.get_default_cover(), self.cover_size
        )
        builder = guiutil.get_builder(xdg.get_data_path("ui", "covermanager.ui"))
        builder.connect_signals(self)
        self.window = builder.get_object("window")
        self.window.set_transient_for(parent)
        self.message = dialogs.MessageBar(
            parent=builder.get_object("content_area"), buttons=Gtk.ButtonsType.CLOSE
        )
        self.previews_box = builder.get_object("previews_box")
        self.model = builder.get_object("covers_model")
        # Map of album identifiers and model paths
        self.model_path_cache = {}
        self.menu = CoverMenu(self)
        self.menu.attach_to_widget(self.previews_box, lambda menu, widget: True)
        self.progress_bar = builder.get_object("progressbar")
        self.progress_bar.set_text(_("Collecting albums and covers..."))
        # Pulse while collecting; removed once prefetch starts reporting
        # determinate progress.
        self.progress_bar.pulse_timeout = GLib.timeout_add(
            100, self.on_progress_pulse_timeout
        )
        self.close_button = builder.get_object("close_button")
        self.stop_button = builder.get_object("stop_button")
        self.stop_button.set_sensitive(False)
        self.fetch_button = builder.get_object("fetch_button")
        self.window.show_all()
        # Signals both the prefetch and the fetch worker threads to stop.
        self.stopper = threading.Event()
        thread = threading.Thread(
            target=self.prefetch, name="CoverPrefetch", args=(collection,)
        )
        thread.daemon = True
        thread.start()

    def prefetch(self, collection):
        """
        Collects all albums and sets the list of outstanding items

        Runs on a worker thread; communicates via prefetch-* signals.
        """
        albums = set()
        for track in collection:
            if self.stopper.is_set():
                return
            try:
                artist = track.get_tag_raw("artist")[0]
                album = track.get_tag_raw("album")[0]
            except TypeError:
                # get_tag_raw returned None for either tag.
                continue
            if not album or not artist:
                continue
            album = (artist, album)
            try:
                self.album_tracks[album].append(track)
            except KeyError:
                self.album_tracks[album] = [track]
            albums.add(album)
        albums = sorted(albums)
        outstanding = []
        # Speed up the following loop
        get_cover = COVER_MANAGER.get_cover
        default_cover_pixbuf = self.default_cover_pixbuf
        cover_size = self.cover_size
        self.emit("prefetch-started")
        for i, album in enumerate(albums):
            if self.stopper.is_set():
                return
            # set_only: never hit the network during prefetch.
            cover_data = get_cover(self.album_tracks[album][0], set_only=True)
            cover_pixbuf = pixbuf_from_data(cover_data) if cover_data else None
            try:
                thumbnail_pixbuf = cover_pixbuf.scale_simple(
                    *cover_size, interp_type=GdkPixbuf.InterpType.BILINEAR
                )
            except AttributeError:  # cover_pixbuf is None
                thumbnail_pixbuf = default_cover_pixbuf
                outstanding.append(album)
            label = "{0} - {1}".format(*album)
            iter = self.model.append((album, thumbnail_pixbuf, label))
            self.model_path_cache[album] = self.model.get_path(iter)
            self.emit("prefetch-progress", i + 1)
        self.outstanding = outstanding
        self.emit("prefetch-completed", len(self.outstanding))

    def fetch(self):
        """
        Collects covers for all outstanding items

        Runs on a worker thread; communicates via fetch-* / cover-fetched
        signals. Iterates a copy of self.outstanding since it mutates it.
        """
        self.emit("fetch-started", len(self.outstanding))
        # Speed up the following loop
        get_cover = COVER_MANAGER.get_cover
        save = COVER_MANAGER.save
        for i, album in enumerate(self.outstanding[:]):
            if self.stopper.is_set():
                # Allow for "fetch-completed" signal to be emitted
                break
            cover_data = get_cover(self.album_tracks[album][0], save_cover=True)
            cover_pixbuf = pixbuf_from_data(cover_data) if cover_data else None
            self.emit("fetch-progress", i + 1)
            if not cover_pixbuf:
                continue
            self.outstanding.remove(album)
            self.emit("cover-fetched", album, cover_pixbuf)
            # Persist periodically so an interrupted run keeps its progress.
            if i % 50 == 0:
                logger.debug("Saving cover database")
                save()
        logger.debug("Saving cover database")
        save()
        self.emit("fetch-completed", len(self.outstanding))

    def show_cover(self):
        """
        Shows the currently selected cover
        """
        paths = self.previews_box.get_selected_items()
        if paths:
            path = paths[0]
            album = self.model[path][0]
            track = self.album_tracks[album][0]  # Arbitrary track in album
            cover_data = COVER_MANAGER.get_cover(track, set_only=True)
            cover_pixbuf = pixbuf_from_data(cover_data) if cover_data else None
            # Do not bother showing the dialog if there is no cover
            if cover_pixbuf:
                savedir = Gio.File.new_for_uri(track.get_loc_for_io()).get_parent()
                if savedir:
                    savedir = savedir.get_path()
                cover_window = CoverWindow(self.window, cover_pixbuf, album[1], savedir)
                cover_window.show_all()

    def fetch_cover(self):
        """
        Shows the cover chooser for the currently selected album
        """
        paths = self.previews_box.get_selected_items()
        if paths:
            path = paths[0]
            album = self.model[path][0]
            track = self.album_tracks[album][0]
            cover_chooser = CoverChooser(self.window, track)
            # Make sure we're updating the correct album after selection
            cover_chooser.path = path
            cover_chooser.connect("cover-chosen", self.on_cover_chosen)

    def remove_cover(self):
        """
        Removes the cover of the currently selected album
        """
        paths = self.previews_box.get_selected_items()
        if paths:
            path = paths[0]
            album = self.model[path][0]
            track = self.album_tracks[album][0]
            COVER_MANAGER.remove_cover(track)
            self.model[path][1] = self.default_cover_pixbuf

    @common.idle_add()
    def do_prefetch_started(self):
        """
        Sets the widget states to prefetching
        """
        # Detach the model while it is repopulated (avoids redraw per row).
        self.previews_box.set_model(None)
        self.model.clear()
        self.previews_box.set_sensitive(False)
        self.fetch_button.set_sensitive(False)
        self.progress_bar.set_fraction(0)
        # Switch from pulsing to determinate progress.
        GLib.source_remove(self.progress_bar.pulse_timeout)

    @common.idle_add()
    def do_prefetch_completed(self, outstanding):
        """
        Sets the widget states to ready for fetching
        """
        self.previews_box.set_sensitive(True)
        self.previews_box.set_model(self.model)
        self.fetch_button.set_sensitive(True)
        self.progress_bar.set_fraction(0)
        self.progress_bar.set_text(
            self.outstanding_text.format(outstanding=outstanding)
        )

    @common.idle_add()
    def do_prefetch_progress(self, progress):
        """
        Updates the wiedgets to reflect the processed album
        """
        fraction = progress / float(len(self.album_tracks))
        self.progress_bar.set_fraction(fraction)

    @common.idle_add()
    def do_fetch_started(self, outstanding):
        """
        Sets the widget states to fetching
        """
        self.previews_box.set_sensitive(False)
        self.stop_button.set_sensitive(True)
        self.fetch_button.set_sensitive(False)
        self.progress_bar.set_fraction(0)
        # We need float for the fraction during progress
        self.progress_bar.outstanding_total = float(outstanding)

    @common.idle_add()
    def do_fetch_completed(self, outstanding):
        """
        Sets the widget states to ready for fetching
        """
        self.previews_box.set_sensitive(True)
        self.stop_button.set_sensitive(False)
        if outstanding > 0:
            # If there are covers left for some reason, allow re-fetch
            self.fetch_button.set_sensitive(True)
        self.progress_bar.set_fraction(0)

    @common.idle_add()
    def do_fetch_progress(self, progress):
        """
        Updates the widgets to reflect the processed album
        """
        outstanding = len(self.outstanding)
        if outstanding > 0:
            progress_text = self.outstanding_text.format(outstanding=outstanding)
        else:
            progress_text = self.completed_text
        self.progress_bar.set_text(progress_text)
        fraction = progress / self.progress_bar.outstanding_total
        self.progress_bar.set_fraction(fraction)

    @common.idle_add()
    def do_cover_fetched(self, album, pixbuf):
        """
        Updates the widgets to reflect the newly fetched cover
        """
        path = self.model_path_cache[album]
        self.model[path][1] = pixbuf.scale_simple(
            *self.cover_size, interp_type=GdkPixbuf.InterpType.BILINEAR
        )

    def on_cover_chosen(self, cover_chooser, track, cover_data):
        """
        Updates the cover of the current album after user selection
        """
        path = cover_chooser.path
        if path:
            album = self.model[path][0]
            pixbuf = pixbuf_from_data(cover_data)
            self.emit("cover-fetched", album, pixbuf)
            try:
                self.outstanding.remove(album)
            except ValueError:
                # Album already had a cover; nothing left to account for.
                pass
            else:
                outstanding = len(self.outstanding)
                if outstanding > 0:
                    progress_text = self.outstanding_text.format(
                        outstanding=outstanding
                    )
                else:
                    progress_text = self.completed_text
                self.progress_bar.set_text(progress_text)

    def on_previews_box_item_activated(self, iconview, path):
        """
        Shows the currently selected cover
        """
        self.show_cover()

    def on_previews_box_button_press_event(self, widget, e):
        """
        Shows the cover menu upon click
        """
        path = self.previews_box.get_path_at_pos(int(e.x), int(e.y))
        if path:
            self.previews_box.select_path(path)
            if e.triggers_context_menu():
                self.menu.popup(None, None, None, None, 3, e.time)

    def on_previews_box_popup_menu(self, menu):
        """
        Shows the cover menu upon keyboard interaction
        """
        paths = self.previews_box.get_selected_items()
        if paths:
            self.menu.popup(None, None, None, None, 0, Gtk.get_current_event_time())

    def on_previews_box_query_tooltip(self, widget, x, y, keyboard_mode, tooltip):
        """
        Custom tooltip display to prevent markup errors
        (e.g. due to album names containing "<")
        """
        x, y = self.previews_box.convert_widget_to_bin_window_coords(x, y)
        path = self.previews_box.get_path_at_pos(x, y)
        if path:
            tooltip.set_text(self.model[path][2])
            self.previews_box.set_tooltip_item(tooltip, path)
            return True
        return False

    def on_progress_pulse_timeout(self):
        """
        Updates the progress during prefetching

        Returning True keeps the GLib timeout alive.
        """
        self.progress_bar.pulse()
        return True

    def on_close_button_clicked(self, button):
        """
        Stops the current fetching process and closes the dialog
        """
        self.stopper.set()
        self.window.destroy()
        # Free some memory
        self.model.clear()
        del self.outstanding
        del self.album_tracks
        del self.model_path_cache

    def on_stop_button_clicked(self, button):
        """
        Stops the current fetching process
        """
        self.stopper.set()

    def on_fetch_button_clicked(self, button):
        """
        Starts the cover fetching process
        """
        self.stopper.clear()
        thread = threading.Thread(target=self.fetch, name="CoverFetch")
        thread.daemon = True
        thread.start()

    def on_window_delete_event(self, window, e):
        """
        Stops the current fetching process and closes the dialog
        """
        self.close_button.clicked()
        return True
class CoverMenu(menu.Menu):
    """Context menu offering show/fetch/remove actions for a cover widget."""

    def __init__(self, widget):
        """Build the three menu entries, delegating to *widget*'s methods."""
        menu.Menu.__init__(self, widget)
        self.w = widget
        for label, callback in (
            (_("Show Cover"), self.on_show_clicked),
            (_("Fetch Cover"), self.on_fetch_clicked),
            (_("Remove Cover"), self.on_remove_clicked),
        ):
            self.add_simple(label, callback)

    def on_show_clicked(self, *e):
        """Show the current cover."""
        self.w.show_cover()

    def on_fetch_clicked(self, *e):
        """Open the cover chooser."""
        self.w.fetch_cover()

    def on_remove_clicked(self, *e):
        """Remove the current cover."""
        self.w.remove_cover()
class CoverWidget(Gtk.EventBox):
"""
Represents the cover widget displayed by the track information
"""
__gsignals__ = {"cover-found": (GObject.SignalFlags.RUN_LAST, None, (object,))}
def __init__(self, image):
"""
Initializes the widget
:param image: the image to wrap
:type image: :class:`Gtk.Image`
"""
GObject.GObject.__init__(self)
self.image = image
self.cover_data = None
self.menu = CoverMenu(self)
self.menu.attach_to_widget(self)
self.filename = None
guiutil.gtk_widget_replace(image, self)
self.add(self.image)
self.set_track(None)
self.image.show()
event.add_callback(self.on_quit_application, "quit_application")
if settings.get_option("gui/use_alpha", False):
self.set_app_paintable(True)
def destroy(self):
"""
Cleanups
"""
if self.filename is not None and os.path.exists(self.filename):
os.remove(self.filename)
self.filename = None
event.remove_callback(self.on_quit_application, "quit-application")
def set_track(self, track):
"""
Fetches album covers, and displays them
"""
self.__track = track
self.set_blank()
self.drag_dest_set(
Gtk.DestDefaults.ALL,
[Gtk.TargetEntry.new("text/uri-list", 0, 0)],
Gdk.DragAction.COPY | Gdk.DragAction.DEFAULT | Gdk.DragAction.MOVE,
)
@common.threaded
def __get_cover():
fetch = not settings.get_option("covers/automatic_fetching", True)
cover_data = COVER_MANAGER.get_cover(track, set_only=fetch)
if not cover_data:
return
GLib.idle_add(self.on_cover_chosen, None, track, cover_data)
if track is not None:
__get_cover()
def show_cover(self):
"""
Shows the current cover
"""
if not self.cover_data:
return
pixbuf = pixbuf_from_data(self.cover_data)
if pixbuf:
savedir = Gio.File.new_for_uri(self.__track.get_loc_for_io()).get_parent()
if savedir:
savedir = savedir.get_path()
window = CoverWindow(
self.get_toplevel(),
pixbuf,
self.__track.get_tag_display("album"),
savedir,
)
window.show_all()
def fetch_cover(self):
"""
Fetches a cover for the current track
"""
if not self.__track:
return
window = CoverChooser(self.get_toplevel(), self.__track)
window.connect("cover-chosen", self.on_cover_chosen)
def remove_cover(self):
"""
Removes the cover for the current track from the database
"""
COVER_MANAGER.remove_cover(self.__track)
self.set_blank()
    def set_blank(self):
        """
        Sets the default cover to display and clears any stored cover
        data
        """
        # Disallow drops until a track is set again (set_track re-enables)
        self.drag_dest_unset()
        pixbuf = pixbuf_from_data(COVER_MANAGER.get_default_cover())
        self.image.set_from_pixbuf(pixbuf)
        # Nothing meaningful to drag out of the widget either
        self.set_drag_source_enabled(False)
        self.cover_data = None
        # Notify listeners that no cover is currently displayed
        self.emit("cover-found", None)
def set_drag_source_enabled(self, enabled):
"""
Changes the behavior for drag and drop
:param drag_enabled: Whether to allow
drag to other applications
:type enabled: bool
"""
if enabled == getattr(self, "__drag_source_enabled", None):
return
if enabled:
self.drag_source_set(
Gdk.ModifierType.BUTTON1_MASK,
[Gtk.TargetEntry.new("text/uri-list", 0, 0)],
Gdk.DragAction.DEFAULT | Gdk.DragAction.MOVE,
)
else:
self.drag_source_unset()
self.__drag_source_enabled = enabled
    def do_button_press_event(self, event):
        """
        Called when someone clicks on the cover widget

        Double click opens the cover in a viewer window; a context-menu
        click (e.g. right click) pops up the cover menu.
        """
        if self.__track is None or self.get_toplevel() is None:
            return
        if event.type == Gdk.EventType._2BUTTON_PRESS:
            self.show_cover()
        elif event.triggers_context_menu():
            self.menu.popup(event)
    def do_expose_event(self, event):
        """
        Paints alpha transparency using the theme's background color
        """
        opacity = 1 - settings.get_option("gui/transparency", 0.3)
        context = self.props.window.cairo_create()
        background = self.style.bg[Gtk.StateType.NORMAL]
        # GTK color channels are 16-bit (0..65535); scale into cairo's
        # 0..1 range by dividing by 256**2
        context.set_source_rgba(
            float(background.red) / 256**2,
            float(background.green) / 256**2,
            float(background.blue) / 256**2,
            opacity,
        )
        # SOURCE replaces the destination instead of blending over it
        context.set_operator(cairo.OPERATOR_SOURCE)
        context.paint()
        Gtk.EventBox.do_expose_event(self, event)
def do_drag_begin(self, context):
"""
Sets the cover as drag icon
"""
self.drag_source_set_icon_pixbuf(self.image.get_pixbuf())
def do_drag_data_get(self, context, selection, info, time):
"""
Fills the selection with the current cover
"""
if self.filename is None:
self.filename = tempfile.mkstemp(prefix="exaile_cover_")[1]
pixbuf = pixbuf_from_data(self.cover_data)
save_pixbuf(pixbuf, self.filename, "png")
selection.set_uris([Gio.File.new_for_path(self.filename).get_uri()])
def do_drag_data_delete(self, context):
"""
Cleans up after drag from cover widget
"""
if self.filename is not None and os.path.exists(self.filename):
os.remove(self.filename)
self.filename = None
    def do_drag_data_received(self, context, x, y, selection, info, time):
        """
        Sets the cover based on the dragged data
        """
        if self.__track is not None:
            # Only the first dropped URI is considered
            uri = selection.get_uris()[0]
            db_string = "localfile:%s" % uri
            try:
                stream = Gio.File.new_for_uri(uri).read()
            except GLib.Error:
                # Source unreadable; silently ignore the drop
                return
            self.cover_data = stream.read()
            width = settings.get_option("gui/cover_width", 100)
            pixbuf = pixbuf_from_data(self.cover_data, (width, width))
            # Only persist the cover when the data decoded to an image
            if pixbuf is not None:
                self.image.set_from_pixbuf(pixbuf)
                COVER_MANAGER.set_cover(self.__track, db_string, self.cover_data)
    def on_cover_chosen(self, object, track, cover_data):
        """
        Called when a cover is selected
        from the coverchooser

        :param object: the emitting object (unused, may be ``None``)
        :param track: the track the cover belongs to
        :param cover_data: raw image bytes of the chosen cover
        """
        # Ignore results for a track that is no longer displayed
        if self.__track != track:
            return
        width = settings.get_option("gui/cover_width", 100)
        pixbuf = pixbuf_from_data(cover_data, (width, width))
        self.image.set_from_pixbuf(pixbuf)
        # A real cover can now be dragged out of the widget
        self.set_drag_source_enabled(True)
        self.cover_data = cover_data
        self.emit("cover-found", pixbuf)
def on_track_tags_changed(self, e, track, tags):
"""
Updates the displayed cover upon tag changes
"""
if self.__track == track:
cover_data = COVER_MANAGER.get_cover(track)
if not cover_data:
return
GLib.idle_add(self.on_cover_chosen, None, cover_data)
def on_quit_application(self, type, exaile, nothing):
"""
Cleans up temporary files
"""
if self.filename is not None and os.path.exists(self.filename):
os.remove(self.filename)
self.filename = None
class CoverWindow:
    """Shows the cover in a simple image viewer window, with zoom
    (in / out / 100% / fit) and a Save As function."""
    def __init__(self, parent, pixbuf, album=None, savedir=None):
        """Initializes and shows the cover
        :param parent: Parent window to attach to
        :type parent: Gtk.Window
        :param pixbuf: Pixbuf of the cover image
        :type pixbuf: GdkPixbuf.Pixbuf
        :param album: Album title
        :type album: basestring
        :param savedir: Initial directory for the Save As functionality
        :type savedir: basestring
        """
        self.builder = guiutil.get_builder(xdg.get_data_path("ui", "coverwindow.ui"))
        self.builder.connect_signals(self)
        self.cover_window = self.builder.get_object("CoverWindow")
        self.layout = self.builder.get_object("layout")
        self.toolbar = self.builder.get_object("toolbar")
        self.save_as_button = self.builder.get_object("save_as_button")
        self.zoom_in_button = self.builder.get_object("zoom_in_button")
        self.zoom_out_button = self.builder.get_object("zoom_out_button")
        self.zoom_100_button = self.builder.get_object("zoom_100_button")
        self.zoom_fit_button = self.builder.get_object("zoom_fit_button")
        self.close_button = self.builder.get_object("close_button")
        self.image = self.builder.get_object("image")
        self.statusbar = self.builder.get_object("statusbar")
        self.scrolledwindow = self.builder.get_object("scrolledwindow")
        self.scrolledwindow.set_hadjustment(self.layout.get_hadjustment())
        self.scrolledwindow.set_vadjustment(self.layout.get_vadjustment())
        if album:
            title = _("Cover for %s") % album
        else:
            title = _("Cover")
        self.savedir = savedir
        self.cover_window.set_title(title)
        self.cover_window.set_transient_for(parent)
        # Default size: 500x500 image area plus toolbar/statusbar height
        self.cover_window_width = 500
        tb_min_height, tb_natural_height = self.toolbar.get_preferred_height()
        sb_min_height, sb_natural_height = self.statusbar.get_preferred_height()
        self.cover_window_height = 500 + tb_natural_height + sb_natural_height
        self.cover_window.set_default_size(
            self.cover_window_width, self.cover_window_height
        )
        self.image_original_pixbuf = pixbuf
        self.image_pixbuf = self.image_original_pixbuf
        # Zoom limits (percent) and the factor applied per zoom step
        self.min_percent = 1
        self.max_percent = 500
        self.ratio = 1.5
        self.image_interp = GdkPixbuf.InterpType.BILINEAR
        # Whether the image is auto-scaled to fit the window
        self.image_fitted = True
        self.set_ratio_to_fit()
        self.update_widgets()
    def show_all(self):
        """Shows the viewer window and all of its children"""
        self.cover_window.show_all()
    def available_image_width(self):
        """Returns the available horizontal space for the image"""
        return self.cover_window.get_size()[0]
    def available_image_height(self):
        """Returns the available vertical space for the image"""
        tb_min_height, tb_natural_height = self.toolbar.get_preferred_height()
        sb_min_height, sb_natural_height = self.statusbar.get_preferred_height()
        return self.cover_window.get_size()[1] - tb_natural_height - sb_natural_height
    def center_image(self):
        """Centers the image in the layout"""
        new_x = max(
            0, (self.available_image_width() - self.image_pixbuf.get_width()) // 2
        )
        new_y = max(
            0, (self.available_image_height() - self.image_pixbuf.get_height()) // 2
        )
        self.layout.move(self.image, new_x, new_y)
    def update_widgets(self):
        """Updates image, layout, scrolled window, tool bar and status bar"""
        window = self.cover_window.get_window()
        # Freeze redraws while several widgets change to avoid flicker
        if window:
            window.freeze_updates()
        self.apply_zoom()
        self.layout.set_size(
            self.image_pixbuf.get_width(), self.image_pixbuf.get_height()
        )
        # Scrollbars are only needed when the image exceeds the window
        if self.image_fitted or (
            self.image_pixbuf.get_width() == self.available_image_width()
            and self.image_pixbuf.get_height() == self.available_image_height()
        ):
            self.scrolledwindow.set_policy(Gtk.PolicyType.NEVER, Gtk.PolicyType.NEVER)
        else:
            self.scrolledwindow.set_policy(
                Gtk.PolicyType.AUTOMATIC, Gtk.PolicyType.AUTOMATIC
            )
        percent = int(100 * self.image_ratio)
        message = _("{width}x{height} pixels ({zoom}%)").format(
            width=self.image_original_pixbuf.get_width(),
            height=self.image_original_pixbuf.get_height(),
            zoom=percent,
        )
        self.zoom_in_button.set_sensitive(percent < self.max_percent)
        self.zoom_out_button.set_sensitive(percent > self.min_percent)
        self.statusbar.pop(self.statusbar.get_context_id(""))
        self.statusbar.push(self.statusbar.get_context_id(""), message)
        self.image.set_from_pixbuf(self.image_pixbuf)
        self.center_image()
        if window:
            window.thaw_updates()
    def apply_zoom(self):
        """Scales the image if needed"""
        new_width = int(self.image_original_pixbuf.get_width() * self.image_ratio)
        new_height = int(self.image_original_pixbuf.get_height() * self.image_ratio)
        # Only rescale when the target size actually changed
        if (
            new_width != self.image_pixbuf.get_width()
            or new_height != self.image_pixbuf.get_height()
        ):
            self.image_pixbuf = self.image_original_pixbuf.scale_simple(
                new_width, new_height, self.image_interp
            )
    def set_ratio_to_fit(self):
        """Calculates and sets the needed ratio to show the full image"""
        width_ratio = (
            float(self.image_original_pixbuf.get_width()) / self.available_image_width()
        )
        height_ratio = (
            float(self.image_original_pixbuf.get_height())
            / self.available_image_height()
        )
        # Never scale up beyond 100%: the max(1, ...) caps the ratio at 1
        self.image_ratio = 1 / max(1, width_ratio, height_ratio)
    def on_key_press(self, widget, event, data=None):
        """
        Closes the cover window when Escape or Ctrl+W is pressed
        """
        if event.keyval == Gdk.KEY_Escape or (
            event.state & Gdk.ModifierType.CONTROL_MASK and event.keyval == Gdk.KEY_w
        ):
            widget.destroy()
    def on_save_as_button_clicked(self, widget):
        """
        Saves image to user-specified location
        """
        dialog = Gtk.FileChooserDialog(
            _("Save File"),
            self.cover_window,
            Gtk.FileChooserAction.SAVE,
            (
                Gtk.STOCK_CANCEL,
                Gtk.ResponseType.CANCEL,
                Gtk.STOCK_SAVE,
                Gtk.ResponseType.ACCEPT,
            ),
        )
        # Default file name comes from the preferred cover names setting
        names = settings.get_option("covers/localfile/preferred_names")
        filename = (names[0] if names else "cover") + ".png"
        dialog.set_current_name(filename)
        if self.savedir:
            dialog.set_current_folder(self.savedir)
        if dialog.run() == Gtk.ResponseType.ACCEPT:
            filename = dialog.get_filename()
            # Pick the image type from the chosen file extension
            lowfilename = filename.lower()
            if lowfilename.endswith(".jpg") or lowfilename.endswith(".jpeg"):
                type_ = "jpeg"
            else:
                type_ = "png"
            save_pixbuf(self.image_pixbuf, filename, type_)
        dialog.destroy()
    def on_zoom_in_button_clicked(self, widget):
        """
        Zooms into the image
        """
        self.image_fitted = False
        self.image_ratio *= self.ratio
        self.update_widgets()
    def on_zoom_out_button_clicked(self, widget):
        """
        Zooms out of the image
        """
        self.image_fitted = False
        self.image_ratio *= 1 / self.ratio
        self.update_widgets()
    def on_zoom_100_button_clicked(self, widget):
        """
        Restores the original image zoom
        """
        self.image_fitted = False
        self.image_ratio = 1
        self.update_widgets()
    def on_zoom_fit_button_clicked(self, widget):
        """
        Zooms the image to fit the window width
        """
        self.image_fitted = True
        self.set_ratio_to_fit()
        self.update_widgets()
    def on_close_button_clicked(self, widget):
        """
        Hides the window
        """
        self.cover_window.hide()
    def cover_window_size_allocate(self, widget, allocation):
        """Re-fits and re-centers the image when the window is resized"""
        if (
            self.cover_window_width != allocation.width
            or self.cover_window_height != allocation.height
        ):
            if self.image_fitted:
                self.set_ratio_to_fit()
            self.update_widgets()
            self.cover_window_width = allocation.width
            self.cover_window_height = allocation.height
class CoverChooser(GObject.GObject):
    """
    Fetches all album covers for a string, and allows the user to choose
    one out of the list
    """
    # covers-fetched: emitted once the fetcher thread has collected all
    #     candidate covers (payload: list of db strings)
    # cover-chosen: emitted when the user picks a cover
    #     (payload: track, cover data bytes)
    __gsignals__ = {
        "covers-fetched": (GObject.SignalFlags.RUN_LAST, None, (object,)),
        "cover-chosen": (GObject.SignalFlags.RUN_LAST, None, (object, object)),
    }
    def __init__(self, parent, track, search=None):
        """
        Expects the parent control, a track, and an optional search string
        """
        GObject.GObject.__init__(self)
        self.parent = parent
        self.builder = guiutil.get_builder(xdg.get_data_path("ui", "coverchooser.ui"))
        self.builder.connect_signals(self)
        self.window = self.builder.get_object("CoverChooser")
        self.window.set_title(
            _("Cover options for %(artist)s - %(album)s")
            % {
                "artist": track.get_tag_display("artist"),
                "album": track.get_tag_display("album"),
            }
        )
        self.window.set_transient_for(parent)
        self.message = dialogs.MessageBar(
            parent=self.builder.get_object("main_container"),
            buttons=Gtk.ButtonsType.CLOSE,
        )
        self.message.connect("response", self.on_message_response)
        self.track = track
        self.covers = []
        self.current = 0
        self.cover = guiutil.ScalableImageWidget()
        self.cover.set_image_size(350, 350)
        self.cover_image_box = self.builder.get_object("cover_image_box")
        self.stack = self.builder.get_object("stack")
        self.stack_ready = self.builder.get_object("stack_ready")
        self.size_label = self.builder.get_object("size_label")
        self.source_label = self.builder.get_object("source_label")
        self.covers_model = self.builder.get_object("covers_model")
        self.previews_box = self.builder.get_object("previews_box")
        # The thumbnail bar stays hidden until several covers are found
        self.previews_box.set_no_show_all(True)
        self.previews_box.hide()
        self.previews_box.set_model(None)
        self.set_button = self.builder.get_object("set_button")
        self.set_button.set_sensitive(False)
        self.window.show_all()
        # stopper lets on_cancel_button_clicked abort the fetcher thread
        self.stopper = threading.Event()
        self.fetcher_thread = threading.Thread(
            target=self.fetch_cover, name="Coverfetcher"
        )
        self.fetcher_thread.start()
    def fetch_cover(self):
        """
        Searches for covers for the current track

        Runs on the fetcher thread; results are appended to covers_model
        and announced via the covers-fetched signal when done.
        """
        db_strings = COVER_MANAGER.find_covers(self.track)
        if db_strings:
            for db_string in db_strings:
                if self.stopper.is_set():
                    return
                coverdata = COVER_MANAGER.get_cover_data(db_string)
                # Pre-render everything for faster display later
                pixbuf = pixbuf_from_data(coverdata)
                if pixbuf:
                    self.covers_model.append(
                        [
                            (db_string, coverdata),
                            pixbuf,
                            pixbuf.scale_simple(50, 50, GdkPixbuf.InterpType.BILINEAR),
                        ]
                    )
        self.emit("covers-fetched", db_strings)
    def do_covers_fetched(self, db_strings):
        """
        Finishes the dialog setup after all covers have been fetched
        """
        if self.stopper.is_set():
            return
        self.stack.set_visible_child(self.stack_ready)
        self.previews_box.set_model(self.covers_model)
        if db_strings:
            self.cover_image_box.pack_start(self.cover, True, True, 0)
            self.cover.show()
            self.set_button.set_sensitive(True)
            # Show thumbnail bar if more than one cover was found
            if len(db_strings) > 1:
                self.previews_box.set_no_show_all(False)
                self.previews_box.show_all()
            # Try to select the current cover of the track, fallback to first
            track_db_string = COVER_MANAGER.get_db_string(self.track)
            position = (
                db_strings.index(track_db_string)
                if track_db_string in db_strings
                else 0
            )
            self.previews_box.select_path(Gtk.TreePath(position))
        else:
            # No covers at all: hide everything and show a warning
            self.builder.get_object("stack").hide()
            self.builder.get_object("actions_box").hide()
            self.message.show_warning(
                _("No covers found."),
                _(
                    "None of the enabled sources has a cover for this track, try enabling more sources."
                ),
            )
    def on_cancel_button_clicked(self, button):
        """
        Closes the cover chooser
        """
        # Notify the fetcher thread to stop
        self.stopper.set()
        self.window.destroy()
    def on_set_button_clicked(self, button):
        """
        Chooses the current cover and saves it to the database
        """
        paths = self.previews_box.get_selected_items()
        if paths:
            path = paths[0]
            # Model column 0 holds the (db_string, coverdata) pair
            coverdata = self.covers_model[path][0]
            COVER_MANAGER.set_cover(self.track, coverdata[0], coverdata[1])
            self.emit("cover-chosen", self.track, coverdata[1])
            self.window.destroy()
    def on_previews_box_selection_changed(self, iconview):
        """
        Switches the currently displayed cover
        """
        paths = self.previews_box.get_selected_items()
        if paths:
            path = paths[0]
            db_string = self.covers_model[path][0]
            # The provider name is the part before ":" in the db string
            source = db_string[0].split(":", 1)[0]
            provider = providers.get_provider("covers", source)
            pixbuf = self.covers_model[path][1]
            self.cover.set_image_pixbuf(pixbuf)
            self.size_label.set_text(
                _("{width}x{height} pixels").format(
                    width=pixbuf.get_width(), height=pixbuf.get_height()
                )
            )
            # Display readable title of the provider, fallback to its name
            self.source_label.set_text(getattr(provider, "title", source))
            self.set_button.set_sensitive(True)
        else:
            self.set_button.set_sensitive(False)
    def on_previews_box_item_activated(self, iconview, path):
        """
        Triggers selecting the current cover
        """
        self.set_button.clicked()
    def on_message_response(self, widget, response):
        """
        Handles the response for closing
        """
        if response == Gtk.ResponseType.CLOSE:
            self.window.destroy()
|
PyObjCTest | test_nstableview | from AppKit import *
from PyObjCTools.TestSupport import *
# Python 3 has no separate ``unicode`` type; alias it to ``str`` so the
# isinstance assertions below work under both Python 2 and 3.
try:
    unicode
except NameError:
    unicode = str
class TestNSTableViewHelper(NSObject):
    """Stub NSTableView delegate/data-source implementations.

    The method bodies are irrelevant; only the selector signatures matter,
    since the TestNSTableView.testProtocols* tests below inspect the
    Objective-C type encodings PyObjC derives from them.
    """
    def tableView_viewForTableColumn_row_(self, a, b, c):
        pass
    def tableView_rowViewForRow_(self, a, b):
        pass
    def tableView_didAddRowView_forRow_(self, a, b, c):
        pass
    def tableView_didRemoveRowView_forRow_(self, a, b, c):
        pass
    # NOTE(review): "pastboard" (sic) matches the selector name checked in
    # testProtococols10_7 below; renaming it would break that check.
    def tableView_pastboardWriterForRow_(self, a, b):
        pass
    def tableView_draggingSession_willBeginAtPoint_forRowIndexes_(self, a, b, c, d):
        pass
    def tableView_draggingSession_endedAtPoint_operation_(self, a, b, c, d):
        pass
    def numberOfRowsInTableView_(self, tv):
        return 1
    def tableView_objectValueForTableColumn_row_(self, tv, c, r):
        return 1
    def tableView_setObjectValue_forTableColumn_row_(self, o, tv, c, r):
        pass
    def tableView_writeRowsWithIndexes_toPasteboard_(self, tv, r, p):
        return 1
    def tableView_validateDrop_proposedRow_proposedDropOperation_(self, tv, dr, r, o):
        return 1
    def tableView_acceptDrop_row_dropOperation_(self, tv, dr, r, o):
        return 1
    def tableView_writeRows_toPasteboard_(self, tv, r, p):
        return 1
    def tableView_willDisplayCell_forTableColumn_row_(self, tv, c, tc, r):
        return 1
    def tableView_shouldEditTableColumn_row_(self, tv, tc, r):
        return 1
    def selectionShouldChangeInTableView_(self, tv):
        return 1
    def tableView_shouldSelectTableColumn_(self, tv, tc):
        return 1
    def tableView_toolTipForCell_rect_tableColumn_row_mouseLocation_(
        self, tv, c, re, tc, r, l
    ):
        return 1
    def tableView_heightOfRow_(self, tv, r):
        return 1
    def tableView_typeSelectStringForTableColumn_row_(self, tv, tc, r):
        return 1
    def tableView_nextTypeSelectMatchFromRow_toRow_forString_(self, tv, r1, r2, s):
        return 1
    def tableView_shouldTypeSelectForEvent_withCurrentSearchString_(self, tv, e, s):
        return 1
    def tableView_shouldShowCellExpansionForTableColumn_row_(self, tv, tc, r):
        return 1
    def tableView_shouldTrackCell_forTableColumn_row_(self, tv, c, tc, r):
        return 1
    def tableView_dataCellForTableColumn_row_(self, tv, tc, r):
        return 1
    def tableView_isGroupRow_(self, tv, r):
        return 1
    def tableView_sizeToFitWidthOfColumn_(self, tv, c):
        return 1
    def tableView_shouldReorderColumn_toColumn_(self, tv, c1, c2):
        return 1
class TestNSTableView(TestCase):
    """Checks NSTableView constants and the Objective-C type encodings of
    its methods and delegate/data-source protocol selectors, grouped by
    the minimum OS version that introduced them."""
    def testConstants(self):
        """Constants available on all supported OS versions"""
        self.assertEqual(NSTableViewDropOn, 0)
        self.assertEqual(NSTableViewDropAbove, 1)
        self.assertEqual(NSTableViewNoColumnAutoresizing, 0)
        self.assertEqual(NSTableViewUniformColumnAutoresizingStyle, 1)
        self.assertEqual(NSTableViewSequentialColumnAutoresizingStyle, 2)
        self.assertEqual(NSTableViewReverseSequentialColumnAutoresizingStyle, 3)
        self.assertEqual(NSTableViewLastColumnOnlyAutoresizingStyle, 4)
        self.assertEqual(NSTableViewFirstColumnOnlyAutoresizingStyle, 5)
        self.assertEqual(NSTableViewGridNone, 0)
        self.assertEqual(NSTableViewSolidVerticalGridLineMask, 1 << 0)
        self.assertEqual(NSTableViewSolidHorizontalGridLineMask, 1 << 1)
        self.assertEqual(NSTableViewSelectionHighlightStyleRegular, 0)
        self.assertEqual(NSTableViewSelectionHighlightStyleSourceList, 1)
        self.assertIsInstance(NSTableViewSelectionDidChangeNotification, unicode)
        self.assertIsInstance(NSTableViewColumnDidMoveNotification, unicode)
        self.assertIsInstance(NSTableViewColumnDidResizeNotification, unicode)
        self.assertIsInstance(NSTableViewSelectionIsChangingNotification, unicode)
    @min_os_level("10.6")
    def testConstants10_6(self):
        """Constants introduced in OS X 10.6"""
        self.assertEqual(NSTableViewSelectionHighlightStyleNone, -1)
        self.assertEqual(NSTableViewDraggingDestinationFeedbackStyleNone, -1)
        self.assertEqual(NSTableViewDraggingDestinationFeedbackStyleRegular, 0)
        self.assertEqual(NSTableViewDraggingDestinationFeedbackStyleSourceList, 1)
    @min_os_level("10.7")
    def testConstants10_7(self):
        """Constants introduced in OS X 10.7"""
        self.assertEqual(NSTableViewDashedHorizontalGridLineMask, 1 << 3)
        self.assertEqual(NSTableViewRowSizeStyleDefault, -1)
        self.assertEqual(NSTableViewRowSizeStyleCustom, 0)
        self.assertEqual(NSTableViewRowSizeStyleSmall, 1)
        self.assertEqual(NSTableViewRowSizeStyleMedium, 2)
        self.assertEqual(NSTableViewRowSizeStyleLarge, 3)
        self.assertEqual(NSTableViewAnimationEffectNone, 0)
        self.assertEqual(NSTableViewAnimationEffectFade, 1)
        self.assertEqual(NSTableViewAnimationEffectGap, 2)
        self.assertEqual(NSTableViewAnimationSlideUp, 0x10)
        self.assertEqual(NSTableViewAnimationSlideDown, 0x20)
        self.assertEqual(NSTableViewAnimationSlideLeft, 0x30)
        self.assertEqual(NSTableViewAnimationSlideRight, 0x40)
        self.assertIsInstance(NSTableViewRowViewKey, unicode)
        self.assertEqual(NSTableViewDashedHorizontalGridLineMask, 1 << 3)
    def testMethods(self):
        """BOOL/in-out argument and result encodings of NSTableView methods"""
        self.assertArgIsBOOL(NSTableView.setAllowsColumnReordering_, 0)
        self.assertResultIsBOOL(NSTableView.allowsColumnReordering)
        self.assertArgIsBOOL(NSTableView.setAllowsColumnResizing_, 0)
        self.assertResultIsBOOL(NSTableView.allowsColumnResizing)
        self.assertArgIsBOOL(NSTableView.setUsesAlternatingRowBackgroundColors_, 0)
        self.assertResultIsBOOL(NSTableView.usesAlternatingRowBackgroundColors)
        self.assertArgIsBOOL(NSTableView.setVerticalMotionCanBeginDrag_, 0)
        self.assertResultIsBOOL(NSTableView.verticalMotionCanBeginDrag)
        self.assertResultIsBOOL(NSTableView.canDragRowsWithIndexes_atPoint_)
        self.assertArgIsInOut(
            NSTableView.dragImageForRowsWithIndexes_tableColumns_event_offset_, 3
        )
        self.assertArgIsBOOL(NSTableView.setDraggingSourceOperationMask_forLocal_, 1)
        self.assertResultIsBOOL(NSTableView.verticalMotionCanBeginDrag)
        self.assertArgIsBOOL(NSTableView.setAllowsMultipleSelection_, 0)
        self.assertResultIsBOOL(NSTableView.allowsMultipleSelection)
        self.assertArgIsBOOL(NSTableView.setAllowsEmptySelection_, 0)
        self.assertResultIsBOOL(NSTableView.allowsEmptySelection)
        self.assertArgIsBOOL(NSTableView.setAllowsColumnSelection_, 0)
        self.assertResultIsBOOL(NSTableView.allowsColumnSelection)
        self.assertArgIsBOOL(NSTableView.selectColumnIndexes_byExtendingSelection_, 1)
        self.assertArgIsBOOL(NSTableView.selectRowIndexes_byExtendingSelection_, 1)
        self.assertResultIsBOOL(NSTableView.isColumnSelected_)
        self.assertResultIsBOOL(NSTableView.isRowSelected_)
        self.assertResultIsBOOL(NSTableView.textShouldBeginEditing_)
        self.assertResultIsBOOL(NSTableView.textShouldEndEditing_)
        self.assertArgIsBOOL(NSTableView.setAutosaveTableColumns_, 0)
        self.assertResultIsBOOL(NSTableView.autosaveTableColumns)
        self.assertArgIsBOOL(NSTableView.editColumn_row_withEvent_select_, 3)
        self.assertArgHasType(
            NSTableView.drawBackgroundInClipRect_, 0, NSRect.__typestr__
        )
        self.assertArgIsBOOL(NSTableView.setDrawsGrid_, 0)
        self.assertResultIsBOOL(NSTableView.drawsGrid)
        self.assertArgIsBOOL(NSTableView.selectColumn_byExtendingSelection_, 1)
        self.assertArgIsBOOL(NSTableView.selectRow_byExtendingSelection_, 1)
        self.assertArgIsInOut(NSTableView.dragImageForRows_event_dragImageOffset_, 2)
        self.assertArgIsBOOL(NSTableView.setAutoresizesAllColumnsToFit_, 0)
        self.assertResultIsBOOL(NSTableView.autoresizesAllColumnsToFit)
    @min_os_level("10.5")
    def testMethods10_5(self):
        """Method encodings introduced in OS X 10.5"""
        self.assertArgIsBOOL(NSTableView.setAllowsTypeSelect_, 0)
        self.assertResultIsBOOL(NSTableView.allowsTypeSelect)
        self.assertArgHasType(NSTableView.columnIndexesInRect_, 0, NSRect.__typestr__)
    @min_os_level("10.6")
    def testMethods10_6(self):
        """Method encodings introduced in OS X 10.6"""
        self.assertResultIsBOOL(NSTableView.shouldFocusCell_atColumn_row_)
    @min_os_level("10.7")
    def testMethods10_7(self):
        """Method encodings introduced in OS X 10.7"""
        self.assertArgIsBOOL(NSTableView.viewAtColumn_row_makeIfNecessary_, 2)
        self.assertArgIsBOOL(NSTableView.rowViewAtRow_makeIfNecessary_, 1)
        self.assertArgIsBlock(
            NSTableView.enumerateAvailableRowViewsUsingBlock_,
            0,
            b"v@" + objc._C_NSInteger,
        )
        self.assertResultIsBOOL(NSTableView.floatsGroupRows)
        self.assertArgIsBOOL(NSTableView.setFloatsGroupRows_, 0)
    def testProtocols(self):
        """Encodings PyObjC derives for the delegate/data-source stubs
        defined on TestNSTableViewHelper above"""
        self.assertResultHasType(
            TestNSTableViewHelper.numberOfRowsInTableView_, objc._C_NSInteger
        )
        self.assertArgHasType(
            TestNSTableViewHelper.tableView_objectValueForTableColumn_row_,
            2,
            objc._C_NSInteger,
        )
        self.assertArgHasType(
            TestNSTableViewHelper.tableView_setObjectValue_forTableColumn_row_,
            3,
            objc._C_NSInteger,
        )
        self.assertResultIsBOOL(
            TestNSTableViewHelper.tableView_writeRowsWithIndexes_toPasteboard_
        )
        self.assertResultHasType(
            TestNSTableViewHelper.tableView_validateDrop_proposedRow_proposedDropOperation_,
            objc._C_NSUInteger,
        )
        self.assertArgHasType(
            TestNSTableViewHelper.tableView_validateDrop_proposedRow_proposedDropOperation_,
            2,
            objc._C_NSInteger,
        )
        self.assertArgHasType(
            TestNSTableViewHelper.tableView_validateDrop_proposedRow_proposedDropOperation_,
            3,
            objc._C_NSUInteger,
        )
        self.assertResultIsBOOL(
            TestNSTableViewHelper.tableView_acceptDrop_row_dropOperation_
        )
        self.assertArgHasType(
            TestNSTableViewHelper.tableView_acceptDrop_row_dropOperation_,
            2,
            objc._C_NSInteger,
        )
        self.assertArgHasType(
            TestNSTableViewHelper.tableView_acceptDrop_row_dropOperation_,
            3,
            objc._C_NSUInteger,
        )
        self.assertResultIsBOOL(TestNSTableViewHelper.tableView_writeRows_toPasteboard_)
        self.assertArgHasType(
            TestNSTableViewHelper.tableView_willDisplayCell_forTableColumn_row_,
            3,
            objc._C_NSInteger,
        )
        self.assertResultIsBOOL(
            TestNSTableViewHelper.tableView_shouldEditTableColumn_row_
        )
        self.assertArgHasType(
            TestNSTableViewHelper.tableView_shouldEditTableColumn_row_,
            2,
            objc._C_NSInteger,
        )
        self.assertResultIsBOOL(TestNSTableViewHelper.selectionShouldChangeInTableView_)
        self.assertResultIsBOOL(
            TestNSTableViewHelper.tableView_shouldSelectTableColumn_
        )
        self.assertArgHasType(
            TestNSTableViewHelper.tableView_toolTipForCell_rect_tableColumn_row_mouseLocation_,
            2,
            b"N^" + NSRect.__typestr__,
        )
        self.assertArgHasType(
            TestNSTableViewHelper.tableView_toolTipForCell_rect_tableColumn_row_mouseLocation_,
            4,
            objc._C_NSInteger,
        )
        self.assertArgHasType(
            TestNSTableViewHelper.tableView_toolTipForCell_rect_tableColumn_row_mouseLocation_,
            5,
            NSPoint.__typestr__,
        )
        self.assertResultHasType(
            TestNSTableViewHelper.tableView_heightOfRow_, objc._C_CGFloat
        )
        self.assertArgHasType(
            TestNSTableViewHelper.tableView_heightOfRow_, 1, objc._C_NSInteger
        )
        self.assertArgHasType(
            TestNSTableViewHelper.tableView_typeSelectStringForTableColumn_row_,
            2,
            objc._C_NSInteger,
        )
        self.assertResultHasType(
            TestNSTableViewHelper.tableView_nextTypeSelectMatchFromRow_toRow_forString_,
            objc._C_NSInteger,
        )
        self.assertArgHasType(
            TestNSTableViewHelper.tableView_nextTypeSelectMatchFromRow_toRow_forString_,
            1,
            objc._C_NSInteger,
        )
        self.assertArgHasType(
            TestNSTableViewHelper.tableView_nextTypeSelectMatchFromRow_toRow_forString_,
            2,
            objc._C_NSInteger,
        )
        self.assertResultIsBOOL(
            TestNSTableViewHelper.tableView_shouldTypeSelectForEvent_withCurrentSearchString_
        )
        self.assertResultIsBOOL(
            TestNSTableViewHelper.tableView_shouldShowCellExpansionForTableColumn_row_
        )
        self.assertArgHasType(
            TestNSTableViewHelper.tableView_shouldShowCellExpansionForTableColumn_row_,
            2,
            objc._C_NSInteger,
        )
        self.assertResultIsBOOL(
            TestNSTableViewHelper.tableView_shouldTrackCell_forTableColumn_row_
        )
        self.assertArgHasType(
            TestNSTableViewHelper.tableView_shouldTrackCell_forTableColumn_row_,
            3,
            objc._C_NSInteger,
        )
        self.assertArgHasType(
            TestNSTableViewHelper.tableView_dataCellForTableColumn_row_,
            2,
            objc._C_NSInteger,
        )
        self.assertResultIsBOOL(TestNSTableViewHelper.tableView_isGroupRow_)
        self.assertArgHasType(
            TestNSTableViewHelper.tableView_isGroupRow_, 1, objc._C_NSInteger
        )
    # NOTE(review): "Protococols" is misspelled in the two method names
    # below; unittest still discovers them via the "test" prefix, so they
    # run regardless. A doc-only change keeps the names untouched.
    @min_os_level("10.6")
    def testProtococols10_6(self):
        """Protocol encodings introduced in OS X 10.6"""
        self.assertResultHasType(
            TestNSTableViewHelper.tableView_sizeToFitWidthOfColumn_, objc._C_CGFloat
        )
        self.assertArgHasType(
            TestNSTableViewHelper.tableView_sizeToFitWidthOfColumn_,
            1,
            objc._C_NSInteger,
        )
        self.assertResultIsBOOL(
            TestNSTableViewHelper.tableView_shouldReorderColumn_toColumn_
        )
        self.assertArgHasType(
            TestNSTableViewHelper.tableView_shouldReorderColumn_toColumn_,
            1,
            objc._C_NSInteger,
        )
        self.assertArgHasType(
            TestNSTableViewHelper.tableView_shouldReorderColumn_toColumn_,
            2,
            objc._C_NSInteger,
        )
    @min_os_level("10.7")
    def testProtococols10_7(self):
        """Protocol encodings introduced in OS X 10.7"""
        self.assertArgHasType(
            TestNSTableViewHelper.tableView_viewForTableColumn_row_,
            2,
            objc._C_NSInteger,
        )
        self.assertArgHasType(
            TestNSTableViewHelper.tableView_rowViewForRow_, 1, objc._C_NSInteger
        )
        self.assertArgHasType(
            TestNSTableViewHelper.tableView_didAddRowView_forRow_, 2, objc._C_NSInteger
        )
        self.assertArgHasType(
            TestNSTableViewHelper.tableView_didRemoveRowView_forRow_,
            2,
            objc._C_NSInteger,
        )
        self.assertArgHasType(
            TestNSTableViewHelper.tableView_pastboardWriterForRow_, 1, objc._C_NSInteger
        )
        self.assertArgHasType(
            TestNSTableViewHelper.tableView_draggingSession_willBeginAtPoint_forRowIndexes_,
            2,
            NSPoint.__typestr__,
        )
        self.assertArgHasType(
            TestNSTableViewHelper.tableView_draggingSession_endedAtPoint_operation_,
            2,
            NSPoint.__typestr__,
        )
if __name__ == "__main__":
    # PyObjCTools.TestSupport's main() discovers and runs the TestCases above
    main()
|
flaskbb | deprecation | # -*- coding: utf-8 -*-
"""
flaskbb.deprecation
~~~~~~~~~~~~~~~~~~~
Module used for deprecation handling in FlaskBB
:copyright: (c) 2018 the FlaskBB Team.
:license: BSD, see LICENSE for more details.
"""
import inspect
import warnings
from abc import ABC, abstractproperty
from functools import wraps
from flask_babelplus import gettext as _
class FlaskBBWarning(Warning):
    """Root of FlaskBB's warning hierarchy.

    Issuing every FlaskBB warning through this common base class lets
    applications filter all of them with a single ``warnings`` rule.
    """
class FlaskBBDeprecation(DeprecationWarning, FlaskBBWarning, ABC):
    """Base class for deprecations raised by FlaskBB itself.

    Concrete subclasses must define a ``version`` attribute, the release
    tuple in which the deprecated feature will actually be removed::

        class RemovedInPluginv3(FlaskBBDeprecation):
            version = (3, 0, 0)
    """

    # Abstract placeholder; subclasses override this with a version tuple.
    version = abstractproperty(lambda self: None)
class RemovedInFlaskBB3(FlaskBBDeprecation):
    """Marks features scheduled for removal in FlaskBB 3.0.0."""

    version = (3, 0, 0)
def deprecated(message="", category=RemovedInFlaskBB3):
    """
    Flags a function or method as deprecated, should not be used on
    classes as it will break inheritance and introspection.

    The removal version is read from ``category.version`` and included in
    the warning text, which is also appended to the wrapped function's
    docstring.

    :param message: Optional message to display along with deprecation warning.
    :param category: Warning category to use, defaults to RemovedInFlaskBB3,
        if provided must be a subclass of FlaskBBDeprecation.
    :raises ValueError: at decoration time, if category is not a subclass
        of FlaskBBDeprecation.
    """
    def deprecation_decorator(f):
        # Validate at decoration time so misuse fails fast, not on call
        if not issubclass(category, FlaskBBDeprecation):
            raise ValueError(
                "Expected subclass of FlaskBBDeprecation for category, got {}".format(  # noqa
                    str(category)
                )
            )
        version = ".".join([str(x) for x in category.version])
        warning = _(
            "%(name)s is deprecated and will be removed in version %(version)s.",  # noqa
            name=f.__name__,
            version=version,
        )
        if message:
            warning = "{} {}".format(warning, message)
        # Surface the deprecation notice in the docstring as well
        docstring = f.__doc__
        if docstring:
            docstring = "\n".join([docstring, warning])
        else:
            docstring = warning
        f.__doc__ = docstring
        @wraps(f)
        def wrapper(*a, **k):
            # Attribute the warning to the caller's file/line (f_back)
            # rather than to this wrapper itself
            frame = inspect.currentframe().f_back
            warnings.warn_explicit(
                warning,
                category=category,
                filename=inspect.getfile(frame.f_code),
                lineno=frame.f_lineno,
            )
            return f(*a, **k)
        return wrapper
    return deprecation_decorator
|
extractor | sexu | from __future__ import unicode_literals
from .common import InfoExtractor
class SexuIE(InfoExtractor):
    """Extractor for sexu.com video pages."""

    _VALID_URL = r"https?://(?:www\.)?sexu\.com/(?P<id>\d+)"
    _TEST = {
        "url": "http://sexu.com/961791/",
        "md5": "ff615aca9691053c94f8f10d96cd7884",
        "info_dict": {
            "id": "961791",
            "ext": "mp4",
            "title": "md5:4d05a19a5fc049a63dbbaf05fb71d91b",
            "description": "md5:2b75327061310a3afb3fbd7d09e2e403",
            "categories": list,  # NSFW
            "thumbnail": r"re:https?://.*\.jpg$",
            "age_limit": 18,
        },
    }

    def _real_extract(self, url):
        video_id = self._match_id(url)
        webpage = self._download_webpage(url, video_id)
        # The player configuration is passed inline to a jwplayer-style
        # .setup({...}) call embedded in the page
        jwvideo = self._parse_json(
            self._search_regex(r"\.setup\(\s*({.+?})\s*\);", webpage, "jwvideo"),
            video_id,
        )
        sources = jwvideo["sources"]
        formats = []
        for source in sources:
            if not source.get("file"):
                continue
            # Height is encoded in labels such as "720p"; it may be absent
            height = self._search_regex(
                r"^(\d+)[pP]", source.get("label", ""), "height", default=None
            )
            formats.append(
                {
                    "url": source["file"].replace("\\", ""),
                    "format_id": source.get("label"),
                    # int(None) used to raise TypeError when no height was
                    # found; leave height unset (None) in that case instead
                    "height": int(height) if height else None,
                }
            )
        self._sort_formats(formats)
        title = self._html_search_regex(
            r"<title>([^<]+)\s*-\s*Sexu\.Com</title>", webpage, "title"
        )
        description = self._html_search_meta("description", webpage, "description")
        thumbnail = jwvideo.get("image")
        categories_str = self._html_search_meta("keywords", webpage, "categories")
        categories = None if categories_str is None else categories_str.split(",")
        return {
            "id": video_id,
            "title": title,
            "description": description,
            "thumbnail": thumbnail,
            "categories": categories,
            "formats": formats,
            "age_limit": 18,
        }
|
cssutils | _codec3 | #!/usr/bin/env python
"""Python codec for CSS."""
__docformat__ = "restructuredtext"
__author__ = "Walter Doerwald"
__version__ = "$Id: util.py 1114 2008-03-05 13:22:59Z cthedot $"
import codecs
import marshal
import sys
# We're using bits to store all possible candidate encodings (or variants, i.e.
# we have two bits for the variants of UTF-16 and two for the
# variants of UTF-32).
#
# Prefixes for various CSS encodings
# UTF-8-SIG xEF xBB xBF
# UTF-16 (LE) xFF xFE ~x00|~x00
# UTF-16 (BE) xFE xFF
# UTF-16-LE @ x00 @ x00
# UTF-16-BE x00 @
# UTF-32 (LE) xFF xFE x00 x00
# UTF-32 (BE) x00 x00 xFE xFF
# UTF-32-LE @ x00 x00 x00
# UTF-32-BE x00 x00 x00 @
# CHARSET @ c h a ...
def chars(bytestring):
    """Map *bytestring* byte-for-byte to a text string (one chr per byte)."""
    pieces = []
    for byte in bytestring:
        pieces.append(chr(byte))
    return "".join(pieces)
def detectencoding_str(input, final=False):
    """
    Detect the encoding of the byte string ``input``, which contains the
    beginning of a CSS file. This function returns a tuple of the detected
    encoding (or ``None`` if it hasn't got enough data) and a flag that
    indicates whether that encoding has been detected explicitly or
    implicitly. To detect the encoding the first few bytes are used (or if
    ``input`` is ASCII compatible and starts with a charset rule the encoding
    name from the rule). "Explicit" detection means that the bytes start with
    a BOM or a charset rule.

    If the encoding can't be detected yet, ``None`` is returned as the
    encoding. ``final`` specifies whether more data will be available in later
    calls or not. If ``final`` is true, ``detectencoding_str()`` will never
    return ``None`` as the encoding.
    """
    # A bit for every candidate.  Each prefix byte examined below clears the
    # bits of every candidate encoding whose signature it contradicts; when
    # exactly one bit survives (and enough bytes have been seen to be sure),
    # that candidate wins.
    CANDIDATE_UTF_8_SIG = 1
    CANDIDATE_UTF_16_AS_LE = 2
    CANDIDATE_UTF_16_AS_BE = 4
    CANDIDATE_UTF_16_LE = 8
    CANDIDATE_UTF_16_BE = 16
    CANDIDATE_UTF_32_AS_LE = 32
    CANDIDATE_UTF_32_AS_BE = 64
    CANDIDATE_UTF_32_LE = 128
    CANDIDATE_UTF_32_BE = 256
    CANDIDATE_CHARSET = 512
    candidates = 1023  # all candidates
    # input = chars(input)
    li = len(input)
    if li >= 1:
        # Check first byte against each signature (see prefix table above).
        c = input[0]
        if c != b"\xef"[0]:
            candidates &= ~CANDIDATE_UTF_8_SIG
        if c != b"\xff"[0]:
            candidates &= ~(CANDIDATE_UTF_32_AS_LE | CANDIDATE_UTF_16_AS_LE)
        if c != b"\xfe"[0]:
            candidates &= ~CANDIDATE_UTF_16_AS_BE
        if c != b"@"[0]:
            candidates &= ~(
                CANDIDATE_UTF_32_LE | CANDIDATE_UTF_16_LE | CANDIDATE_CHARSET
            )
        if c != b"\x00"[0]:
            candidates &= ~(
                CANDIDATE_UTF_32_AS_BE | CANDIDATE_UTF_32_BE | CANDIDATE_UTF_16_BE
            )
    if li >= 2:
        # Check second byte
        c = input[1]
        if c != b"\xbb"[0]:
            candidates &= ~CANDIDATE_UTF_8_SIG
        if c != b"\xfe"[0]:
            candidates &= ~(CANDIDATE_UTF_16_AS_LE | CANDIDATE_UTF_32_AS_LE)
        if c != b"\xff"[0]:
            candidates &= ~CANDIDATE_UTF_16_AS_BE
        if c != b"\x00"[0]:
            candidates &= ~(
                CANDIDATE_UTF_16_LE
                | CANDIDATE_UTF_32_AS_BE
                | CANDIDATE_UTF_32_LE
                | CANDIDATE_UTF_32_BE
            )
        if c != b"@"[0]:
            candidates &= ~CANDIDATE_UTF_16_BE
        if c != b"c"[0]:
            candidates &= ~CANDIDATE_CHARSET
    if li >= 3:
        # Check third byte
        c = input[2]
        if c != b"\xbf"[0]:
            candidates &= ~CANDIDATE_UTF_8_SIG
        if c != b"c"[0]:
            candidates &= ~CANDIDATE_UTF_16_LE
        if c != b"\x00"[0]:
            candidates &= ~(
                CANDIDATE_UTF_32_AS_LE
                | CANDIDATE_UTF_32_LE
                | CANDIDATE_UTF_32_BE
            )
        if c != b"\xfe"[0]:
            candidates &= ~CANDIDATE_UTF_32_AS_BE
        if c != b"h"[0]:
            candidates &= ~CANDIDATE_CHARSET
    if li >= 4:
        # Check fourth byte
        c = input[3]
        # b"\x00\x00"[0:2] is just b"\x00\x00": a UTF-16-LE BOM followed by
        # two NUL bytes is really a UTF-32-LE BOM, so drop the UTF-16 reading.
        if input[2:4] == b"\x00\x00"[0:2]:
            candidates &= ~CANDIDATE_UTF_16_AS_LE
        if c != b"\x00"[0]:
            candidates &= ~(
                CANDIDATE_UTF_16_LE
                | CANDIDATE_UTF_32_AS_LE
                | CANDIDATE_UTF_32_LE
            )
        if c != b"\xff"[0]:
            candidates &= ~CANDIDATE_UTF_32_AS_BE
        if c != b"@"[0]:
            candidates &= ~CANDIDATE_UTF_32_BE
        if c != b"a"[0]:
            candidates &= ~CANDIDATE_CHARSET
    # No signature matched at all: ASCII-compatible content without a charset
    # rule, so fall back to UTF-8 (implicit).
    if candidates == 0:
        return ("utf-8", False)
    if not (candidates & (candidates - 1)):  # only one candidate remaining
        # Each branch also requires enough bytes to have been seen for the
        # match to be unambiguous; otherwise fall through to the "don't know
        # yet" handling at the bottom.
        if candidates == CANDIDATE_UTF_8_SIG and li >= 3:
            return ("utf-8-sig", True)
        elif candidates == CANDIDATE_UTF_16_AS_LE and li >= 2:
            return ("utf-16", True)
        elif candidates == CANDIDATE_UTF_16_AS_BE and li >= 2:
            return ("utf-16", True)
        elif candidates == CANDIDATE_UTF_16_LE and li >= 4:
            return ("utf-16-le", False)
        elif candidates == CANDIDATE_UTF_16_BE and li >= 2:
            return ("utf-16-be", False)
        elif candidates == CANDIDATE_UTF_32_AS_LE and li >= 4:
            return ("utf-32", True)
        elif candidates == CANDIDATE_UTF_32_AS_BE and li >= 4:
            return ("utf-32", True)
        elif candidates == CANDIDATE_UTF_32_LE and li >= 4:
            return ("utf-32-le", False)
        elif candidates == CANDIDATE_UTF_32_BE and li >= 4:
            return ("utf-32-be", False)
        elif candidates == CANDIDATE_CHARSET and li >= 4:
            # ASCII-compatible input beginning with '@cha': try to read the
            # encoding name out of the @charset rule itself.
            prefix = '@charset "'
            charsinput = chars(input)
            if charsinput[: len(prefix)] == prefix:
                pos = charsinput.find('"', len(prefix))
                if pos >= 0:
                    # TODO: return str and not bytes!
                    return (charsinput[len(prefix) : pos], True)
    # if this is the last call, and we haven't determined an encoding yet,
    # we default to UTF-8
    if final:
        return ("utf-8", False)
    return (None, False)  # don't know yet
def detectencoding_unicode(input, final=False):
    """
    Detect the encoding of the unicode string ``input``, which contains the
    beginning of a CSS file. The encoding is detected from the charset rule
    at the beginning of ``input``. If there is no charset rule, ``"utf-8"``
    will be returned.

    If the encoding can't be detected yet, ``None`` is returned. ``final``
    specifies whether more data will be available in later calls or not. If
    ``final`` is true, ``detectencoding_unicode()`` will never return ``None``.
    """
    prefix = '@charset "'
    if input.startswith(prefix):
        quote_pos = input.find('"', len(prefix))
        if quote_pos >= 0:
            # Complete rule: the encoding name sits between the quotes.
            return (input[len(prefix) : quote_pos], True)
        # Rule started but the closing quote hasn't arrived yet.
        return (None, False)
    if final or not prefix.startswith(input):
        # Either no more data is coming, or the input can no longer grow
        # into a charset rule: default to UTF-8.
        return ("utf-8", False)
    return (None, False)  # don't know yet
def _fixencoding(input, encoding, final=False):
"""
Replace the name of the encoding in the charset rule at the beginning of
``input`` with ``encoding``. If ``input`` doesn't starts with a charset
rule, ``input`` will be returned unmodified.
If the encoding can't be found yet, ``None`` is returned. ``final``
specifies whether more data will be available in later calls or not.
If ``final`` is true, ``_fixencoding()`` will never return ``None``.
"""
prefix = '@charset "'
if len(input) > len(prefix):
if input.startswith(prefix):
pos = input.find('"', len(prefix))
if pos >= 0:
if encoding.replace("_", "-").lower() == "utf-8-sig":
encoding = "utf-8"
return prefix + encoding + input[pos:]
# we haven't seen the end of the encoding name yet => fall through
else:
return input # doesn't start with prefix, so nothing to fix
elif not prefix.startswith(input) or final:
# can't turn out to be a @charset rule later (or there is no "later")
return input
if final:
return input
return None # don't know yet
def decode(input, errors="strict", encoding=None, force=True):
    """Decode CSS bytes, detecting the encoding when necessary.

    When ``encoding`` is None (or ``force`` is false and the input carries an
    explicit BOM/@charset), the encoding is taken from the input itself.  The
    decoded text has its @charset rule rewritten to name the encoding used.
    Returns ``(text, bytes_consumed)``.
    """
    # Python 3 memoryview objects expose tobytes(); anything else passes
    # through unchanged.
    try:
        input = input.tobytes()
    except AttributeError:
        pass
    if encoding is None or not force:
        (detected, explicit) = detectencoding_str(input, True)
        if detected == "css":
            raise ValueError("css not allowed as encoding name")
        if encoding is None or (explicit and not force):
            # Take the encoding from the input
            encoding = detected
    # NEEDS: change in parse.py (str to bytes!)
    (decoded, consumed) = codecs.getdecoder(encoding)(input, errors)
    return (_fixencoding(decoded, str(encoding), True), consumed)
def encode(input, errors="strict", encoding=None):
    """Encode CSS text, honouring (and rewriting) its @charset rule.

    When ``encoding`` is None it is read from the @charset rule (defaulting
    to UTF-8).  Returns ``(encoded_bytes, chars_consumed)``.
    """
    consumed = len(input)
    if encoding is None:
        encoding = detectencoding_unicode(input, True)[0]
    # The rule always names plain "utf-8"; the -sig variant adds the BOM
    # during the actual encoding step below.
    normalized = encoding.replace("_", "-").lower()
    rule_name = "utf-8" if normalized == "utf-8-sig" else str(encoding)
    input = _fixencoding(input, rule_name, True)
    if encoding == "css":
        raise ValueError("css not allowed as encoding name")
    return (codecs.getencoder(encoding)(input, errors)[0], consumed)
def _bytes2int(bytes):
# Helper: convert an 8 bit string into an ``int``.
i = 0
for byte in bytes:
i = (i << 8) + ord(byte)
return i
def _int2bytes(i):
# Helper: convert an ``int`` into an 8-bit string.
v = []
while i:
v.insert(0, chr(i & 0xFF))
i >>= 8
return "".join(v)
if hasattr(codecs, "IncrementalDecoder"):

    class IncrementalDecoder(codecs.IncrementalDecoder):
        """Incremental decoder for the ``css`` codec.

        Buffers input until the encoding can be detected (from a BOM or a
        ``@charset`` rule), then delegates to the incremental decoder of
        that encoding and rewrites the ``@charset`` rule to name it.
        """

        def __init__(self, errors="strict", encoding=None, force=True):
            # ``encoding`` overrides detection; with ``force`` false, an
            # explicit BOM/@charset in the input wins over ``encoding``.
            self.decoder = None
            self.encoding = encoding
            self.force = force
            codecs.IncrementalDecoder.__init__(self, errors)
            # Store ``errors`` somewhere else,
            # because we have to hide it in a property
            self._errors = errors
            self.buffer = b""
            self.headerfixed = False

        def iterdecode(self, input):
            """Decode an iterable of chunks, yielding non-empty results."""
            for part in input:
                result = self.decode(part, False)
                if result:
                    yield result
            result = self.decode("", True)
            if result:
                yield result

        def decode(self, input, final=False):
            # We're doing basically the same as a ``BufferedIncrementalDecoder``,
            # but since the buffer is only relevant until the encoding has been
            # detected (in which case the buffer of the underlying codec might
            # kick in), we're implementing buffering ourselves to avoid some
            # overhead.
            if self.decoder is None:
                input = self.buffer + input
                # Do we have to detect the encoding from the input?
                if self.encoding is None or not self.force:
                    (encoding, explicit) = detectencoding_str(input, final)
                    if encoding is None:  # no encoding determined yet
                        self.buffer = input  # retry the complete input next call
                        return ""  # no encoding determined yet, so no output
                    elif encoding == "css":
                        raise ValueError("css not allowed as encoding name")
                    if (
                        explicit and not self.force
                    ) or self.encoding is None:  # Take the encoding from the input
                        self.encoding = encoding
                self.buffer = ""  # drop buffer, as the decoder might keep its own
                decoder = codecs.getincrementaldecoder(self.encoding)
                self.decoder = decoder(self._errors)
            if self.headerfixed:
                return self.decoder.decode(input, final)
            # If we haven't fixed the header yet,
            # the content of ``self.buffer`` is a ``unicode`` object
            output = self.buffer + self.decoder.decode(input, final)
            encoding = self.encoding
            if encoding.replace("_", "-").lower() == "utf-8-sig":
                encoding = "utf-8"
            newoutput = _fixencoding(output, str(encoding), final)
            if newoutput is None:
                # retry fixing the @charset rule (but keep the decoded stuff)
                self.buffer = output
                return ""
            self.headerfixed = True
            return newoutput

        def reset(self):
            codecs.IncrementalDecoder.reset(self)
            self.decoder = None
            self.buffer = b""
            self.headerfixed = False

        def _geterrors(self):
            return self._errors

        def _seterrors(self, errors):
            # Setting ``errors`` must be done on the real decoder too
            if self.decoder is not None:
                self.decoder.errors = errors
            self._errors = errors

        errors = property(_geterrors, _seterrors)

        def getstate(self):
            """Marshal the full decoder state into ``("", int)`` (codecs contract)."""
            if self.decoder is not None:
                state = (
                    self.encoding,
                    self.buffer,
                    self.headerfixed,
                    True,
                    self.decoder.getstate(),
                )
            else:
                state = (self.encoding, self.buffer, self.headerfixed, False, None)
            # Bug fix: the former _bytes2int(marshal.dumps(...)) called ord()
            # on ints (marshal.dumps returns bytes on Python 3) and crashed.
            return ("", int.from_bytes(marshal.dumps(state), "big"))

        def setstate(self, state):
            """Restore a state produced by :meth:`getstate` (buffered input in
            ``state[0]`` is ignored)."""
            # Bug fixes: the original transposed the arguments of
            # marshal.loads/_int2bytes (marshal.loads(int) raises TypeError),
            # and tested ``state[3] is not None`` although state[3] is a bool,
            # so a state saved without an active decoder crashed on
            # ``setstate(None)``.
            packed = state[1]
            raw = packed.to_bytes((packed.bit_length() + 7) // 8, "big")
            state = marshal.loads(raw)
            self.encoding = state[0]
            self.buffer = state[1]
            self.headerfixed = state[2]
            if state[3]:
                self.decoder = codecs.getincrementaldecoder(self.encoding)(
                    self._errors
                )
                self.decoder.setstate(state[4])
            else:
                self.decoder = None
if hasattr(codecs, "IncrementalEncoder"):

    class IncrementalEncoder(codecs.IncrementalEncoder):
        """Incremental encoder for the ``css`` codec.

        Buffers input until the target encoding is known (given explicitly or
        read from the @charset rule), rewrites the rule to name it, then
        delegates to that encoding's incremental encoder.
        """

        def __init__(self, errors="strict", encoding=None):
            self.encoder = None
            self.encoding = encoding
            codecs.IncrementalEncoder.__init__(self, errors)
            # Store ``errors`` somewhere else,
            # because we have to hide it in a property
            self._errors = errors
            self.buffer = ""

        def iterencode(self, input):
            """Encode an iterable of chunks, yielding non-empty results."""
            for part in input:
                result = self.encode(part, False)
                if result:
                    yield result
            result = self.encode("", True)
            if result:
                yield result

        def encode(self, input, final=False):
            if self.encoder is None:
                input = self.buffer + input
                if self.encoding is not None:
                    # Replace encoding in the @charset rule with the specified one
                    encoding = self.encoding
                    if encoding.replace("_", "-").lower() == "utf-8-sig":
                        encoding = "utf-8"
                    newinput = _fixencoding(input, str(encoding), final)
                    if newinput is None:  # @charset rule incomplete => Retry next time
                        self.buffer = input
                        return ""
                    input = newinput
                else:
                    # Use encoding from the @charset declaration
                    self.encoding = detectencoding_unicode(input, final)[0]
                if self.encoding is not None:
                    if self.encoding == "css":
                        raise ValueError("css not allowed as encoding name")
                    info = codecs.lookup(self.encoding)
                    encoding = self.encoding
                    if self.encoding.replace("_", "-").lower() == "utf-8-sig":
                        input = _fixencoding(input, "utf-8", True)
                    self.encoder = info.incrementalencoder(self._errors)
                    self.buffer = ""
                else:
                    self.buffer = input
                    return ""
            return self.encoder.encode(input, final)

        def reset(self):
            codecs.IncrementalEncoder.reset(self)
            self.encoder = None
            self.buffer = ""

        def _geterrors(self):
            return self._errors

        def _seterrors(self, errors):
            # Setting ``errors`` must be done on the real encoder too
            if self.encoder is not None:
                self.encoder.errors = errors
            self._errors = errors

        errors = property(_geterrors, _seterrors)

        def getstate(self):
            """Marshal the full encoder state into an ``int`` (codecs contract)."""
            if self.encoder is not None:
                state = (self.encoding, self.buffer, True, self.encoder.getstate())
            else:
                state = (self.encoding, self.buffer, False, None)
            # Bug fix: the former _bytes2int(marshal.dumps(...)) called ord()
            # on ints (marshal.dumps returns bytes on Python 3) and crashed.
            return int.from_bytes(marshal.dumps(state), "big")

        def setstate(self, state):
            """Restore a state produced by :meth:`getstate`."""
            # Bug fixes: the original transposed marshal.loads/_int2bytes
            # (marshal.loads(int) raises TypeError); tested
            # ``state[2] is not None`` although state[2] is a bool; and read
            # the encoder sub-state from state[4] although the tuple built by
            # getstate() only has four elements (IndexError) — it lives at
            # index 3.
            raw = state.to_bytes((state.bit_length() + 7) // 8, "big")
            state = marshal.loads(raw)
            self.encoding = state[0]
            self.buffer = state[1]
            if state[2]:
                self.encoder = codecs.getincrementalencoder(self.encoding)(
                    self._errors
                )
                self.encoder.setstate(state[3])
            else:
                self.encoder = None
class StreamWriter(codecs.StreamWriter):
    """Stream writer for the ``css`` codec.

    Buffers output until the target encoding is known (passed in or read
    from the @charset rule), rewrites the rule to name that encoding, then
    delegates to the real stream writer for it.
    """

    def __init__(self, stream, errors="strict", encoding=None, header=False):
        # NOTE(review): ``header`` is accepted but never used anywhere in
        # this class — confirm against callers before removing.
        codecs.StreamWriter.__init__(self, stream, errors)
        self.streamwriter = None  # created lazily once the encoding is known
        self.encoding = encoding
        self._errors = errors
        self.buffer = ""  # text withheld until the encoding is determined

    def encode(self, input, errors="strict"):
        # Returns (encoded_bytes, chars_consumed); ("", 0) while the
        # encoding (or the complete @charset rule) is still unknown.
        li = len(input)
        if self.streamwriter is None:
            input = self.buffer + input
            li = len(input)
            if self.encoding is not None:
                # Replace encoding in the @charset rule with the specified one
                encoding = self.encoding
                if encoding.replace("_", "-").lower() == "utf-8-sig":
                    encoding = "utf-8"
                newinput = _fixencoding(input, str(encoding), False)
                if newinput is None:  # @charset rule incomplete => Retry next time
                    self.buffer = input
                    return ("", 0)
                input = newinput
            else:
                # Use encoding from the @charset declaration
                self.encoding = detectencoding_unicode(input, False)[0]
            if self.encoding is not None:
                if self.encoding == "css":
                    raise ValueError("css not allowed as encoding name")
                self.streamwriter = codecs.getwriter(self.encoding)(
                    self.stream, self._errors
                )
                encoding = self.encoding
                # utf-8-sig: the rule names plain "utf-8"; the underlying
                # writer emits the BOM itself.
                if self.encoding.replace("_", "-").lower() == "utf-8-sig":
                    input = _fixencoding(input, "utf-8", True)
                self.buffer = ""
            else:
                self.buffer = input
                return ("", 0)
        return (self.streamwriter.encode(input, errors)[0], li)

    def _geterrors(self):
        return self._errors

    def _seterrors(self, errors):
        # Setting ``errors`` must be done on the streamwriter too
        try:
            if self.streamwriter is not None:
                self.streamwriter.errors = errors
        except AttributeError as e:
            # TODO: py3 only exception?
            pass
        self._errors = errors

    errors = property(_geterrors, _seterrors)
class StreamReader(codecs.StreamReader):
    """Stream reader for the ``css`` codec.

    Detects the stream's encoding (unless forced), rewrites the @charset
    rule in the decoded text to name it, then delegates all further decoding
    to the real stream reader for that encoding.
    """

    def __init__(self, stream, errors="strict", encoding=None, force=True):
        # ``encoding`` overrides detection; with ``force`` false, an explicit
        # BOM/@charset in the stream wins over ``encoding``.
        codecs.StreamReader.__init__(self, stream, errors)
        self.streamreader = None  # created lazily once the encoding is known
        self.encoding = encoding
        self.force = force
        self._errors = errors

    def decode(self, input, errors="strict"):
        # Returns (text, bytes_consumed); ("", 0) while the encoding or the
        # complete @charset rule is still undetermined.
        if self.streamreader is None:
            if self.encoding is None or not self.force:
                (encoding, explicit) = detectencoding_str(input, False)
                if encoding is None:  # no encoding determined yet
                    return ("", 0)  # no encoding determined yet, so no output
                elif encoding == "css":
                    raise ValueError("css not allowed as encoding name")
                if (
                    explicit and not self.force
                ) or self.encoding is None:  # Take the encoding from the input
                    self.encoding = encoding
            streamreader = codecs.getreader(self.encoding)
            streamreader = streamreader(self.stream, self._errors)
            (output, consumed) = streamreader.decode(input, errors)
            encoding = self.encoding
            if encoding.replace("_", "-").lower() == "utf-8-sig":
                encoding = "utf-8"
            newoutput = _fixencoding(output, str(encoding), False)
            if newoutput is not None:
                # Header rewritten: from now on delegate directly.
                self.streamreader = streamreader
                return (newoutput, consumed)
            return ("", 0)  # we will create a new streamreader on the next call
        return self.streamreader.decode(input, errors)

    def _geterrors(self):
        return self._errors

    def _seterrors(self, errors):
        # Setting ``errors`` must be done on the streamreader too
        try:
            if self.streamreader is not None:
                self.streamreader.errors = errors
        except AttributeError as e:
            # TODO: py3 only exception?
            pass
        self._errors = errors

    errors = property(_geterrors, _seterrors)
# Register the "css" codec.  On any Python with codecs.CodecInfo (2.5+,
# including all of Python 3) the first branch is taken; the else branch is
# legacy Python 2.4 support and is dead code on Python 3.
if hasattr(codecs, "CodecInfo"):
    # We're running on Python 2.5 or better
    def search_function(name):
        # Codec search function: return the CodecInfo for "css", None otherwise.
        if name == "css":
            return codecs.CodecInfo(
                name="css",
                encode=encode,
                decode=decode,
                incrementalencoder=IncrementalEncoder,
                incrementaldecoder=IncrementalDecoder,
                streamwriter=StreamWriter,
                streamreader=StreamReader,
            )

else:
    # If we're running on Python 2.4, define the utf-8-sig codec here
    def utf8sig_encode(input, errors="strict"):
        # UTF-8 encode with a leading BOM.
        return (codecs.BOM_UTF8 + codecs.utf_8_encode(input, errors)[0], len(input))

    def utf8sig_decode(input, errors="strict"):
        # UTF-8 decode, skipping (and counting) a leading BOM if present.
        prefix = 0
        if input[:3] == codecs.BOM_UTF8:
            input = input[3:]
            prefix = 3
        (output, consumed) = codecs.utf_8_decode(input, errors, True)
        return (output, consumed + prefix)

    class UTF8SigStreamWriter(codecs.StreamWriter):
        # Writes the BOM on the first encode() call only: that call goes
        # through utf8sig_encode, which then shadows ``encode`` on the
        # instance with the plain UTF-8 encoder.
        def reset(self):
            codecs.StreamWriter.reset(self)
            try:
                del self.encode
            except AttributeError:
                pass

        def encode(self, input, errors="strict"):
            self.encode = codecs.utf_8_encode
            return utf8sig_encode(input, errors)

    class UTF8SigStreamReader(codecs.StreamReader):
        # Mirror image of UTF8SigStreamWriter: strips a BOM on the first
        # decode() call, then shadows ``decode`` with the plain UTF-8 decoder.
        def reset(self):
            codecs.StreamReader.reset(self)
            try:
                del self.decode
            except AttributeError:
                pass

        def decode(self, input, errors="strict"):
            if len(input) < 3 and codecs.BOM_UTF8.startswith(input):
                # not enough data to decide if this is a BOM
                # => try again on the next call
                return ("", 0)
            self.decode = codecs.utf_8_decode
            return utf8sig_decode(input, errors)

    def search_function(name):
        # Old-style search function returning a 4-tuple instead of CodecInfo.
        import encodings

        name = encodings.normalize_encoding(name)
        if name == "css":
            return (encode, decode, StreamReader, StreamWriter)
        elif name == "utf_8_sig":
            return (
                utf8sig_encode,
                utf8sig_decode,
                UTF8SigStreamReader,
                UTF8SigStreamWriter,
            )


# Make the codec available via codecs.lookup("css") / "css" encoding name.
codecs.register(search_function)
# Error handler for CSS escaping
def cssescape(exc):
    """Codec error handler that replaces unencodable characters with CSS
    ``\\xxxxxx`` escape sequences (six hex digits per character).

    Raises TypeError for anything other than a UnicodeEncodeError, as the
    error-handler protocol requires.
    """
    if not isinstance(exc, UnicodeEncodeError):
        raise TypeError("don't know how to handle %r" % exc)
    escaped = []
    for character in exc.object[exc.start : exc.end]:
        escaped.append("\\%06x" % ord(character))
    return ("".join(escaped), exc.end)


codecs.register_error("cssescape", cssescape)
|
femexamples | buckling_platebuckling | # ***************************************************************************
# * Copyright (c) 2021 Bernd Hahnebach <bernd@bimstatik.org> *
# * *
# * This file is part of the FreeCAD CAx development system. *
# * *
# * This program is free software; you can redistribute it and/or modify *
# * it under the terms of the GNU Lesser General Public License (LGPL) *
# * as published by the Free Software Foundation; either version 2 of *
# * the License, or (at your option) any later version. *
# * for detail see the LICENCE text file. *
# * *
# * This program is distributed in the hope that it will be useful, *
# * but WITHOUT ANY WARRANTY; without even the implied warranty of *
# * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *
# * GNU Library General Public License for more details. *
# * *
# * You should have received a copy of the GNU Library General Public *
# * License along with this program; if not, write to the Free Software *
# * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 *
# * USA *
# * *
# ***************************************************************************
import Fem
import FreeCAD
import ObjectsFem
from . import manager
from .manager import get_meshname, init_doc
def get_information():
    """Return the metadata dict describing this FEM example to the manager."""
    info = {}
    info["name"] = "Plate Buckling"
    info["meshtype"] = "face"
    info["meshelement"] = "Tria6"
    info["constraints"] = ["displacement", "force"]
    info["solvers"] = ["calculix", "ccxtools"]
    info["material"] = "solid"
    info["equations"] = ["buckling"]
    return info
def get_explanation(header=""):
    """Return *header* followed by the usage notes and forum link for this
    example."""
    usage_text = """
To run the example from Python console use:
from femexamples.buckling_platebuckling import setup
setup()
See forum topic post:
https://forum.freecad.org/viewtopic.php?f=18&t=20217&start=110#p509935
"""
    return header + usage_text
def setup(doc=None, solvertype="ccxtools"):
    """Build the plate-buckling example analysis in *doc* and return the doc.

    Creates an 8000 x 6000 plane, a CalculiX buckling analysis (solver chosen
    by *solvertype*: "calculix" or "ccxtools"), a 50 mm shell thickness, a
    steel material, edge/vertex displacement constraints, an in-plane edge
    force, and a precomputed Tria6 mesh.  When *doc* is None a fresh document
    is created.
    """
    # init FreeCAD document
    if doc is None:
        doc = init_doc()

    # explanation object
    # just keep the following line and change text string in get_explanation method
    manager.add_explanation_obj(
        doc, get_explanation(manager.get_header(get_information()))
    )

    # geometric object: a rectangular plane acting as the plate mid-surface
    geom_obj = doc.addObject("Part::Plane", "Plate")
    geom_obj.Width = 6000
    geom_obj.Length = 8000
    doc.recompute()

    if FreeCAD.GuiUp:
        geom_obj.ViewObject.Document.activeView().viewAxonometric()
        geom_obj.ViewObject.Document.activeView().fitAll()

    # analysis
    analysis = ObjectsFem.makeAnalysis(doc, "Analysis")

    # solver — unknown solvertype values only warn; solver_obj then stays
    # unbound, so the shared configuration below is guarded by the same check.
    if solvertype == "calculix":
        solver_obj = ObjectsFem.makeSolverCalculix(doc, "SolverCalculiX")
    elif solvertype == "ccxtools":
        solver_obj = ObjectsFem.makeSolverCalculixCcxTools(doc, "CalculiXccxTools")
        solver_obj.WorkingDir = ""
    else:
        FreeCAD.Console.PrintWarning(
            "Unknown or unsupported solver type: {}. "
            "No solver object was created.\n".format(solvertype)
        )
    if solvertype == "calculix" or solvertype == "ccxtools":
        solver_obj.SplitInputWriter = False
        solver_obj.AnalysisType = "buckling"
        # NOTE(review): BucklingFactors is set to 10 here and overwritten
        # with 1 a few lines below — only the final value 1 takes effect;
        # confirm which value is intended.
        solver_obj.BucklingFactors = 10
        solver_obj.GeometricalNonlinearity = "linear"
        solver_obj.ThermoMechSteadyState = False
        solver_obj.MatrixSolverType = "default"
        solver_obj.IterationsControlParameterTimeUse = False
        solver_obj.BucklingFactors = 1
    analysis.addObject(solver_obj)

    # shell thickness (2D element geometry, 50 mm)
    thickness_obj = ObjectsFem.makeElementGeometry2D(doc, 50, "Thickness")
    analysis.addObject(thickness_obj)

    # material
    material_obj = ObjectsFem.makeMaterialSolid(doc, "Steel")
    mat = material_obj.Material
    mat["Name"] = "CalculiX-Steel"
    mat["YoungsModulus"] = "210000 MPa"
    mat["PoissonRatio"] = "0.30"
    material_obj.Material = mat
    analysis.addObject(material_obj)

    # constraints displacement: fix X on one edge, Y on one vertex,
    # Z on all four edges (classic simply-supported plate setup)
    con_disp_x = ObjectsFem.makeConstraintDisplacement(doc, "ConstraintDisplacement_X")
    con_disp_x.References = [(geom_obj, "Edge1")]
    con_disp_x.xFix = True
    con_disp_x.xFree = False
    analysis.addObject(con_disp_x)

    con_disp_y = ObjectsFem.makeConstraintDisplacement(doc, "ConstraintDisplacement_Y")
    con_disp_y.References = [(geom_obj, "Vertex1")]
    con_disp_y.yFix = True
    con_disp_y.yFree = False
    analysis.addObject(con_disp_y)

    con_disp_z = ObjectsFem.makeConstraintDisplacement(doc, "ConstraintDisplacement_Z")
    con_disp_z.References = [
        (geom_obj, "Edge1"),
        (geom_obj, "Edge2"),
        (geom_obj, "Edge3"),
        (geom_obj, "Edge4"),
    ]
    con_disp_z.zFix = True
    con_disp_z.zFree = False
    analysis.addObject(con_disp_z)

    # constraint force: compressive in-plane load on Edge3, directed
    # along Edge2
    con_force = ObjectsFem.makeConstraintForce(doc, "ConstraintForce")
    con_force.References = [(geom_obj, "Edge3")]
    con_force.Force = 17162160  # 17'162.16 N
    con_force.Reversed = True
    con_force.Direction = (geom_obj, ["Edge2"])
    analysis.addObject(con_force)

    # mesh: load the precomputed Tria6 node/element data instead of meshing
    from .meshes.mesh_buckling_plate_tria6 import create_elements, create_nodes

    fem_mesh = Fem.FemMesh()
    control = create_nodes(fem_mesh)
    if not control:
        FreeCAD.Console.PrintError("Error on creating nodes.\n")
    control = create_elements(fem_mesh)
    if not control:
        FreeCAD.Console.PrintError("Error on creating elements.\n")
    femmesh_obj = analysis.addObject(ObjectsFem.makeMeshGmsh(doc, get_meshname()))[0]
    femmesh_obj.FemMesh = fem_mesh
    femmesh_obj.Part = geom_obj
    femmesh_obj.SecondOrderLinear = False
    femmesh_obj.CharacteristicLengthMax = "300.0 mm"
    femmesh_obj.ElementDimension = "2D"

    doc.recompute()
    return doc
|
chardet | gb2312freq | ######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Communicator client code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
# GB2312 most frequently used character table
#
# Char to FreqOrder table , from hz6763
# 512 --> 0.79 -- 0.79
# 1024 --> 0.92 -- 0.13
# 2048 --> 0.98 -- 0.06
# 6768 --> 1.00 -- 0.02
#
# Ideal Distribution Ratio = 0.79135/(1-0.79135) = 3.79
# Random Distribution Ratio = 512 / (3755 - 512) = 0.157
#
# Typical Distribution Ratio is about 25% of the ideal one, still much higher than RDR
# Threshold ratio — presumably consumed by the GB2312 character-distribution
# analyser (see the frequency statistics in the header comment above);
# confirm against the call sites in chardet.
GB2312_TYPICAL_DISTRIBUTION_RATIO = 0.9
# Size of the char-to-frequency-order table defined below.
GB2312_TABLE_SIZE = 3760
GB2312CharToFreqOrder = (
1671,
749,
1443,
2364,
3924,
3807,
2330,
3921,
1704,
3463,
2691,
1511,
1515,
572,
3191,
2205,
2361,
224,
2558,
479,
1711,
963,
3162,
440,
4060,
1905,
2966,
2947,
3580,
2647,
3961,
3842,
2204,
869,
4207,
970,
2678,
5626,
2944,
2956,
1479,
4048,
514,
3595,
588,
1346,
2820,
3409,
249,
4088,
1746,
1873,
2047,
1774,
581,
1813,
358,
1174,
3590,
1014,
1561,
4844,
2245,
670,
1636,
3112,
889,
1286,
953,
556,
2327,
3060,
1290,
3141,
613,
185,
3477,
1367,
850,
3820,
1715,
2428,
2642,
2303,
2732,
3041,
2562,
2648,
3566,
3946,
1349,
388,
3098,
2091,
1360,
3585,
152,
1687,
1539,
738,
1559,
59,
1232,
2925,
2267,
1388,
1249,
1741,
1679,
2960,
151,
1566,
1125,
1352,
4271,
924,
4296,
385,
3166,
4459,
310,
1245,
2850,
70,
3285,
2729,
3534,
3575,
2398,
3298,
3466,
1960,
2265,
217,
3647,
864,
1909,
2084,
4401,
2773,
1010,
3269,
5152,
853,
3051,
3121,
1244,
4251,
1895,
364,
1499,
1540,
2313,
1180,
3655,
2268,
562,
715,
2417,
3061,
544,
336,
3768,
2380,
1752,
4075,
950,
280,
2425,
4382,
183,
2759,
3272,
333,
4297,
2155,
1688,
2356,
1444,
1039,
4540,
736,
1177,
3349,
2443,
2368,
2144,
2225,
565,
196,
1482,
3406,
927,
1335,
4147,
692,
878,
1311,
1653,
3911,
3622,
1378,
4200,
1840,
2969,
3149,
2126,
1816,
2534,
1546,
2393,
2760,
737,
2494,
13,
447,
245,
2747,
38,
2765,
2129,
2589,
1079,
606,
360,
471,
3755,
2890,
404,
848,
699,
1785,
1236,
370,
2221,
1023,
3746,
2074,
2026,
2023,
2388,
1581,
2119,
812,
1141,
3091,
2536,
1519,
804,
2053,
406,
1596,
1090,
784,
548,
4414,
1806,
2264,
2936,
1100,
343,
4114,
5096,
622,
3358,
743,
3668,
1510,
1626,
5020,
3567,
2513,
3195,
4115,
5627,
2489,
2991,
24,
2065,
2697,
1087,
2719,
48,
1634,
315,
68,
985,
2052,
198,
2239,
1347,
1107,
1439,
597,
2366,
2172,
871,
3307,
919,
2487,
2790,
1867,
236,
2570,
1413,
3794,
906,
3365,
3381,
1701,
1982,
1818,
1524,
2924,
1205,
616,
2586,
2072,
2004,
575,
253,
3099,
32,
1365,
1182,
197,
1714,
2454,
1201,
554,
3388,
3224,
2748,
756,
2587,
250,
2567,
1507,
1517,
3529,
1922,
2761,
2337,
3416,
1961,
1677,
2452,
2238,
3153,
615,
911,
1506,
1474,
2495,
1265,
1906,
2749,
3756,
3280,
2161,
898,
2714,
1759,
3450,
2243,
2444,
563,
26,
3286,
2266,
3769,
3344,
2707,
3677,
611,
1402,
531,
1028,
2871,
4548,
1375,
261,
2948,
835,
1190,
4134,
353,
840,
2684,
1900,
3082,
1435,
2109,
1207,
1674,
329,
1872,
2781,
4055,
2686,
2104,
608,
3318,
2423,
2957,
2768,
1108,
3739,
3512,
3271,
3985,
2203,
1771,
3520,
1418,
2054,
1681,
1153,
225,
1627,
2929,
162,
2050,
2511,
3687,
1954,
124,
1859,
2431,
1684,
3032,
2894,
585,
4805,
3969,
2869,
2704,
2088,
2032,
2095,
3656,
2635,
4362,
2209,
256,
518,
2042,
2105,
3777,
3657,
643,
2298,
1148,
1779,
190,
989,
3544,
414,
11,
2135,
2063,
2979,
1471,
403,
3678,
126,
770,
1563,
671,
2499,
3216,
2877,
600,
1179,
307,
2805,
4937,
1268,
1297,
2694,
252,
4032,
1448,
1494,
1331,
1394,
127,
2256,
222,
1647,
1035,
1481,
3056,
1915,
1048,
873,
3651,
210,
33,
1608,
2516,
200,
1520,
415,
102,
0,
3389,
1287,
817,
91,
3299,
2940,
836,
1814,
549,
2197,
1396,
1669,
2987,
3582,
2297,
2848,
4528,
1070,
687,
20,
1819,
121,
1552,
1364,
1461,
1968,
2617,
3540,
2824,
2083,
177,
948,
4938,
2291,
110,
4549,
2066,
648,
3359,
1755,
2110,
2114,
4642,
4845,
1693,
3937,
3308,
1257,
1869,
2123,
208,
1804,
3159,
2992,
2531,
2549,
3361,
2418,
1350,
2347,
2800,
2568,
1291,
2036,
2680,
72,
842,
1990,
212,
1233,
1154,
1586,
75,
2027,
3410,
4900,
1823,
1337,
2710,
2676,
728,
2810,
1522,
3026,
4995,
157,
755,
1050,
4022,
710,
785,
1936,
2194,
2085,
1406,
2777,
2400,
150,
1250,
4049,
1206,
807,
1910,
534,
529,
3309,
1721,
1660,
274,
39,
2827,
661,
2670,
1578,
925,
3248,
3815,
1094,
4278,
4901,
4252,
41,
1150,
3747,
2572,
2227,
4501,
3658,
4902,
3813,
3357,
3617,
2884,
2258,
887,
538,
4187,
3199,
1294,
2439,
3042,
2329,
2343,
2497,
1255,
107,
543,
1527,
521,
3478,
3568,
194,
5062,
15,
961,
3870,
1241,
1192,
2664,
66,
5215,
3260,
2111,
1295,
1127,
2152,
3805,
4135,
901,
1164,
1976,
398,
1278,
530,
1460,
748,
904,
1054,
1966,
1426,
53,
2909,
509,
523,
2279,
1534,
536,
1019,
239,
1685,
460,
2353,
673,
1065,
2401,
3600,
4298,
2272,
1272,
2363,
284,
1753,
3679,
4064,
1695,
81,
815,
2677,
2757,
2731,
1386,
859,
500,
4221,
2190,
2566,
757,
1006,
2519,
2068,
1166,
1455,
337,
2654,
3203,
1863,
1682,
1914,
3025,
1252,
1409,
1366,
847,
714,
2834,
2038,
3209,
964,
2970,
1901,
885,
2553,
1078,
1756,
3049,
301,
1572,
3326,
688,
2130,
1996,
2429,
1805,
1648,
2930,
3421,
2750,
3652,
3088,
262,
1158,
1254,
389,
1641,
1812,
526,
1719,
923,
2073,
1073,
1902,
468,
489,
4625,
1140,
857,
2375,
3070,
3319,
2863,
380,
116,
1328,
2693,
1161,
2244,
273,
1212,
1884,
2769,
3011,
1775,
1142,
461,
3066,
1200,
2147,
2212,
790,
702,
2695,
4222,
1601,
1058,
434,
2338,
5153,
3640,
67,
2360,
4099,
2502,
618,
3472,
1329,
416,
1132,
830,
2782,
1807,
2653,
3211,
3510,
1662,
192,
2124,
296,
3979,
1739,
1611,
3684,
23,
118,
324,
446,
1239,
1225,
293,
2520,
3814,
3795,
2535,
3116,
17,
1074,
467,
2692,
2201,
387,
2922,
45,
1326,
3055,
1645,
3659,
2817,
958,
243,
1903,
2320,
1339,
2825,
1784,
3289,
356,
576,
865,
2315,
2381,
3377,
3916,
1088,
3122,
1713,
1655,
935,
628,
4689,
1034,
1327,
441,
800,
720,
894,
1979,
2183,
1528,
5289,
2702,
1071,
4046,
3572,
2399,
1571,
3281,
79,
761,
1103,
327,
134,
758,
1899,
1371,
1615,
879,
442,
215,
2605,
2579,
173,
2048,
2485,
1057,
2975,
3317,
1097,
2253,
3801,
4263,
1403,
1650,
2946,
814,
4968,
3487,
1548,
2644,
1567,
1285,
2,
295,
2636,
97,
946,
3576,
832,
141,
4257,
3273,
760,
3821,
3521,
3156,
2607,
949,
1024,
1733,
1516,
1803,
1920,
2125,
2283,
2665,
3180,
1501,
2064,
3560,
2171,
1592,
803,
3518,
1416,
732,
3897,
4258,
1363,
1362,
2458,
119,
1427,
602,
1525,
2608,
1605,
1639,
3175,
694,
3064,
10,
465,
76,
2000,
4846,
4208,
444,
3781,
1619,
3353,
2206,
1273,
3796,
740,
2483,
320,
1723,
2377,
3660,
2619,
1359,
1137,
1762,
1724,
2345,
2842,
1850,
1862,
912,
821,
1866,
612,
2625,
1735,
2573,
3369,
1093,
844,
89,
937,
930,
1424,
3564,
2413,
2972,
1004,
3046,
3019,
2011,
711,
3171,
1452,
4178,
428,
801,
1943,
432,
445,
2811,
206,
4136,
1472,
730,
349,
73,
397,
2802,
2547,
998,
1637,
1167,
789,
396,
3217,
154,
1218,
716,
1120,
1780,
2819,
4826,
1931,
3334,
3762,
2139,
1215,
2627,
552,
3664,
3628,
3232,
1405,
2383,
3111,
1356,
2652,
3577,
3320,
3101,
1703,
640,
1045,
1370,
1246,
4996,
371,
1575,
2436,
1621,
2210,
984,
4033,
1734,
2638,
16,
4529,
663,
2755,
3255,
1451,
3917,
2257,
1253,
1955,
2234,
1263,
2951,
214,
1229,
617,
485,
359,
1831,
1969,
473,
2310,
750,
2058,
165,
80,
2864,
2419,
361,
4344,
2416,
2479,
1134,
796,
3726,
1266,
2943,
860,
2715,
938,
390,
2734,
1313,
1384,
248,
202,
877,
1064,
2854,
522,
3907,
279,
1602,
297,
2357,
395,
3740,
137,
2075,
944,
4089,
2584,
1267,
3802,
62,
1533,
2285,
178,
176,
780,
2440,
201,
3707,
590,
478,
1560,
4354,
2117,
1075,
30,
74,
4643,
4004,
1635,
1441,
2745,
776,
2596,
238,
1077,
1692,
1912,
2844,
605,
499,
1742,
3947,
241,
3053,
980,
1749,
936,
2640,
4511,
2582,
515,
1543,
2162,
5322,
2892,
2993,
890,
2148,
1924,
665,
1827,
3581,
1032,
968,
3163,
339,
1044,
1896,
270,
583,
1791,
1720,
4367,
1194,
3488,
3669,
43,
2523,
1657,
163,
2167,
290,
1209,
1622,
3378,
550,
634,
2508,
2510,
695,
2634,
2384,
2512,
1476,
1414,
220,
1469,
2341,
2138,
2852,
3183,
2900,
4939,
2865,
3502,
1211,
3680,
854,
3227,
1299,
2976,
3172,
186,
2998,
1459,
443,
1067,
3251,
1495,
321,
1932,
3054,
909,
753,
1410,
1828,
436,
2441,
1119,
1587,
3164,
2186,
1258,
227,
231,
1425,
1890,
3200,
3942,
247,
959,
725,
5254,
2741,
577,
2158,
2079,
929,
120,
174,
838,
2813,
591,
1115,
417,
2024,
40,
3240,
1536,
1037,
291,
4151,
2354,
632,
1298,
2406,
2500,
3535,
1825,
1846,
3451,
205,
1171,
345,
4238,
18,
1163,
811,
685,
2208,
1217,
425,
1312,
1508,
1175,
4308,
2552,
1033,
587,
1381,
3059,
2984,
3482,
340,
1316,
4023,
3972,
792,
3176,
519,
777,
4690,
918,
933,
4130,
2981,
3741,
90,
3360,
2911,
2200,
5184,
4550,
609,
3079,
2030,
272,
3379,
2736,
363,
3881,
1130,
1447,
286,
779,
357,
1169,
3350,
3137,
1630,
1220,
2687,
2391,
747,
1277,
3688,
2618,
2682,
2601,
1156,
3196,
5290,
4034,
3102,
1689,
3596,
3128,
874,
219,
2783,
798,
508,
1843,
2461,
269,
1658,
1776,
1392,
1913,
2983,
3287,
2866,
2159,
2372,
829,
4076,
46,
4253,
2873,
1889,
1894,
915,
1834,
1631,
2181,
2318,
298,
664,
2818,
3555,
2735,
954,
3228,
3117,
527,
3511,
2173,
681,
2712,
3033,
2247,
2346,
3467,
1652,
155,
2164,
3382,
113,
1994,
450,
899,
494,
994,
1237,
2958,
1875,
2336,
1926,
3727,
545,
1577,
1550,
633,
3473,
204,
1305,
3072,
2410,
1956,
2471,
707,
2134,
841,
2195,
2196,
2663,
3843,
1026,
4940,
990,
3252,
4997,
368,
1092,
437,
3212,
3258,
1933,
1829,
675,
2977,
2893,
412,
943,
3723,
4644,
3294,
3283,
2230,
2373,
5154,
2389,
2241,
2661,
2323,
1404,
2524,
593,
787,
677,
3008,
1275,
2059,
438,
2709,
2609,
2240,
2269,
2246,
1446,
36,
1568,
1373,
3892,
1574,
2301,
1456,
3962,
693,
2276,
5216,
2035,
1143,
2720,
1919,
1797,
1811,
2763,
4137,
2597,
1830,
1699,
1488,
1198,
2090,
424,
1694,
312,
3634,
3390,
4179,
3335,
2252,
1214,
561,
1059,
3243,
2295,
2561,
975,
5155,
2321,
2751,
3772,
472,
1537,
3282,
3398,
1047,
2077,
2348,
2878,
1323,
3340,
3076,
690,
2906,
51,
369,
170,
3541,
1060,
2187,
2688,
3670,
2541,
1083,
1683,
928,
3918,
459,
109,
4427,
599,
3744,
4286,
143,
2101,
2730,
2490,
82,
1588,
3036,
2121,
281,
1860,
477,
4035,
1238,
2812,
3020,
2716,
3312,
1530,
2188,
2055,
1317,
843,
636,
1808,
1173,
3495,
649,
181,
1002,
147,
3641,
1159,
2414,
3750,
2289,
2795,
813,
3123,
2610,
1136,
4368,
5,
3391,
4541,
2174,
420,
429,
1728,
754,
1228,
2115,
2219,
347,
2223,
2733,
735,
1518,
3003,
2355,
3134,
1764,
3948,
3329,
1888,
2424,
1001,
1234,
1972,
3321,
3363,
1672,
1021,
1450,
1584,
226,
765,
655,
2526,
3404,
3244,
2302,
3665,
731,
594,
2184,
319,
1576,
621,
658,
2656,
4299,
2099,
3864,
1279,
2071,
2598,
2739,
795,
3086,
3699,
3908,
1707,
2352,
2402,
1382,
3136,
2475,
1465,
4847,
3496,
3865,
1085,
3004,
2591,
1084,
213,
2287,
1963,
3565,
2250,
822,
793,
4574,
3187,
1772,
1789,
3050,
595,
1484,
1959,
2770,
1080,
2650,
456,
422,
2996,
940,
3322,
4328,
4345,
3092,
2742,
965,
2784,
739,
4124,
952,
1358,
2498,
2949,
2565,
332,
2698,
2378,
660,
2260,
2473,
4194,
3856,
2919,
535,
1260,
2651,
1208,
1428,
1300,
1949,
1303,
2942,
433,
2455,
2450,
1251,
1946,
614,
1269,
641,
1306,
1810,
2737,
3078,
2912,
564,
2365,
1419,
1415,
1497,
4460,
2367,
2185,
1379,
3005,
1307,
3218,
2175,
1897,
3063,
682,
1157,
4040,
4005,
1712,
1160,
1941,
1399,
394,
402,
2952,
1573,
1151,
2986,
2404,
862,
299,
2033,
1489,
3006,
346,
171,
2886,
3401,
1726,
2932,
168,
2533,
47,
2507,
1030,
3735,
1145,
3370,
1395,
1318,
1579,
3609,
4560,
2857,
4116,
1457,
2529,
1965,
504,
1036,
2690,
2988,
2405,
745,
5871,
849,
2397,
2056,
3081,
863,
2359,
3857,
2096,
99,
1397,
1769,
2300,
4428,
1643,
3455,
1978,
1757,
3718,
1440,
35,
4879,
3742,
1296,
4228,
2280,
160,
5063,
1599,
2013,
166,
520,
3479,
1646,
3345,
3012,
490,
1937,
1545,
1264,
2182,
2505,
1096,
1188,
1369,
1436,
2421,
1667,
2792,
2460,
1270,
2122,
727,
3167,
2143,
806,
1706,
1012,
1800,
3037,
960,
2218,
1882,
805,
139,
2456,
1139,
1521,
851,
1052,
3093,
3089,
342,
2039,
744,
5097,
1468,
1502,
1585,
2087,
223,
939,
326,
2140,
2577,
892,
2481,
1623,
4077,
982,
3708,
135,
2131,
87,
2503,
3114,
2326,
1106,
876,
1616,
547,
2997,
2831,
2093,
3441,
4530,
4314,
9,
3256,
4229,
4148,
659,
1462,
1986,
1710,
2046,
2913,
2231,
4090,
4880,
5255,
3392,
3274,
1368,
3689,
4645,
1477,
705,
3384,
3635,
1068,
1529,
2941,
1458,
3782,
1509,
100,
1656,
2548,
718,
2339,
408,
1590,
2780,
3548,
1838,
4117,
3719,
1345,
3530,
717,
3442,
2778,
3220,
2898,
1892,
4590,
3614,
3371,
2043,
1998,
1224,
3483,
891,
635,
584,
2559,
3355,
733,
1766,
1729,
1172,
3789,
1891,
2307,
781,
2982,
2271,
1957,
1580,
5773,
2633,
2005,
4195,
3097,
1535,
3213,
1189,
1934,
5693,
3262,
586,
3118,
1324,
1598,
517,
1564,
2217,
1868,
1893,
4445,
3728,
2703,
3139,
1526,
1787,
1992,
3882,
2875,
1549,
1199,
1056,
2224,
1904,
2711,
5098,
4287,
338,
1993,
3129,
3489,
2689,
1809,
2815,
1997,
957,
1855,
3898,
2550,
3275,
3057,
1105,
1319,
627,
1505,
1911,
1883,
3526,
698,
3629,
3456,
1833,
1431,
746,
77,
1261,
2017,
2296,
1977,
1885,
125,
1334,
1600,
525,
1798,
1109,
2222,
1470,
1945,
559,
2236,
1186,
3443,
2476,
1929,
1411,
2411,
3135,
1777,
3372,
2621,
1841,
1613,
3229,
668,
1430,
1839,
2643,
2916,
195,
1989,
2671,
2358,
1387,
629,
3205,
2293,
5256,
4439,
123,
1310,
888,
1879,
4300,
3021,
3605,
1003,
1162,
3192,
2910,
2010,
140,
2395,
2859,
55,
1082,
2012,
2901,
662,
419,
2081,
1438,
680,
2774,
4654,
3912,
1620,
1731,
1625,
5035,
4065,
2328,
512,
1344,
802,
5443,
2163,
2311,
2537,
524,
3399,
98,
1155,
2103,
1918,
2606,
3925,
2816,
1393,
2465,
1504,
3773,
2177,
3963,
1478,
4346,
180,
1113,
4655,
3461,
2028,
1698,
833,
2696,
1235,
1322,
1594,
4408,
3623,
3013,
3225,
2040,
3022,
541,
2881,
607,
3632,
2029,
1665,
1219,
639,
1385,
1686,
1099,
2803,
3231,
1938,
3188,
2858,
427,
676,
2772,
1168,
2025,
454,
3253,
2486,
3556,
230,
1950,
580,
791,
1991,
1280,
1086,
1974,
2034,
630,
257,
3338,
2788,
4903,
1017,
86,
4790,
966,
2789,
1995,
1696,
1131,
259,
3095,
4188,
1308,
179,
1463,
5257,
289,
4107,
1248,
42,
3413,
1725,
2288,
896,
1947,
774,
4474,
4254,
604,
3430,
4264,
392,
2514,
2588,
452,
237,
1408,
3018,
988,
4531,
1970,
3034,
3310,
540,
2370,
1562,
1288,
2990,
502,
4765,
1147,
4,
1853,
2708,
207,
294,
2814,
4078,
2902,
2509,
684,
34,
3105,
3532,
2551,
644,
709,
2801,
2344,
573,
1727,
3573,
3557,
2021,
1081,
3100,
4315,
2100,
3681,
199,
2263,
1837,
2385,
146,
3484,
1195,
2776,
3949,
997,
1939,
3973,
1008,
1091,
1202,
1962,
1847,
1149,
4209,
5444,
1076,
493,
117,
5400,
2521,
972,
1490,
2934,
1796,
4542,
2374,
1512,
2933,
2657,
413,
2888,
1135,
2762,
2314,
2156,
1355,
2369,
766,
2007,
2527,
2170,
3124,
2491,
2593,
2632,
4757,
2437,
234,
3125,
3591,
1898,
1750,
1376,
1942,
3468,
3138,
570,
2127,
2145,
3276,
4131,
962,
132,
1445,
4196,
19,
941,
3624,
3480,
3366,
1973,
1374,
4461,
3431,
2629,
283,
2415,
2275,
808,
2887,
3620,
2112,
2563,
1353,
3610,
955,
1089,
3103,
1053,
96,
88,
4097,
823,
3808,
1583,
399,
292,
4091,
3313,
421,
1128,
642,
4006,
903,
2539,
1877,
2082,
596,
29,
4066,
1790,
722,
2157,
130,
995,
1569,
769,
1485,
464,
513,
2213,
288,
1923,
1101,
2453,
4316,
133,
486,
2445,
50,
625,
487,
2207,
57,
423,
481,
2962,
159,
3729,
1558,
491,
303,
482,
501,
240,
2837,
112,
3648,
2392,
1783,
362,
8,
3433,
3422,
610,
2793,
3277,
1390,
1284,
1654,
21,
3823,
734,
367,
623,
193,
287,
374,
1009,
1483,
816,
476,
313,
2255,
2340,
1262,
2150,
2899,
1146,
2581,
782,
2116,
1659,
2018,
1880,
255,
3586,
3314,
1110,
2867,
2137,
2564,
986,
2767,
5185,
2006,
650,
158,
926,
762,
881,
3157,
2717,
2362,
3587,
306,
3690,
3245,
1542,
3077,
2427,
1691,
2478,
2118,
2985,
3490,
2438,
539,
2305,
983,
129,
1754,
355,
4201,
2386,
827,
2923,
104,
1773,
2838,
2771,
411,
2905,
3919,
376,
767,
122,
1114,
828,
2422,
1817,
3506,
266,
3460,
1007,
1609,
4998,
945,
2612,
4429,
2274,
726,
1247,
1964,
2914,
2199,
2070,
4002,
4108,
657,
3323,
1422,
579,
455,
2764,
4737,
1222,
2895,
1670,
824,
1223,
1487,
2525,
558,
861,
3080,
598,
2659,
2515,
1967,
752,
2583,
2376,
2214,
4180,
977,
704,
2464,
4999,
2622,
4109,
1210,
2961,
819,
1541,
142,
2284,
44,
418,
457,
1126,
3730,
4347,
4626,
1644,
1876,
3671,
1864,
302,
1063,
5694,
624,
723,
1984,
3745,
1314,
1676,
2488,
1610,
1449,
3558,
3569,
2166,
2098,
409,
1011,
2325,
3704,
2306,
818,
1732,
1383,
1824,
1844,
3757,
999,
2705,
3497,
1216,
1423,
2683,
2426,
2954,
2501,
2726,
2229,
1475,
2554,
5064,
1971,
1794,
1666,
2014,
1343,
783,
724,
191,
2434,
1354,
2220,
5065,
1763,
2752,
2472,
4152,
131,
175,
2885,
3434,
92,
1466,
4920,
2616,
3871,
3872,
3866,
128,
1551,
1632,
669,
1854,
3682,
4691,
4125,
1230,
188,
2973,
3290,
1302,
1213,
560,
3266,
917,
763,
3909,
3249,
1760,
868,
1958,
764,
1782,
2097,
145,
2277,
3774,
4462,
64,
1491,
3062,
971,
2132,
3606,
2442,
221,
1226,
1617,
218,
323,
1185,
3207,
3147,
571,
619,
1473,
1005,
1744,
2281,
449,
1887,
2396,
3685,
275,
375,
3816,
1743,
3844,
3731,
845,
1983,
2350,
4210,
1377,
773,
967,
3499,
3052,
3743,
2725,
4007,
1697,
1022,
3943,
1464,
3264,
2855,
2722,
1952,
1029,
2839,
2467,
84,
4383,
2215,
820,
1391,
2015,
2448,
3672,
377,
1948,
2168,
797,
2545,
3536,
2578,
2645,
94,
2874,
1678,
405,
1259,
3071,
771,
546,
1315,
470,
1243,
3083,
895,
2468,
981,
969,
2037,
846,
4181,
653,
1276,
2928,
14,
2594,
557,
3007,
2474,
156,
902,
1338,
1740,
2574,
537,
2518,
973,
2282,
2216,
2433,
1928,
138,
2903,
1293,
2631,
1612,
646,
3457,
839,
2935,
111,
496,
2191,
2847,
589,
3186,
149,
3994,
2060,
4031,
2641,
4067,
3145,
1870,
37,
3597,
2136,
1025,
2051,
3009,
3383,
3549,
1121,
1016,
3261,
1301,
251,
2446,
2599,
2153,
872,
3246,
637,
334,
3705,
831,
884,
921,
3065,
3140,
4092,
2198,
1944,
246,
2964,
108,
2045,
1152,
1921,
2308,
1031,
203,
3173,
4170,
1907,
3890,
810,
1401,
2003,
1690,
506,
647,
1242,
2828,
1761,
1649,
3208,
2249,
1589,
3709,
2931,
5156,
1708,
498,
666,
2613,
834,
3817,
1231,
184,
2851,
1124,
883,
3197,
2261,
3710,
1765,
1553,
2658,
1178,
2639,
2351,
93,
1193,
942,
2538,
2141,
4402,
235,
1821,
870,
1591,
2192,
1709,
1871,
3341,
1618,
4126,
2595,
2334,
603,
651,
69,
701,
268,
2662,
3411,
2555,
1380,
1606,
503,
448,
254,
2371,
2646,
574,
1187,
2309,
1770,
322,
2235,
1292,
1801,
305,
566,
1133,
229,
2067,
2057,
706,
167,
483,
2002,
2672,
3295,
1820,
3561,
3067,
316,
378,
2746,
3452,
1112,
136,
1981,
507,
1651,
2917,
1117,
285,
4591,
182,
2580,
3522,
1304,
335,
3303,
1835,
2504,
1795,
1792,
2248,
674,
1018,
2106,
2449,
1857,
2292,
2845,
976,
3047,
1781,
2600,
2727,
1389,
1281,
52,
3152,
153,
265,
3950,
672,
3485,
3951,
4463,
430,
1183,
365,
278,
2169,
27,
1407,
1336,
2304,
209,
1340,
1730,
2202,
1852,
2403,
2883,
979,
1737,
1062,
631,
2829,
2542,
3876,
2592,
825,
2086,
2226,
3048,
3625,
352,
1417,
3724,
542,
991,
431,
1351,
3938,
1861,
2294,
826,
1361,
2927,
3142,
3503,
1738,
463,
2462,
2723,
582,
1916,
1595,
2808,
400,
3845,
3891,
2868,
3621,
2254,
58,
2492,
1123,
910,
2160,
2614,
1372,
1603,
1196,
1072,
3385,
1700,
3267,
1980,
696,
480,
2430,
920,
799,
1570,
2920,
1951,
2041,
4047,
2540,
1321,
4223,
2469,
3562,
2228,
1271,
2602,
401,
2833,
3351,
2575,
5157,
907,
2312,
1256,
410,
263,
3507,
1582,
996,
678,
1849,
2316,
1480,
908,
3545,
2237,
703,
2322,
667,
1826,
2849,
1531,
2604,
2999,
2407,
3146,
2151,
2630,
1786,
3711,
469,
3542,
497,
3899,
2409,
858,
837,
4446,
3393,
1274,
786,
620,
1845,
2001,
3311,
484,
308,
3367,
1204,
1815,
3691,
2332,
1532,
2557,
1842,
2020,
2724,
1927,
2333,
4440,
567,
22,
1673,
2728,
4475,
1987,
1858,
1144,
1597,
101,
1832,
3601,
12,
974,
3783,
4391,
951,
1412,
1,
3720,
453,
4608,
4041,
528,
1041,
1027,
3230,
2628,
1129,
875,
1051,
3291,
1203,
2262,
1069,
2860,
2799,
2149,
2615,
3278,
144,
1758,
3040,
31,
475,
1680,
366,
2685,
3184,
311,
1642,
4008,
2466,
5036,
1593,
1493,
2809,
216,
1420,
1668,
233,
304,
2128,
3284,
232,
1429,
1768,
1040,
2008,
3407,
2740,
2967,
2543,
242,
2133,
778,
1565,
2022,
2620,
505,
2189,
2756,
1098,
2273,
372,
1614,
708,
553,
2846,
2094,
2278,
169,
3626,
2835,
4161,
228,
2674,
3165,
809,
1454,
1309,
466,
1705,
1095,
900,
3423,
880,
2667,
3751,
5258,
2317,
3109,
2571,
4317,
2766,
1503,
1342,
866,
4447,
1118,
63,
2076,
314,
1881,
1348,
1061,
172,
978,
3515,
1747,
532,
511,
3970,
6,
601,
905,
2699,
3300,
1751,
276,
1467,
3725,
2668,
65,
4239,
2544,
2779,
2556,
1604,
578,
2451,
1802,
992,
2331,
2624,
1320,
3446,
713,
1513,
1013,
103,
2786,
2447,
1661,
886,
1702,
916,
654,
3574,
2031,
1556,
751,
2178,
2821,
2179,
1498,
1538,
2176,
271,
914,
2251,
2080,
1325,
638,
1953,
2937,
3877,
2432,
2754,
95,
3265,
1716,
260,
1227,
4083,
775,
106,
1357,
3254,
426,
1607,
555,
2480,
772,
1985,
244,
2546,
474,
495,
1046,
2611,
1851,
2061,
71,
2089,
1675,
2590,
742,
3758,
2843,
3222,
1433,
267,
2180,
2576,
2826,
2233,
2092,
3913,
2435,
956,
1745,
3075,
856,
2113,
1116,
451,
3,
1988,
2896,
1398,
993,
2463,
1878,
2049,
1341,
2718,
2721,
2870,
2108,
712,
2904,
4363,
2753,
2324,
277,
2872,
2349,
2649,
384,
987,
435,
691,
3000,
922,
164,
3939,
652,
1500,
1184,
4153,
2482,
3373,
2165,
4848,
2335,
3775,
3508,
3154,
2806,
2830,
1554,
2102,
1664,
2530,
1434,
2408,
893,
1547,
2623,
3447,
2832,
2242,
2532,
3169,
2856,
3223,
2078,
49,
3770,
3469,
462,
318,
656,
2259,
3250,
3069,
679,
1629,
2758,
344,
1138,
1104,
3120,
1836,
1283,
3115,
2154,
1437,
4448,
934,
759,
1999,
794,
2862,
1038,
533,
2560,
1722,
2342,
855,
2626,
1197,
1663,
4476,
3127,
85,
4240,
2528,
25,
1111,
1181,
3673,
407,
3470,
4561,
2679,
2713,
768,
1925,
2841,
3986,
1544,
1165,
932,
373,
1240,
2146,
1930,
2673,
721,
4766,
354,
4333,
391,
2963,
187,
61,
3364,
1442,
1102,
330,
1940,
1767,
341,
3809,
4118,
393,
2496,
2062,
2211,
105,
331,
300,
439,
913,
1332,
626,
379,
3304,
1557,
328,
689,
3952,
309,
1555,
931,
317,
2517,
3027,
325,
569,
686,
2107,
3084,
60,
1042,
1333,
2794,
264,
3177,
4014,
1628,
258,
3712,
7,
4464,
1176,
1043,
1778,
683,
114,
1975,
78,
1492,
383,
1886,
510,
386,
645,
5291,
2891,
2069,
3305,
4138,
3867,
2939,
2603,
2493,
1935,
1066,
1848,
3588,
1015,
1282,
1289,
4609,
697,
1453,
3044,
2666,
3611,
1856,
2412,
54,
719,
1330,
568,
3778,
2459,
1748,
788,
492,
551,
1191,
1000,
488,
3394,
3763,
282,
1799,
348,
2016,
1523,
3155,
2390,
1049,
382,
2019,
1788,
1170,
729,
2968,
3523,
897,
3926,
2785,
2938,
3292,
350,
2319,
3238,
1718,
1717,
2655,
3453,
3143,
4465,
161,
2889,
2980,
2009,
1421,
56,
1908,
1640,
2387,
2232,
1917,
1874,
2477,
4921,
148,
83,
3438,
592,
4245,
2882,
1822,
1055,
741,
115,
1496,
1624,
381,
1638,
4592,
1020,
516,
3214,
458,
947,
4575,
1432,
211,
1514,
2926,
1865,
2142,
189,
852,
1221,
1400,
1486,
882,
2299,
4036,
351,
28,
1122,
700,
6479,
6480,
6481,
6482,
6483, # last 512
# Everything below is of no interest for detection purpose
5508,
6484,
3900,
3414,
3974,
4441,
4024,
3537,
4037,
5628,
5099,
3633,
6485,
3148,
6486,
3636,
5509,
3257,
5510,
5973,
5445,
5872,
4941,
4403,
3174,
4627,
5873,
6276,
2286,
4230,
5446,
5874,
5122,
6102,
6103,
4162,
5447,
5123,
5323,
4849,
6277,
3980,
3851,
5066,
4246,
5774,
5067,
6278,
3001,
2807,
5695,
3346,
5775,
5974,
5158,
5448,
6487,
5975,
5976,
5776,
3598,
6279,
5696,
4806,
4211,
4154,
6280,
6488,
6489,
6490,
6281,
4212,
5037,
3374,
4171,
6491,
4562,
4807,
4722,
4827,
5977,
6104,
4532,
4079,
5159,
5324,
5160,
4404,
3858,
5359,
5875,
3975,
4288,
4610,
3486,
4512,
5325,
3893,
5360,
6282,
6283,
5560,
2522,
4231,
5978,
5186,
5449,
2569,
3878,
6284,
5401,
3578,
4415,
6285,
4656,
5124,
5979,
2506,
4247,
4449,
3219,
3417,
4334,
4969,
4329,
6492,
4576,
4828,
4172,
4416,
4829,
5402,
6286,
3927,
3852,
5361,
4369,
4830,
4477,
4867,
5876,
4173,
6493,
6105,
4657,
6287,
6106,
5877,
5450,
6494,
4155,
4868,
5451,
3700,
5629,
4384,
6288,
6289,
5878,
3189,
4881,
6107,
6290,
6495,
4513,
6496,
4692,
4515,
4723,
5100,
3356,
6497,
6291,
3810,
4080,
5561,
3570,
4430,
5980,
6498,
4355,
5697,
6499,
4724,
6108,
6109,
3764,
4050,
5038,
5879,
4093,
3226,
6292,
5068,
5217,
4693,
3342,
5630,
3504,
4831,
4377,
4466,
4309,
5698,
4431,
5777,
6293,
5778,
4272,
3706,
6110,
5326,
3752,
4676,
5327,
4273,
5403,
4767,
5631,
6500,
5699,
5880,
3475,
5039,
6294,
5562,
5125,
4348,
4301,
4482,
4068,
5126,
4593,
5700,
3380,
3462,
5981,
5563,
3824,
5404,
4970,
5511,
3825,
4738,
6295,
6501,
5452,
4516,
6111,
5881,
5564,
6502,
6296,
5982,
6503,
4213,
4163,
3454,
6504,
6112,
4009,
4450,
6113,
4658,
6297,
6114,
3035,
6505,
6115,
3995,
4904,
4739,
4563,
4942,
4110,
5040,
3661,
3928,
5362,
3674,
6506,
5292,
3612,
4791,
5565,
4149,
5983,
5328,
5259,
5021,
4725,
4577,
4564,
4517,
4364,
6298,
5405,
4578,
5260,
4594,
4156,
4157,
5453,
3592,
3491,
6507,
5127,
5512,
4709,
4922,
5984,
5701,
4726,
4289,
6508,
4015,
6116,
5128,
4628,
3424,
4241,
5779,
6299,
4905,
6509,
6510,
5454,
5702,
5780,
6300,
4365,
4923,
3971,
6511,
5161,
3270,
3158,
5985,
4100,
867,
5129,
5703,
6117,
5363,
3695,
3301,
5513,
4467,
6118,
6512,
5455,
4232,
4242,
4629,
6513,
3959,
4478,
6514,
5514,
5329,
5986,
4850,
5162,
5566,
3846,
4694,
6119,
5456,
4869,
5781,
3779,
6301,
5704,
5987,
5515,
4710,
6302,
5882,
6120,
4392,
5364,
5705,
6515,
6121,
6516,
6517,
3736,
5988,
5457,
5989,
4695,
2457,
5883,
4551,
5782,
6303,
6304,
6305,
5130,
4971,
6122,
5163,
6123,
4870,
3263,
5365,
3150,
4871,
6518,
6306,
5783,
5069,
5706,
3513,
3498,
4409,
5330,
5632,
5366,
5458,
5459,
3991,
5990,
4502,
3324,
5991,
5784,
3696,
4518,
5633,
4119,
6519,
4630,
5634,
4417,
5707,
4832,
5992,
3418,
6124,
5993,
5567,
4768,
5218,
6520,
4595,
3458,
5367,
6125,
5635,
6126,
4202,
6521,
4740,
4924,
6307,
3981,
4069,
4385,
6308,
3883,
2675,
4051,
3834,
4302,
4483,
5568,
5994,
4972,
4101,
5368,
6309,
5164,
5884,
3922,
6127,
6522,
6523,
5261,
5460,
5187,
4164,
5219,
3538,
5516,
4111,
3524,
5995,
6310,
6311,
5369,
3181,
3386,
2484,
5188,
3464,
5569,
3627,
5708,
6524,
5406,
5165,
4677,
4492,
6312,
4872,
4851,
5885,
4468,
5996,
6313,
5709,
5710,
6128,
2470,
5886,
6314,
5293,
4882,
5785,
3325,
5461,
5101,
6129,
5711,
5786,
6525,
4906,
6526,
6527,
4418,
5887,
5712,
4808,
2907,
3701,
5713,
5888,
6528,
3765,
5636,
5331,
6529,
6530,
3593,
5889,
3637,
4943,
3692,
5714,
5787,
4925,
6315,
6130,
5462,
4405,
6131,
6132,
6316,
5262,
6531,
6532,
5715,
3859,
5716,
5070,
4696,
5102,
3929,
5788,
3987,
4792,
5997,
6533,
6534,
3920,
4809,
5000,
5998,
6535,
2974,
5370,
6317,
5189,
5263,
5717,
3826,
6536,
3953,
5001,
4883,
3190,
5463,
5890,
4973,
5999,
4741,
6133,
6134,
3607,
5570,
6000,
4711,
3362,
3630,
4552,
5041,
6318,
6001,
2950,
2953,
5637,
4646,
5371,
4944,
6002,
2044,
4120,
3429,
6319,
6537,
5103,
4833,
6538,
6539,
4884,
4647,
3884,
6003,
6004,
4758,
3835,
5220,
5789,
4565,
5407,
6540,
6135,
5294,
4697,
4852,
6320,
6321,
3206,
4907,
6541,
6322,
4945,
6542,
6136,
6543,
6323,
6005,
4631,
3519,
6544,
5891,
6545,
5464,
3784,
5221,
6546,
5571,
4659,
6547,
6324,
6137,
5190,
6548,
3853,
6549,
4016,
4834,
3954,
6138,
5332,
3827,
4017,
3210,
3546,
4469,
5408,
5718,
3505,
4648,
5790,
5131,
5638,
5791,
5465,
4727,
4318,
6325,
6326,
5792,
4553,
4010,
4698,
3439,
4974,
3638,
4335,
3085,
6006,
5104,
5042,
5166,
5892,
5572,
6327,
4356,
4519,
5222,
5573,
5333,
5793,
5043,
6550,
5639,
5071,
4503,
6328,
6139,
6551,
6140,
3914,
3901,
5372,
6007,
5640,
4728,
4793,
3976,
3836,
4885,
6552,
4127,
6553,
4451,
4102,
5002,
6554,
3686,
5105,
6555,
5191,
5072,
5295,
4611,
5794,
5296,
6556,
5893,
5264,
5894,
4975,
5466,
5265,
4699,
4976,
4370,
4056,
3492,
5044,
4886,
6557,
5795,
4432,
4769,
4357,
5467,
3940,
4660,
4290,
6141,
4484,
4770,
4661,
3992,
6329,
4025,
4662,
5022,
4632,
4835,
4070,
5297,
4663,
4596,
5574,
5132,
5409,
5895,
6142,
4504,
5192,
4664,
5796,
5896,
3885,
5575,
5797,
5023,
4810,
5798,
3732,
5223,
4712,
5298,
4084,
5334,
5468,
6143,
4052,
4053,
4336,
4977,
4794,
6558,
5335,
4908,
5576,
5224,
4233,
5024,
4128,
5469,
5225,
4873,
6008,
5045,
4729,
4742,
4633,
3675,
4597,
6559,
5897,
5133,
5577,
5003,
5641,
5719,
6330,
6560,
3017,
2382,
3854,
4406,
4811,
6331,
4393,
3964,
4946,
6561,
2420,
3722,
6562,
4926,
4378,
3247,
1736,
4442,
6332,
5134,
6333,
5226,
3996,
2918,
5470,
4319,
4003,
4598,
4743,
4744,
4485,
3785,
3902,
5167,
5004,
5373,
4394,
5898,
6144,
4874,
1793,
3997,
6334,
4085,
4214,
5106,
5642,
4909,
5799,
6009,
4419,
4189,
3330,
5899,
4165,
4420,
5299,
5720,
5227,
3347,
6145,
4081,
6335,
2876,
3930,
6146,
3293,
3786,
3910,
3998,
5900,
5300,
5578,
2840,
6563,
5901,
5579,
6147,
3531,
5374,
6564,
6565,
5580,
4759,
5375,
6566,
6148,
3559,
5643,
6336,
6010,
5517,
6337,
6338,
5721,
5902,
3873,
6011,
6339,
6567,
5518,
3868,
3649,
5722,
6568,
4771,
4947,
6569,
6149,
4812,
6570,
2853,
5471,
6340,
6341,
5644,
4795,
6342,
6012,
5723,
6343,
5724,
6013,
4349,
6344,
3160,
6150,
5193,
4599,
4514,
4493,
5168,
4320,
6345,
4927,
3666,
4745,
5169,
5903,
5005,
4928,
6346,
5725,
6014,
4730,
4203,
5046,
4948,
3395,
5170,
6015,
4150,
6016,
5726,
5519,
6347,
5047,
3550,
6151,
6348,
4197,
4310,
5904,
6571,
5581,
2965,
6152,
4978,
3960,
4291,
5135,
6572,
5301,
5727,
4129,
4026,
5905,
4853,
5728,
5472,
6153,
6349,
4533,
2700,
4505,
5336,
4678,
3583,
5073,
2994,
4486,
3043,
4554,
5520,
6350,
6017,
5800,
4487,
6351,
3931,
4103,
5376,
6352,
4011,
4321,
4311,
4190,
5136,
6018,
3988,
3233,
4350,
5906,
5645,
4198,
6573,
5107,
3432,
4191,
3435,
5582,
6574,
4139,
5410,
6353,
5411,
3944,
5583,
5074,
3198,
6575,
6354,
4358,
6576,
5302,
4600,
5584,
5194,
5412,
6577,
6578,
5585,
5413,
5303,
4248,
5414,
3879,
4433,
6579,
4479,
5025,
4854,
5415,
6355,
4760,
4772,
3683,
2978,
4700,
3797,
4452,
3965,
3932,
3721,
4910,
5801,
6580,
5195,
3551,
5907,
3221,
3471,
3029,
6019,
3999,
5908,
5909,
5266,
5267,
3444,
3023,
3828,
3170,
4796,
5646,
4979,
4259,
6356,
5647,
5337,
3694,
6357,
5648,
5338,
4520,
4322,
5802,
3031,
3759,
4071,
6020,
5586,
4836,
4386,
5048,
6581,
3571,
4679,
4174,
4949,
6154,
4813,
3787,
3402,
3822,
3958,
3215,
3552,
5268,
4387,
3933,
4950,
4359,
6021,
5910,
5075,
3579,
6358,
4234,
4566,
5521,
6359,
3613,
5049,
6022,
5911,
3375,
3702,
3178,
4911,
5339,
4521,
6582,
6583,
4395,
3087,
3811,
5377,
6023,
6360,
6155,
4027,
5171,
5649,
4421,
4249,
2804,
6584,
2270,
6585,
4000,
4235,
3045,
6156,
5137,
5729,
4140,
4312,
3886,
6361,
4330,
6157,
4215,
6158,
3500,
3676,
4929,
4331,
3713,
4930,
5912,
4265,
3776,
3368,
5587,
4470,
4855,
3038,
4980,
3631,
6159,
6160,
4132,
4680,
6161,
6362,
3923,
4379,
5588,
4255,
6586,
4121,
6587,
6363,
4649,
6364,
3288,
4773,
4774,
6162,
6024,
6365,
3543,
6588,
4274,
3107,
3737,
5050,
5803,
4797,
4522,
5589,
5051,
5730,
3714,
4887,
5378,
4001,
4523,
6163,
5026,
5522,
4701,
4175,
2791,
3760,
6589,
5473,
4224,
4133,
3847,
4814,
4815,
4775,
3259,
5416,
6590,
2738,
6164,
6025,
5304,
3733,
5076,
5650,
4816,
5590,
6591,
6165,
6592,
3934,
5269,
6593,
3396,
5340,
6594,
5804,
3445,
3602,
4042,
4488,
5731,
5732,
3525,
5591,
4601,
5196,
6166,
6026,
5172,
3642,
4612,
3202,
4506,
4798,
6366,
3818,
5108,
4303,
5138,
5139,
4776,
3332,
4304,
2915,
3415,
4434,
5077,
5109,
4856,
2879,
5305,
4817,
6595,
5913,
3104,
3144,
3903,
4634,
5341,
3133,
5110,
5651,
5805,
6167,
4057,
5592,
2945,
4371,
5593,
6596,
3474,
4182,
6367,
6597,
6168,
4507,
4279,
6598,
2822,
6599,
4777,
4713,
5594,
3829,
6169,
3887,
5417,
6170,
3653,
5474,
6368,
4216,
2971,
5228,
3790,
4579,
6369,
5733,
6600,
6601,
4951,
4746,
4555,
6602,
5418,
5475,
6027,
3400,
4665,
5806,
6171,
4799,
6028,
5052,
6172,
3343,
4800,
4747,
5006,
6370,
4556,
4217,
5476,
4396,
5229,
5379,
5477,
3839,
5914,
5652,
5807,
4714,
3068,
4635,
5808,
6173,
5342,
4192,
5078,
5419,
5523,
5734,
6174,
4557,
6175,
4602,
6371,
6176,
6603,
5809,
6372,
5735,
4260,
3869,
5111,
5230,
6029,
5112,
6177,
3126,
4681,
5524,
5915,
2706,
3563,
4748,
3130,
6178,
4018,
5525,
6604,
6605,
5478,
4012,
4837,
6606,
4534,
4193,
5810,
4857,
3615,
5479,
6030,
4082,
3697,
3539,
4086,
5270,
3662,
4508,
4931,
5916,
4912,
5811,
5027,
3888,
6607,
4397,
3527,
3302,
3798,
2775,
2921,
2637,
3966,
4122,
4388,
4028,
4054,
1633,
4858,
5079,
3024,
5007,
3982,
3412,
5736,
6608,
3426,
3236,
5595,
3030,
6179,
3427,
3336,
3279,
3110,
6373,
3874,
3039,
5080,
5917,
5140,
4489,
3119,
6374,
5812,
3405,
4494,
6031,
4666,
4141,
6180,
4166,
6032,
5813,
4981,
6609,
5081,
4422,
4982,
4112,
3915,
5653,
3296,
3983,
6375,
4266,
4410,
5654,
6610,
6181,
3436,
5082,
6611,
5380,
6033,
3819,
5596,
4535,
5231,
5306,
5113,
6612,
4952,
5918,
4275,
3113,
6613,
6376,
6182,
6183,
5814,
3073,
4731,
4838,
5008,
3831,
6614,
4888,
3090,
3848,
4280,
5526,
5232,
3014,
5655,
5009,
5737,
5420,
5527,
6615,
5815,
5343,
5173,
5381,
4818,
6616,
3151,
4953,
6617,
5738,
2796,
3204,
4360,
2989,
4281,
5739,
5174,
5421,
5197,
3132,
5141,
3849,
5142,
5528,
5083,
3799,
3904,
4839,
5480,
2880,
4495,
3448,
6377,
6184,
5271,
5919,
3771,
3193,
6034,
6035,
5920,
5010,
6036,
5597,
6037,
6378,
6038,
3106,
5422,
6618,
5423,
5424,
4142,
6619,
4889,
5084,
4890,
4313,
5740,
6620,
3437,
5175,
5307,
5816,
4199,
5198,
5529,
5817,
5199,
5656,
4913,
5028,
5344,
3850,
6185,
2955,
5272,
5011,
5818,
4567,
4580,
5029,
5921,
3616,
5233,
6621,
6622,
6186,
4176,
6039,
6379,
6380,
3352,
5200,
5273,
2908,
5598,
5234,
3837,
5308,
6623,
6624,
5819,
4496,
4323,
5309,
5201,
6625,
6626,
4983,
3194,
3838,
4167,
5530,
5922,
5274,
6381,
6382,
3860,
3861,
5599,
3333,
4292,
4509,
6383,
3553,
5481,
5820,
5531,
4778,
6187,
3955,
3956,
4324,
4389,
4218,
3945,
4325,
3397,
2681,
5923,
4779,
5085,
4019,
5482,
4891,
5382,
5383,
6040,
4682,
3425,
5275,
4094,
6627,
5310,
3015,
5483,
5657,
4398,
5924,
3168,
4819,
6628,
5925,
6629,
5532,
4932,
4613,
6041,
6630,
4636,
6384,
4780,
4204,
5658,
4423,
5821,
3989,
4683,
5822,
6385,
4954,
6631,
5345,
6188,
5425,
5012,
5384,
3894,
6386,
4490,
4104,
6632,
5741,
5053,
6633,
5823,
5926,
5659,
5660,
5927,
6634,
5235,
5742,
5824,
4840,
4933,
4820,
6387,
4859,
5928,
4955,
6388,
4143,
3584,
5825,
5346,
5013,
6635,
5661,
6389,
5014,
5484,
5743,
4337,
5176,
5662,
6390,
2836,
6391,
3268,
6392,
6636,
6042,
5236,
6637,
4158,
6638,
5744,
5663,
4471,
5347,
3663,
4123,
5143,
4293,
3895,
6639,
6640,
5311,
5929,
5826,
3800,
6189,
6393,
6190,
5664,
5348,
3554,
3594,
4749,
4603,
6641,
5385,
4801,
6043,
5827,
4183,
6642,
5312,
5426,
4761,
6394,
5665,
6191,
4715,
2669,
6643,
6644,
5533,
3185,
5427,
5086,
5930,
5931,
5386,
6192,
6044,
6645,
4781,
4013,
5745,
4282,
4435,
5534,
4390,
4267,
6045,
5746,
4984,
6046,
2743,
6193,
3501,
4087,
5485,
5932,
5428,
4184,
4095,
5747,
4061,
5054,
3058,
3862,
5933,
5600,
6646,
5144,
3618,
6395,
3131,
5055,
5313,
6396,
4650,
4956,
3855,
6194,
3896,
5202,
4985,
4029,
4225,
6195,
6647,
5828,
5486,
5829,
3589,
3002,
6648,
6397,
4782,
5276,
6649,
6196,
6650,
4105,
3803,
4043,
5237,
5830,
6398,
4096,
3643,
6399,
3528,
6651,
4453,
3315,
4637,
6652,
3984,
6197,
5535,
3182,
3339,
6653,
3096,
2660,
6400,
6654,
3449,
5934,
4250,
4236,
6047,
6401,
5831,
6655,
5487,
3753,
4062,
5832,
6198,
6199,
6656,
3766,
6657,
3403,
4667,
6048,
6658,
4338,
2897,
5833,
3880,
2797,
3780,
4326,
6659,
5748,
5015,
6660,
5387,
4351,
5601,
4411,
6661,
3654,
4424,
5935,
4339,
4072,
5277,
4568,
5536,
6402,
6662,
5238,
6663,
5349,
5203,
6200,
5204,
6201,
5145,
4536,
5016,
5056,
4762,
5834,
4399,
4957,
6202,
6403,
5666,
5749,
6664,
4340,
6665,
5936,
5177,
5667,
6666,
6667,
3459,
4668,
6404,
6668,
6669,
4543,
6203,
6670,
4276,
6405,
4480,
5537,
6671,
4614,
5205,
5668,
6672,
3348,
2193,
4763,
6406,
6204,
5937,
5602,
4177,
5669,
3419,
6673,
4020,
6205,
4443,
4569,
5388,
3715,
3639,
6407,
6049,
4058,
6206,
6674,
5938,
4544,
6050,
4185,
4294,
4841,
4651,
4615,
5488,
6207,
6408,
6051,
5178,
3241,
3509,
5835,
6208,
4958,
5836,
4341,
5489,
5278,
6209,
2823,
5538,
5350,
5206,
5429,
6675,
4638,
4875,
4073,
3516,
4684,
4914,
4860,
5939,
5603,
5389,
6052,
5057,
3237,
5490,
3791,
6676,
6409,
6677,
4821,
4915,
4106,
5351,
5058,
4243,
5539,
4244,
5604,
4842,
4916,
5239,
3028,
3716,
5837,
5114,
5605,
5390,
5940,
5430,
6210,
4332,
6678,
5540,
4732,
3667,
3840,
6053,
4305,
3408,
5670,
5541,
6410,
2744,
5240,
5750,
6679,
3234,
5606,
6680,
5607,
5671,
3608,
4283,
4159,
4400,
5352,
4783,
6681,
6411,
6682,
4491,
4802,
6211,
6412,
5941,
6413,
6414,
5542,
5751,
6683,
4669,
3734,
5942,
6684,
6415,
5943,
5059,
3328,
4670,
4144,
4268,
6685,
6686,
6687,
6688,
4372,
3603,
6689,
5944,
5491,
4373,
3440,
6416,
5543,
4784,
4822,
5608,
3792,
4616,
5838,
5672,
3514,
5391,
6417,
4892,
6690,
4639,
6691,
6054,
5673,
5839,
6055,
6692,
6056,
5392,
6212,
4038,
5544,
5674,
4497,
6057,
6693,
5840,
4284,
5675,
4021,
4545,
5609,
6418,
4454,
6419,
6213,
4113,
4472,
5314,
3738,
5087,
5279,
4074,
5610,
4959,
4063,
3179,
4750,
6058,
6420,
6214,
3476,
4498,
4716,
5431,
4960,
4685,
6215,
5241,
6694,
6421,
6216,
6695,
5841,
5945,
6422,
3748,
5946,
5179,
3905,
5752,
5545,
5947,
4374,
6217,
4455,
6423,
4412,
6218,
4803,
5353,
6696,
3832,
5280,
6219,
4327,
4702,
6220,
6221,
6059,
4652,
5432,
6424,
3749,
4751,
6425,
5753,
4986,
5393,
4917,
5948,
5030,
5754,
4861,
4733,
6426,
4703,
6697,
6222,
4671,
5949,
4546,
4961,
5180,
6223,
5031,
3316,
5281,
6698,
4862,
4295,
4934,
5207,
3644,
6427,
5842,
5950,
6428,
6429,
4570,
5843,
5282,
6430,
6224,
5088,
3239,
6060,
6699,
5844,
5755,
6061,
6431,
2701,
5546,
6432,
5115,
5676,
4039,
3993,
3327,
4752,
4425,
5315,
6433,
3941,
6434,
5677,
4617,
4604,
3074,
4581,
6225,
5433,
6435,
6226,
6062,
4823,
5756,
5116,
6227,
3717,
5678,
4717,
5845,
6436,
5679,
5846,
6063,
5847,
6064,
3977,
3354,
6437,
3863,
5117,
6228,
5547,
5394,
4499,
4524,
6229,
4605,
6230,
4306,
4500,
6700,
5951,
6065,
3693,
5952,
5089,
4366,
4918,
6701,
6231,
5548,
6232,
6702,
6438,
4704,
5434,
6703,
6704,
5953,
4168,
6705,
5680,
3420,
6706,
5242,
4407,
6066,
3812,
5757,
5090,
5954,
4672,
4525,
3481,
5681,
4618,
5395,
5354,
5316,
5955,
6439,
4962,
6707,
4526,
6440,
3465,
4673,
6067,
6441,
5682,
6708,
5435,
5492,
5758,
5683,
4619,
4571,
4674,
4804,
4893,
4686,
5493,
4753,
6233,
6068,
4269,
6442,
6234,
5032,
4705,
5146,
5243,
5208,
5848,
6235,
6443,
4963,
5033,
4640,
4226,
6236,
5849,
3387,
6444,
6445,
4436,
4437,
5850,
4843,
5494,
4785,
4894,
6709,
4361,
6710,
5091,
5956,
3331,
6237,
4987,
5549,
6069,
6711,
4342,
3517,
4473,
5317,
6070,
6712,
6071,
4706,
6446,
5017,
5355,
6713,
6714,
4988,
5436,
6447,
4734,
5759,
6715,
4735,
4547,
4456,
4754,
6448,
5851,
6449,
6450,
3547,
5852,
5318,
6451,
6452,
5092,
4205,
6716,
6238,
4620,
4219,
5611,
6239,
6072,
4481,
5760,
5957,
5958,
4059,
6240,
6453,
4227,
4537,
6241,
5761,
4030,
4186,
5244,
5209,
3761,
4457,
4876,
3337,
5495,
5181,
6242,
5959,
5319,
5612,
5684,
5853,
3493,
5854,
6073,
4169,
5613,
5147,
4895,
6074,
5210,
6717,
5182,
6718,
3830,
6243,
2798,
3841,
6075,
6244,
5855,
5614,
3604,
4606,
5496,
5685,
5118,
5356,
6719,
6454,
5960,
5357,
5961,
6720,
4145,
3935,
4621,
5119,
5962,
4261,
6721,
6455,
4786,
5963,
4375,
4582,
6245,
6246,
6247,
6076,
5437,
4877,
5856,
3376,
4380,
6248,
4160,
6722,
5148,
6456,
5211,
6457,
6723,
4718,
6458,
6724,
6249,
5358,
4044,
3297,
6459,
6250,
5857,
5615,
5497,
5245,
6460,
5498,
6725,
6251,
6252,
5550,
3793,
5499,
2959,
5396,
6461,
6462,
4572,
5093,
5500,
5964,
3806,
4146,
6463,
4426,
5762,
5858,
6077,
6253,
4755,
3967,
4220,
5965,
6254,
4989,
5501,
6464,
4352,
6726,
6078,
4764,
2290,
5246,
3906,
5438,
5283,
3767,
4964,
2861,
5763,
5094,
6255,
6256,
4622,
5616,
5859,
5860,
4707,
6727,
4285,
4708,
4824,
5617,
6257,
5551,
4787,
5212,
4965,
4935,
4687,
6465,
6728,
6466,
5686,
6079,
3494,
4413,
2995,
5247,
5966,
5618,
6729,
5967,
5764,
5765,
5687,
5502,
6730,
6731,
6080,
5397,
6467,
4990,
6258,
6732,
4538,
5060,
5619,
6733,
4719,
5688,
5439,
5018,
5149,
5284,
5503,
6734,
6081,
4607,
6259,
5120,
3645,
5861,
4583,
6260,
4584,
4675,
5620,
4098,
5440,
6261,
4863,
2379,
3306,
4585,
5552,
5689,
4586,
5285,
6735,
4864,
6736,
5286,
6082,
6737,
4623,
3010,
4788,
4381,
4558,
5621,
4587,
4896,
3698,
3161,
5248,
4353,
4045,
6262,
3754,
5183,
4588,
6738,
6263,
6739,
6740,
5622,
3936,
6741,
6468,
6742,
6264,
5095,
6469,
4991,
5968,
6743,
4992,
6744,
6083,
4897,
6745,
4256,
5766,
4307,
3108,
3968,
4444,
5287,
3889,
4343,
6084,
4510,
6085,
4559,
6086,
4898,
5969,
6746,
5623,
5061,
4919,
5249,
5250,
5504,
5441,
6265,
5320,
4878,
3242,
5862,
5251,
3428,
6087,
6747,
4237,
5624,
5442,
6266,
5553,
4539,
6748,
2585,
3533,
5398,
4262,
6088,
5150,
4736,
4438,
6089,
6267,
5505,
4966,
6749,
6268,
6750,
6269,
5288,
5554,
3650,
6090,
6091,
4624,
6092,
5690,
6751,
5863,
4270,
5691,
4277,
5555,
5864,
6752,
5692,
4720,
4865,
6470,
5151,
4688,
4825,
6753,
3094,
6754,
6471,
3235,
4653,
6755,
5213,
5399,
6756,
3201,
4589,
5865,
4967,
6472,
5866,
6473,
5019,
3016,
6757,
5321,
4756,
3957,
4573,
6093,
4993,
5767,
4721,
6474,
6758,
5625,
6759,
4458,
6475,
6270,
6760,
5556,
4994,
5214,
5252,
6271,
3875,
5768,
6094,
5034,
5506,
4376,
5769,
6761,
2120,
6476,
5253,
5770,
6762,
5771,
5970,
3990,
5971,
5557,
5558,
5772,
6477,
6095,
2787,
4641,
5972,
5121,
6096,
6097,
6272,
6763,
3703,
5867,
5507,
6273,
4206,
6274,
4789,
6098,
6764,
3619,
3646,
3833,
3804,
2394,
3788,
4936,
3978,
4866,
4899,
6099,
6100,
5559,
6478,
6765,
3599,
5868,
6101,
5869,
5870,
6275,
6766,
4527,
6767,
)
# flake8: noqa
# coding: utf-8
from __future__ import unicode_literals
import re
from ..compat import compat_HTTPError
from ..utils import ExtractorError, dict_get, int_or_none, js_to_json, parse_iso8601
from .common import InfoExtractor
class ZypeIE(InfoExtractor):
    """Extractor for videos embedded through the Zype player (player.zype.com)."""

    # Zype video IDs are hexadecimal strings.
    _ID_RE = r"[\da-fA-F]+"
    # Embed URL skeleton up to (but not including) the credential value;
    # Zype accepts several credential parameter names
    # (access_token, api_key, app_key, player_key).
    _COMMON_RE = r"//player\.zype\.com/embed/%s\.(?:js|json|html)\?.*?(?:access_token|(?:ap[ip]|player)_key)="
    _VALID_URL = r"https?:%s[^&]+" % (_COMMON_RE % ("(?P<id>%s)" % _ID_RE))
    _TEST = {
        "url": "https://player.zype.com/embed/5b400b834b32992a310622b9.js?api_key=jZ9GUhRmxcPvX7M3SlfejB6Hle9jyHTdk2jVxG7wOHPLODgncEKVdPYBhuz9iWXQ&autoplay=false&controls=true&da=false",
        "md5": "eaee31d474c76a955bdaba02a505c595",
        "info_dict": {
            "id": "5b400b834b32992a310622b9",
            "ext": "mp4",
            "title": "Smoky Barbecue Favorites",
            "thumbnail": r"re:^https?://.*\.jpe?g",
            "description": "md5:5ff01e76316bd8d46508af26dc86023b",
            "timestamp": 1504915200,
            "upload_date": "20170909",
        },
    }

    @staticmethod
    def _extract_urls(webpage):
        # Collect the src of every <script> tag that embeds a Zype player so
        # generic pages can delegate extraction to this extractor.
        return [
            mobj.group("url")
            for mobj in re.finditer(
                r'<script[^>]+\bsrc=(["\'])(?P<url>(?:https?:)?%s.+?)\1'
                % (ZypeIE._COMMON_RE % ZypeIE._ID_RE),
                webpage,
            )
        ]

    def _real_extract(self, url):
        """Download the Zype API response for *url* and build the info dict."""
        video_id = self._match_id(url)
        try:
            # Request the .json flavour of the embed URL to get structured data.
            response = self._download_json(
                re.sub(r"\.(?:js|html)\?", ".json?", url), video_id
            )["response"]
        except ExtractorError as e:
            # For auth/permission failures the API body carries a
            # human-readable "message"; surface it as an expected error.
            if isinstance(e.cause, compat_HTTPError) and e.cause.code in (
                400,
                401,
                403,
            ):
                raise ExtractorError(
                    self._parse_json(e.cause.read().decode(), video_id)["message"],
                    expected=True,
                )
            raise
        body = response["body"]
        video = response["video"]
        title = video["title"]
        if isinstance(body, dict):
            # Structured body: renditions are listed explicitly in "outputs".
            formats = []
            for output in body.get("outputs", []):
                output_url = output.get("url")
                if not output_url:
                    continue
                name = output.get("name")
                if name == "m3u8":
                    # HLS manifest: expand into its variant formats.
                    formats = self._extract_m3u8_formats(
                        output_url,
                        video_id,
                        "mp4",
                        "m3u8_native",
                        m3u8_id="hls",
                        fatal=False,
                    )
                else:
                    f = {
                        "format_id": name,
                        "tbr": int_or_none(output.get("bitrate")),
                        "url": output_url,
                    }
                    if name in ("m4a", "mp3"):
                        # Audio-only renditions.
                        f["vcodec"] = "none"
                    else:
                        f.update(
                            {
                                "height": int_or_none(output.get("height")),
                                "width": int_or_none(output.get("width")),
                            }
                        )
                    formats.append(f)
            text_tracks = body.get("subtitles") or []
        else:
            # Plain-text (JS/HTML) body: scrape the player configuration.
            m3u8_url = self._search_regex(
                r'(["\'])(?P<url>(?:(?!\1).)+\.m3u8(?:(?!\1).)*)\1',
                body,
                "m3u8 url",
                group="url",
                default=None,
            )
            if not m3u8_url:
                # Fall back to the player "sources" object; Verizon Media
                # integrations derive the manifest URL from the source id.
                source = self._search_regex(
                    r"(?s)sources\s*:\s*\[\s*({.+?})\s*\]", body, "source"
                )

                def get_attr(key):
                    return self._search_regex(
                        r'\b%s\s*:\s*([\'"])(?P<val>(?:(?!\1).)+)\1' % key,
                        source,
                        key,
                        group="val",
                    )

                if get_attr("integration") == "verizon-media":
                    m3u8_url = "https://content.uplynk.com/%s.m3u8" % get_attr("id")
            formats = self._extract_m3u8_formats(
                m3u8_url, video_id, "mp4", "m3u8_native", m3u8_id="hls"
            )
            text_tracks = self._search_regex(
                r"textTracks\s*:\s*(\[[^]]+\])", body, "text tracks", default=None
            )
            if text_tracks:
                # The player config is JavaScript, not strict JSON.
                text_tracks = self._parse_json(text_tracks, video_id, js_to_json, False)
        self._sort_formats(formats)
        subtitles = {}
        if text_tracks:
            for text_track in text_tracks:
                tt_url = dict_get(text_track, ("file", "src"))
                if not tt_url:
                    continue
                subtitles.setdefault(text_track.get("label") or "English", []).append(
                    {
                        "url": tt_url,
                    }
                )
        thumbnails = []
        for thumbnail in video.get("thumbnails", []):
            thumbnail_url = thumbnail.get("url")
            if not thumbnail_url:
                continue
            thumbnails.append(
                {
                    "url": thumbnail_url,
                    "width": int_or_none(thumbnail.get("width")),
                    "height": int_or_none(thumbnail.get("height")),
                }
            )
        return {
            "id": video_id,
            "display_id": video.get("friendly_title"),
            "title": title,
            "thumbnails": thumbnails,
            "description": dict_get(
                video, ("description", "ott_description", "short_description")
            ),
            "timestamp": parse_iso8601(video.get("published_at")),
            "duration": int_or_none(video.get("duration")),
            "view_count": int_or_none(video.get("request_count")),
            "average_rating": int_or_none(video.get("rating")),
            "season_number": int_or_none(video.get("season")),
            "episode_number": int_or_none(video.get("episode")),
            "formats": formats,
            "subtitles": subtitles,
        }
|
lib | signals | # -*- coding: utf-8 -*-
"""Contains ``ckan`` and ``ckanext`` namespaces for signals as well as a bunch
of predefined core-level signals.
Check :doc:`signals` for extra detais.
"""
import flask.signals
import flask_login.signals
from blinker import Namespace
ckan = Namespace()
ckanext = Namespace()
request_started = ckan.signal("request_started")
"""This signal is sent when the request context is set up, before any
request processing happens.
"""
flask.signals.request_started.connect(request_started.send)
request_finished = ckan.signal("request_finished")
"""This signal is sent right before the response is sent to the
client.
"""
flask.signals.request_finished.connect(request_finished.send)
register_blueprint = ckan.signal("register_blueprint")
"""This signal is sent when a blueprint for dataset/resource/group/organization
is going to be registered inside the application.
"""
resource_download = ckan.signal("resource_download")
"""This signal is sent just before a file from an uploaded resource is sent
to the user.
"""
user_logged_in = ckan.signal("logged_in")
""" Sent when a user is logged in.
"""
flask_login.signals.user_logged_in.connect(user_logged_in.send)
user_logged_out = ckan.signal("logged_out")
"""Sent when a user is logged out
"""
flask_login.signals.user_logged_out.connect(user_logged_out.send)
failed_login = ckan.signal("failed_login")
"""This signal is sent after failed login attempt.
"""
user_created = ckan.signal("user_created")
"""This signal is sent when new user created.
"""
request_password_reset = ckan.signal("request_password_reset")
"""This signal is sent just after mail with password reset link sent
to user.
"""
perform_password_reset = ckan.signal("perform_password_reset")
"""This signal is sent when user submitted password reset form
providing new password.
"""
action_succeeded = ckan.signal("action_succeed")
"""This signal is sent when an action finished without an exception.
"""
datastore_upsert = ckanext.signal("datastore_upsert")
"""This signal is sent after datasetore records inserted/updated via
`datastore_upsert`.
"""
datastore_delete = ckanext.signal("datastore_delete")
"""This signal is sent after successful call to `datastore_delete`.
"""
|
clientScripts | post_store_aip_hook | #!/usr/bin/env python
import argparse
import os
import shutil
import sys
import django
import requests
django.setup()
import elasticSearchFunctions
import storageService as storage_service
from archivematicaFunctions import find_transfer_path_from_ingest
# archivematicaCommon
from custom_handlers import get_script_logger
from django.conf import settings as mcpclient_settings
from django.db import transaction
# dashboard
from main import models
logger = get_script_logger("archivematica.mcp.client.post_store_aip_hook")
COMPLETED = 0
NO_ACTION = 1
ERROR = 2
def delete_transfer_directory(job, sip_uuid):
    """Delete the transfer directory that sourced this SIP.

    This is only expected to work when the SIP was not arranged in backlog.
    Returns the absolute path that was removed.
    """
    # All remaining (non-removed) files of the SIP must share a single
    # transfer location; .get() raises when there are zero or several.
    location_query = (
        models.File.objects.filter(
            removedtime__isnull=True,
            sip_id=sip_uuid,
            transfer__currentlocation__isnull=False,
        )
        .values_list("transfer__currentlocation", flat=True)
        .distinct()
    )
    current_location = location_query.get()
    resolved = find_transfer_path_from_ingest(
        current_location, mcpclient_settings.SHARED_DIRECTORY
    )
    transfer_path = os.path.abspath(resolved)
    # Safety guard: never delete anything outside the processing directory.
    if not transfer_path.startswith(mcpclient_settings.PROCESSING_DIRECTORY):
        raise Exception("Transfer directory was found in an unexpected location.")
    shutil.rmtree(transfer_path, ignore_errors=False)
    return transfer_path
def dspace_handle_to_archivesspace(job, sip_uuid):
    """Fetch the DSpace handle from the Storage Service and send to ArchivesSpace.

    Returns COMPLETED on success, NO_ACTION when there is nothing to do
    (no ArchivesSpace association or no DSpace handle stored), or ERROR
    when the final ArchivesSpace update request fails.
    """
    # Get association to ArchivesSpace if it exists
    try:
        digital_object = models.ArchivesSpaceDigitalObject.objects.get(sip_id=sip_uuid)
    except models.ArchivesSpaceDigitalObject.DoesNotExist:
        job.pyprint("SIP", sip_uuid, "not associated with an ArchivesSpace component")
        return NO_ACTION
    job.pyprint(
        "Digital Object",
        digital_object.remoteid,
        "for SIP",
        digital_object.sip_id,
        "found",
    )
    logger.info(
        "Digital Object %s for SIP %s found",
        digital_object.remoteid,
        digital_object.sip_id,
    )
    # Get dspace handle from SS
    file_info = storage_service.get_file_info(uuid=sip_uuid)[0]
    try:
        handle = file_info["misc_attributes"]["handle"]
    except KeyError:
        job.pyprint("AIP has no DSpace handle stored")
        return NO_ACTION
    job.pyprint("DSpace handle:", handle)
    logger.info("DSpace handle: %s", handle)
    # POST Dspace handle to ArchivesSpace
    # Get ArchivesSpace config
    config = models.DashboardSetting.objects.get_dict("upload-archivesspace_v0.0")
    archivesspace_url = config["base_url"]
    # Log in
    url = archivesspace_url + "/users/" + config["user"] + "/login"
    params = {"password": config["passwd"]}
    logger.debug("Log in to ArchivesSpace URL: %s", url)
    response = requests.post(
        url, params=params, timeout=mcpclient_settings.AGENTARCHIVES_CLIENT_TIMEOUT
    )
    logger.debug("Response: %s %s", response, response.content)
    # NOTE(review): the login HTTP status is not checked; a failed login
    # surfaces as a KeyError/ValueError on "session" below — confirm intended.
    session_id = response.json()["session"]
    headers = {"X-ArchivesSpace-Session": session_id}
    # Get Digital Object from ArchivesSpace
    url = archivesspace_url + digital_object.remoteid
    logger.debug("Get Digital Object info URL: %s", url)
    response = requests.get(
        url, headers=headers, timeout=mcpclient_settings.AGENTARCHIVES_CLIENT_TIMEOUT
    )
    logger.debug("Response: %s %s", response, response.content)
    body = response.json()
    # Update: append the handle as a new file version on the existing record
    url = archivesspace_url + digital_object.remoteid
    file_version = {
        "file_uri": handle,
        "use_statement": config["use_statement"],
        "xlink_show_attribute": config["xlink_show"],
        "xlink_actuate_attribute": config["xlink_actuate"],
    }
    body["file_versions"].append(file_version)
    logger.debug("Modified Digital Object: %s", body)
    response = requests.post(
        url,
        headers=headers,
        json=body,
        timeout=mcpclient_settings.AGENTARCHIVES_CLIENT_TIMEOUT,
    )
    job.pyprint("Update response:", response, response.content)
    logger.debug("Response: %s %s", response, response.content)
    if response.status_code != 200:
        job.pyprint("Error updating", digital_object.remoteid)
        return ERROR
    return COMPLETED
def post_store_hook(job, sip_uuid):
    """
    Hook for doing any work after an AIP is stored successfully.

    Currently this:
    - marks arranged files as stored (``aip_created``) and cleans fully
      stored transfers out of the backlog (and Elasticsearch when
      transfer indexing is enabled),
    - forwards any DSpace handle to ArchivesSpace,
    - notifies the Storage Service via the post-store callback,
    - best-effort deletes the source transfer directory when SIP
      arrangement was not used.
    """
    # Only talk to Elasticsearch when transfer indexing is enabled.
    update_es = "transfers" in mcpclient_settings.SEARCH_ENABLED
    if update_es:
        elasticSearchFunctions.setup_reading_from_conf(mcpclient_settings)
        client = elasticSearchFunctions.get_client()
    else:
        logger.info("Skipping indexing: Transfers indexing is currently disabled.")
    # SIP ARRANGEMENT
    # Mark files in this SIP as in an AIP (aip_created)
    file_uuids = models.File.objects.filter(sip=sip_uuid).values_list("uuid", flat=True)
    models.SIPArrange.objects.filter(file_uuid__in=file_uuids).update(aip_created=True)
    # Check if any of component transfers are completely stored
    # TODO Storage service should index AIPs, knows when to update ES
    transfer_uuids = set(
        models.SIPArrange.objects.filter(file_uuid__in=file_uuids).values_list(
            "transfer_uuid", flat=True
        )
    )
    for transfer_uuid in transfer_uuids:
        job.pyprint("Checking if transfer", transfer_uuid, "is fully stored...")
        arranged_uuids = set(
            models.SIPArrange.objects.filter(transfer_uuid=transfer_uuid)
            .filter(aip_created=True)
            .values_list("file_uuid", flat=True)
        )
        backlog_uuids = set(
            models.File.objects.filter(transfer=transfer_uuid).values_list(
                "uuid", flat=True
            )
        )
        # If all backlog UUIDs have been arranged
        if arranged_uuids == backlog_uuids:
            job.pyprint(
                "Transfer",
                transfer_uuid,
                "fully stored, sending delete request to storage service, deleting from transfer backlog",
            )
            # Submit delete req to SS (not actually delete), remove from ES
            storage_service.request_file_deletion(
                uuid=transfer_uuid,
                user_id=0,
                user_email="archivematica system",
                reason_for_deletion="All files in Transfer are now in AIPs.",
            )
            if update_es:
                elasticSearchFunctions.remove_sip_transfer_files(client, transfer_uuid)
    # DSPACE HANDLE TO ARCHIVESSPACE
    dspace_handle_to_archivesspace(job, sip_uuid)
    # POST-STORE CALLBACK
    storage_service.post_store_aip_callback(sip_uuid)
    # When not using SIP arrangement, we perform best-effort deletion of the
    # original transfer directory under currentlyProcessing.
    if not transfer_uuids:
        try:
            transfer_dir = delete_transfer_directory(job, sip_uuid)
        except Exception as err:
            # Best effort only: report on the job's stderr and carry on.
            job.pyprint("Failed to delete transfer directory: ", err, file=sys.stderr)
            return
        job.pyprint("Transfer directory deleted: ", transfer_dir)
def call(jobs):
    """Entry point: run the post-store hook for each queued job.

    Each job's first positional argument is the SIP UUID; the whole batch
    runs inside a single database transaction.
    """
    arg_parser = argparse.ArgumentParser()
    arg_parser.add_argument("sip_uuid", help="%SIPUUID%")
    with transaction.atomic():
        for current_job in jobs:
            with current_job.JobContext(logger=logger):
                parsed = arg_parser.parse_args(current_job.args[1:])
                current_job.set_status(post_store_hook(current_job, parsed.sip_uuid))
|
ui | app | import argparse
import logging
import os
import re
import meshroom
from meshroom.core import nodesDesc
from meshroom.core.taskManager import TaskManager
from meshroom.ui import commands, components
from meshroom.ui.components.clipboard import ClipboardHelper
from meshroom.ui.components.filepath import FilepathHelper
from meshroom.ui.components.scene3D import Scene3DHelper, Transformations3DHelper
from meshroom.ui.components.thumbnail import ThumbnailCache
from meshroom.ui.palette import PaletteManager
from meshroom.ui.reconstruction import Reconstruction
from meshroom.ui.utils import QmlInstantEngine
from PySide2 import QtCore
from PySide2.QtCore import (
Property,
QJsonValue,
QSettings,
Qt,
QtMsgType,
QUrl,
Signal,
Slot,
qInstallMessageHandler,
)
from PySide2.QtGui import QIcon
from PySide2.QtWidgets import QApplication
class MessageHandler(object):
    """
    MessageHandler that translates Qt logs to Python logging system.

    Also contains and filters a list of blacklisted QML warnings that end up in
    the standard error even when setOutputWarningsToStandardError is set to
    false on the engine.
    """

    # Whether QML warnings should be forwarded at all.
    outputQmlWarnings = bool(os.environ.get("MESHROOM_OUTPUT_QML_WARNINGS", False))

    # Qt message type -> Python logging function.
    logFunctions = {
        QtMsgType.QtDebugMsg: logging.debug,
        QtMsgType.QtInfoMsg: logging.info,
        QtMsgType.QtWarningMsg: logging.warning,
        QtMsgType.QtCriticalMsg: logging.critical,
        QtMsgType.QtSystemMsg: logging.critical,
        QtMsgType.QtFatalMsg: logging.fatal,
    }

    # Warnings known to be inoffensive and related to QML but not silenced
    # even when 'MESHROOM_OUTPUT_QML_WARNINGS' is set to False
    qmlWarningsBlacklist = (
        'Failed to download scene at QUrl("")',
        "QVariant(Invalid) Please check your QParameters",
        "Texture will be invalid for this frame",
    )

    @classmethod
    def handler(cls, messageType, context, message):
        """Message handler remapping Qt logs to Python logging system."""
        if not cls.outputQmlWarnings:
            # Without MESHROOM_OUTPUT_QML_WARNINGS a QML load failure is
            # reduced to "QQmlApplicationEngine failed to load component"
            # with no detail; point the user at the environment variable.
            if "QQmlApplicationEngine failed to load component" in message:
                logging.warning(
                    "Set MESHROOM_OUTPUT_QML_WARNINGS=1 to get a detailed error message."
                )
            else:
                # Silently drop the known-harmless QML noise.
                for blacklisted in cls.qmlWarningsBlacklist:
                    if blacklisted in message:
                        return
        MessageHandler.logFunctions[messageType](message)
class MeshroomApp(QApplication):
    """Meshroom UI Application.

    Parses the command line, configures logging and the Qt application,
    sets up the QML engine with all context properties, and loads either
    the given project or a new empty reconstruction.
    """

    def __init__(self, args):
        # args: full argv list (args[0] is the program name).
        QtArgs = [args[0], "-style", "fusion"] + args[
            1:
        ]  # force Fusion style by default
        parser = argparse.ArgumentParser(
            prog=args[0], description="Launch Meshroom UI.", add_help=True
        )
        parser.add_argument(
            "project",
            metavar="PROJECT",
            type=str,
            nargs="?",
            help="Meshroom project file (e.g. myProject.mg) or folder with images to reconstruct.",
        )
        parser.add_argument(
            "-i",
            "--import",
            metavar="IMAGES/FOLDERS",
            type=str,
            nargs="*",
            help="Import images or folder with images to reconstruct.",
        )
        parser.add_argument(
            "-I",
            "--importRecursive",
            metavar="FOLDERS",
            type=str,
            nargs="*",
            help="Import images to reconstruct from specified folder and sub-folders.",
        )
        parser.add_argument(
            "-s",
            "--save",
            metavar="PROJECT.mg",
            type=str,
            default="",
            help="Save the created scene.",
        )
        parser.add_argument(
            "-p",
            "--pipeline",
            metavar="FILE.mg/" + "/".join(meshroom.core.pipelineTemplates),
            type=str,
            default=os.environ.get("MESHROOM_DEFAULT_PIPELINE", "photogrammetry"),
            help="Override the default Meshroom pipeline with this external or template graph.",
        )
        parser.add_argument(
            "--submitLabel",
            metavar="SUBMITLABEL",
            type=str,
            help="Label of a node in the submitter",
            default="{projectName} [Meshroom]",
        )
        parser.add_argument(
            "--verbose",
            help="Verbosity level",
            default=os.environ.get("MESHROOM_VERBOSE", "warning"),
            choices=["fatal", "error", "warning", "info", "debug", "trace"],
        )
        args = parser.parse_args(args[1:])
        # Map CLI verbosity names to Python logging levels; "trace" has no
        # direct equivalent and falls back to DEBUG.
        logStringToPython = {
            "fatal": logging.FATAL,
            "error": logging.ERROR,
            "warning": logging.WARNING,
            "info": logging.INFO,
            "debug": logging.DEBUG,
            "trace": logging.DEBUG,
        }
        logging.getLogger().setLevel(logStringToPython[args.verbose])
        QApplication.setAttribute(Qt.AA_EnableHighDpiScaling)
        super(MeshroomApp, self).__init__(QtArgs)
        self.setOrganizationName("AliceVision")
        self.setApplicationName("Meshroom")
        self.setApplicationVersion(meshroom.__version_label__)
        font = self.font()
        font.setPointSize(9)
        self.setFont(font)
        pwd = os.path.dirname(__file__)
        self.setWindowIcon(QIcon(os.path.join(pwd, "img/meshroom.svg")))
        # Initialize thumbnail cache:
        # - read related environment variables
        # - clean cache directory and make sure it exists on disk
        ThumbnailCache.initialize()
        # QML engine setup
        qmlDir = os.path.join(pwd, "qml")
        url = os.path.join(qmlDir, "main.qml")
        self.engine = QmlInstantEngine()
        self.engine.addFilesFromDirectory(qmlDir, recursive=True)
        self.engine.setWatching(os.environ.get("MESHROOM_INSTANT_CODING", False))
        # whether to output qml warnings to stderr (disable by default)
        self.engine.setOutputWarningsToStandardError(MessageHandler.outputQmlWarnings)
        if QtCore.__version_info__ < (5, 14, 2):
            # After 5.14.1, it gets stuck during logging
            qInstallMessageHandler(MessageHandler.handler)
        self.engine.addImportPath(qmlDir)
        components.registerTypes()
        # expose available node types that can be instantiated
        self.engine.rootContext().setContextProperty(
            "_nodeTypes",
            {n: {"category": nodesDesc[n].category} for n in sorted(nodesDesc.keys())},
        )
        # instantiate Reconstruction object
        self._undoStack = commands.UndoStack(self)
        self._taskManager = TaskManager(self)
        r = Reconstruction(
            undoStack=self._undoStack,
            taskManager=self._taskManager,
            defaultPipeline=args.pipeline,
            parent=self,
        )
        r.setSubmitLabel(args.submitLabel)
        self.engine.rootContext().setContextProperty("_reconstruction", r)
        # those helpers should be available from QML Utils module as singletons, but:
        # - qmlRegisterUncreatableType is not yet available in PySide2
        # - declaring them as singleton in qmldir file causes random crash at exit
        # => expose them as context properties instead
        self.engine.rootContext().setContextProperty(
            "Filepath", FilepathHelper(parent=self)
        )
        self.engine.rootContext().setContextProperty(
            "Scene3DHelper", Scene3DHelper(parent=self)
        )
        self.engine.rootContext().setContextProperty(
            "Transformations3DHelper", Transformations3DHelper(parent=self)
        )
        self.engine.rootContext().setContextProperty(
            "Clipboard", ClipboardHelper(parent=self)
        )
        self.engine.rootContext().setContextProperty(
            "ThumbnailCache", ThumbnailCache(parent=self)
        )
        # additional context properties
        self.engine.rootContext().setContextProperty(
            "_PaletteManager", PaletteManager(self.engine, parent=self)
        )
        self.engine.rootContext().setContextProperty("MeshroomApp", self)
        # request any potential computation to stop on exit
        self.aboutToQuit.connect(r.stopChildThreads)
        if args.project and not os.path.isfile(args.project):
            raise RuntimeError(
                "Meshroom Command Line Error: 'PROJECT' argument should be a Meshroom project file (.mg).\n"
                "Invalid value: '{}'".format(args.project)
            )
        if args.project:
            r.load(args.project)
            self.addRecentProjectFile(args.project)
        else:
            r.new()
        # import is a python keyword, so we have to access the attribute by a string
        if getattr(args, "import", None):
            r.importImagesFromFolder(getattr(args, "import"), recursive=False)
        if args.importRecursive:
            r.importImagesFromFolder(args.importRecursive, recursive=True)
        if args.save:
            if os.path.isfile(args.save):
                raise RuntimeError(
                    "Meshroom Command Line Error: Cannot save the new Meshroom project as the file (.mg) already exists.\n"
                    "Invalid value: '{}'".format(args.save)
                )
            projectFolder = os.path.dirname(args.save)
            if not os.path.isdir(projectFolder):
                # Create at most one missing folder level for the new project.
                if not os.path.isdir(os.path.dirname(projectFolder)):
                    raise RuntimeError(
                        "Meshroom Command Line Error: Cannot save the new Meshroom project file (.mg) as the parent of the folder does not exists.\n"
                        "Invalid value: '{}'".format(args.save)
                    )
                os.mkdir(projectFolder)
            r.saveAs(args.save)
            self.addRecentProjectFile(args.save)
        self.engine.load(os.path.normpath(url))

    def _pipelineTemplateFiles(self):
        """Return the known pipeline templates as display dicts (name/key/path)."""
        templates = []
        for key in sorted(meshroom.core.pipelineTemplates.keys()):
            # Use uppercase letters in the names as separators to format the templates' name nicely
            # e.g: the template "panoramaHdr" will be shown as "Panorama Hdr" in the menu
            name = " ".join(re.findall("[A-Z][^A-Z]*", key[0].upper() + key[1:]))
            variant = {
                "name": name,
                "key": key,
                "path": meshroom.core.pipelineTemplates[key],
            }
            templates.append(variant)
        return templates

    def _pipelineTemplateNames(self):
        """Return only the display names of the pipeline templates."""
        return [p["name"] for p in self.pipelineTemplateFiles]

    @Slot()
    def reloadTemplateList(self):
        """Re-scan all template folders and notify QML about the new list."""
        for f in meshroom.core.pipelineTemplatesFolders:
            meshroom.core.loadPipelineTemplates(f)
        self.pipelineTemplateFilesChanged.emit()

    def _recentProjectFiles(self):
        """Read the list of recent project file paths from QSettings."""
        projects = []
        settings = QSettings()
        settings.beginGroup("RecentFiles")
        size = settings.beginReadArray("Projects")
        for i in range(size):
            settings.setArrayIndex(i)
            p = settings.value("filepath")
            if p:
                projects.append(p)
        settings.endArray()
        return projects

    @Slot(str)
    @Slot(QUrl)
    def addRecentProjectFile(self, projectFile):
        """Insert *projectFile* at the front of the persistent recent list.

        Accepts a str or a QUrl; the value is normalized to a local path
        when possible. Keeps at most 20 entries, without duplicates.
        """
        if not isinstance(projectFile, (QUrl, str)):
            raise TypeError("Unexpected data type: {}".format(projectFile.__class__))
        if isinstance(projectFile, QUrl):
            projectFileNorm = projectFile.toLocalFile()
            if not projectFileNorm:
                projectFileNorm = projectFile.toString()
        else:
            projectFileNorm = QUrl(projectFile).toLocalFile()
            if not projectFileNorm:
                projectFileNorm = QUrl.fromLocalFile(projectFile).toLocalFile()
        projects = self._recentProjectFiles()
        # remove duplicates while preserving order
        from collections import OrderedDict

        uniqueProjects = OrderedDict.fromkeys(projects)
        projects = list(uniqueProjects)
        # remove previous usage of the value
        if projectFileNorm in uniqueProjects:
            projects.remove(projectFileNorm)
        # add the new value in the first place
        projects.insert(0, projectFileNorm)
        # keep only the 20 first elements
        projects = projects[0:20]
        settings = QSettings()
        settings.beginGroup("RecentFiles")
        settings.beginWriteArray("Projects")
        for i, p in enumerate(projects):
            settings.setArrayIndex(i)
            settings.setValue("filepath", p)
        settings.endArray()
        settings.sync()
        self.recentProjectFilesChanged.emit()

    @Slot(str)
    @Slot(QUrl)
    def removeRecentProjectFile(self, projectFile):
        """Remove *projectFile* from the persistent recent-project list.

        No-op when the normalized path is not present in the list.
        """
        if not isinstance(projectFile, (QUrl, str)):
            raise TypeError("Unexpected data type: {}".format(projectFile.__class__))
        if isinstance(projectFile, QUrl):
            projectFileNorm = projectFile.toLocalFile()
            if not projectFileNorm:
                projectFileNorm = projectFile.toString()
        else:
            projectFileNorm = QUrl(projectFile).toLocalFile()
            if not projectFileNorm:
                projectFileNorm = QUrl.fromLocalFile(projectFile).toLocalFile()
        projects = self._recentProjectFiles()
        # remove duplicates while preserving order
        from collections import OrderedDict

        uniqueProjects = OrderedDict.fromkeys(projects)
        projects = list(uniqueProjects)
        # remove previous usage of the value
        if projectFileNorm not in uniqueProjects:
            return
        projects.remove(projectFileNorm)
        settings = QSettings()
        settings.beginGroup("RecentFiles")
        settings.beginWriteArray("Projects")
        for i, p in enumerate(projects):
            settings.setArrayIndex(i)
            settings.setValue("filepath", p)
        settings.endArray()
        settings.sync()
        self.recentProjectFilesChanged.emit()

    def _recentImportedImagesFolders(self):
        """Read the list of recently imported image folders from QSettings."""
        folders = []
        settings = QSettings()
        settings.beginGroup("RecentFiles")
        size = settings.beginReadArray("ImagesFolders")
        for i in range(size):
            settings.setArrayIndex(i)
            f = settings.value("path")
            if f:
                folders.append(f)
        settings.endArray()
        return folders

    @Slot(QUrl)
    def addRecentImportedImagesFolder(self, imagesFolder):
        """Insert *imagesFolder* (QUrl) at the front of the recent-folder list."""
        if isinstance(imagesFolder, QUrl):
            folderPath = imagesFolder.toLocalFile()
            if not folderPath:
                folderPath = imagesFolder.toString()
        else:
            raise TypeError("Unexpected data type: {}".format(imagesFolder.__class__))
        folders = self._recentImportedImagesFolders()
        # remove duplicates while preserving order
        from collections import OrderedDict

        uniqueFolders = OrderedDict.fromkeys(folders)
        folders = list(uniqueFolders)
        # remove previous usage of the value
        if folderPath in uniqueFolders:
            folders.remove(folderPath)
        # add the new value in the first place
        folders.insert(0, folderPath)
        # keep only the first three elements to have a backup if one of the folders goes missing
        folders = folders[0:3]
        settings = QSettings()
        settings.beginGroup("RecentFiles")
        settings.beginWriteArray("ImagesFolders")
        for i, p in enumerate(folders):
            settings.setArrayIndex(i)
            settings.setValue("path", p)
        settings.endArray()
        settings.sync()
        self.recentImportedImagesFoldersChanged.emit()

    @Slot(QUrl)
    def removeRecentImportedImagesFolder(self, imagesFolder):
        """Remove *imagesFolder* (QUrl) from the recent-folder list (no-op if absent)."""
        if isinstance(imagesFolder, QUrl):
            folderPath = imagesFolder.toLocalFile()
            if not folderPath:
                folderPath = imagesFolder.toString()
        else:
            raise TypeError("Unexpected data type: {}".format(imagesFolder.__class__))
        folders = self._recentImportedImagesFolders()
        # remove duplicates while preserving order
        from collections import OrderedDict

        uniqueFolders = OrderedDict.fromkeys(folders)
        folders = list(uniqueFolders)
        # remove previous usage of the value
        if folderPath not in uniqueFolders:
            return
        folders.remove(folderPath)
        settings = QSettings()
        settings.beginGroup("RecentFiles")
        settings.beginWriteArray("ImagesFolders")
        for i, f in enumerate(folders):
            settings.setArrayIndex(i)
            settings.setValue("path", f)
        settings.endArray()
        settings.sync()
        self.recentImportedImagesFoldersChanged.emit()

    @Slot(str, result=str)
    def markdownToHtml(self, md):
        """
        Convert markdown to HTML.

        Args:
            md (str): the markdown text to convert

        Returns:
            str: the resulting HTML string
        """
        try:
            from markdown import markdown
        except ImportError:
            # Optional dependency: fall back to returning the raw markdown.
            logging.warning(
                "Can't import markdown module, returning source markdown text."
            )
            return md
        return markdown(md)

    def _systemInfo(self):
        """Return basic platform and Python version info (for the About dialog)."""
        import platform
        import sys

        return {
            "platform": "{} {}".format(platform.system(), platform.release()),
            "python": "Python {}".format(sys.version.split(" ")[0]),
        }

    systemInfo = Property(QJsonValue, _systemInfo, constant=True)

    def _changelogModel(self):
        """
        Get the complete changelog for the application.

        Model provides:
            title: the name of the changelog
            localUrl: the local path to CHANGES.md
            onlineUrl: the remote path to CHANGES.md
        """
        rootDir = os.environ.get("MESHROOM_INSTALL_DIR", os.getcwd())
        return [
            {
                "title": "Changelog",
                "localUrl": os.path.join(rootDir, "CHANGES.md"),
                "onlineUrl": "https://raw.githubusercontent.com/alicevision/meshroom/develop/CHANGES.md",
            }
        ]

    def _licensesModel(self):
        """
        Get info about open-source licenses for the application.

        Model provides:
            title: the name of the project
            localUrl: the local path to COPYING.md
            onlineUrl: the remote path to COPYING.md
        """
        rootDir = os.environ.get("MESHROOM_INSTALL_DIR", os.getcwd())
        return [
            {
                "title": "Meshroom",
                "localUrl": os.path.join(rootDir, "COPYING.md"),
                "onlineUrl": "https://raw.githubusercontent.com/alicevision/meshroom/develop/COPYING.md",
            },
            {
                "title": "AliceVision",
                "localUrl": os.path.join(
                    rootDir, "aliceVision", "share", "aliceVision", "COPYING.md"
                ),
                "onlineUrl": "https://raw.githubusercontent.com/alicevision/AliceVision/develop/COPYING.md",
            },
        ]

    def _default8bitViewerEnabled(self):
        """Whether the 8-bit image viewer is enabled via environment variable."""
        return bool(os.environ.get("MESHROOM_USE_8BIT_VIEWER", False))

    # QML-facing properties and their change-notification signals.
    changelogModel = Property("QVariantList", _changelogModel, constant=True)
    licensesModel = Property("QVariantList", _licensesModel, constant=True)
    pipelineTemplateFilesChanged = Signal()
    recentProjectFilesChanged = Signal()
    recentImportedImagesFoldersChanged = Signal()
    pipelineTemplateFiles = Property(
        "QVariantList", _pipelineTemplateFiles, notify=pipelineTemplateFilesChanged
    )
    pipelineTemplateNames = Property(
        "QVariantList", _pipelineTemplateNames, notify=pipelineTemplateFilesChanged
    )
    recentProjectFiles = Property(
        "QVariantList", _recentProjectFiles, notify=recentProjectFilesChanged
    )
    recentImportedImagesFolders = Property(
        "QVariantList",
        _recentImportedImagesFolders,
        notify=recentImportedImagesFoldersChanged,
    )
    default8bitViewerEnabled = Property(bool, _default8bitViewerEnabled, constant=True)
|
util | fileoperations | #!/usr/bin/python3
# -*- coding: utf-8 -*-
#
# SoundConverter - GNOME application for converting between audio formats.
# Copyright 2004 Lars Wirzenius
# Copyright 2005-2020 Gautier Portet
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; version 3 of the License.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307
# USA
import os
import re
import urllib.error
import urllib.parse
import urllib.request
from gi.repository import Gio
from soundconverter.util.logger import logger
def unquote_filename(filename):
    """Decode URL-escaped characters in *filename* (e.g. '%20' -> ' ').

    Non-string inputs are converted to str first.
    """
    as_text = str(filename)
    return urllib.parse.unquote(as_text)
def beautify_uri(uri):
    """Return a plain, human-readable path for *uri*.

    For a real URI the scheme prefix is dropped and the path part is
    URL-decoded. A plain path is returned unchanged, so any literal
    '%20' sequences it contains are preserved.
    """
    scheme, path = split_uri(uri)
    if scheme is None:
        return uri
    return unquote_filename(path)
def vfs_walk(uri):
    """Similar to os.path.walk, but with Gio.

    uri -- the base folder uri.
    return a list of uri.
    """
    filelist = []
    try:
        # '*' requests all file attributes. FileQueryInfoFlags is the enum
        # these Gio calls expect; the previous FileMonitorFlags.NONE only
        # worked by accident because both enums share the value 0.
        dirlist = Gio.file_parse_name(uri).enumerate_children(
            "*", Gio.FileQueryInfoFlags.NONE, None
        )
        for file_info in dirlist:
            child = dirlist.get_child(file_info)
            # Use a separate name instead of rebinding the 'uri' parameter,
            # so error logging below still reports the folder being walked.
            child_uri = child.get_uri()
            file_type = child.query_file_type(Gio.FileQueryInfoFlags.NONE, None)
            if file_type == Gio.FileType.DIRECTORY:
                # Recurse into sub-folders.
                filelist.extend(vfs_walk(child_uri))
            if file_type == Gio.FileType.REGULAR:
                filelist.append(str(child_uri))
    except Exception as e:
        # this is impossible to write unittests for, because this only happens
        # when the owner of this directory is e.g. root
        logger.error('Failed to walk "%s": "%s"', uri, e)
    return filelist
def vfs_getparent(path):
    """Return the Gio.File of the parent folder of *path*."""
    return Gio.file_parse_name(path).get_parent()
def vfs_unlink(filename):
    """Delete the file denoted by *filename* through Gio."""
    return Gio.file_parse_name(filename).delete(None)
def vfs_rename(original, newname):
    """Move/rename a file with Gio, creating missing target folders."""
    source = Gio.file_parse_name(original)
    target = Gio.file_parse_name(newname)
    parent = target.get_parent()
    if not parent.query_exists(None):
        logger.debug("Creating folder: '{}'".format(parent.get_uri()))
        Gio.File.make_directory_with_parents(parent, None)
    source.move(target, Gio.FileCopyFlags.NONE, None, None, None)
def vfs_exists(filename):
    """Check if file or URI exists."""
    target = filename
    if not is_uri(target):
        # gio does not support relative path syntax
        target = os.path.realpath(target)
    return Gio.file_parse_name(target).query_exists(None)
def split_uri(uri):
    """Split *uri* into a (scheme-plus-authority, path) pair.

    The first element is None when *uri* is not a URI; the second element
    is the filename part, which is still percent-quoted.

    Raises ValueError for non-string input.
    """
    if not isinstance(uri, str):
        raise ValueError("cannot split {} {}".format(type(uri), uri))
    parts = re.match(r"^([a-zA-Z]+://([^/]+?)?)?(/.*)", uri)
    if parts is None:
        # Neither a URI nor an absolute path.
        return None, uri
    return parts.group(1), parts.group(3)
def is_uri(uri):
    """Return True when *uri* carries a scheme prefix such as 'file://'."""
    scheme, _path = split_uri(uri)
    return scheme is not None
def filename_to_uri(filename, prefix="file://"):
    """Convert a filename to a valid uri.

    Parameters
    ----------
    filename : string
        Filename can be a relative or absolute path, or an URI. If an URI,
        only characters that are not escaped yet will be escaped.
    prefix : string
        for example 'file://'
    """
    scheme, path = split_uri(filename)
    if not scheme:
        # Plain path: make it absolute and quote everything.  A literal
        # '%25' here may be part of e.g. an album name, so ' %20' quotes
        # to '%20%2520'.
        return prefix + urllib.parse.quote(os.path.realpath(filename))
    # Already a URI; essentially returned as is.  To guarantee uniform
    # escaping (tags inserted after the scheme may contain raw spaces),
    # decode the path once and re-quote it, never touching the scheme:
    # ' %20' -> '  ' -> '%20%20', not '%20%2520'.
    return scheme + urllib.parse.quote(unquote_filename(path))
# GStreamer gnomevfssrc helpers
def vfs_encode_filename(filename):
    """Quote *filename* into a full URI, as GStreamer's gnomevfssrc expects."""
    return filename_to_uri(filename)
def file_encode_filename(filename):
    """Turn a local URI into a path with shell-escaped spaces.

    NOTE(review): assumes *filename* is a valid local URI -- Gio returns
    None for non-local URIs, which would raise AttributeError here; confirm
    callers guarantee this.
    """
    return Gio.get_local_path_from_uri(filename).replace(" ", r"\ ")
|
implant | add | import wx
from logbook import Logger
from service.fit import Fit
pyfalog = Logger(__name__)
class CalcAddImplantCommand(wx.Command):
    """Undoable command that adds an implant to a fit.

    ``oldImplantInfo``/``oldPosition`` are filled in by Do() with whatever
    implant ``makeRoom()`` displaced, so Undo() (or a failed Do()) can
    restore the previous state.
    """

    def __init__(self, fitID, implantInfo, position=None):
        wx.Command.__init__(self, True, "Add Implant")
        self.fitID = fitID
        self.newImplantInfo = implantInfo
        self.newPosition = position
        self.oldImplantInfo = None
        self.oldPosition = None

    def _restoreDisplaced(self):
        # Rollback helper shared by Do()'s failure paths and Undo():
        # re-adds the implant which makeRoom() displaced.
        cmd = CalcAddImplantCommand(
            fitID=self.fitID,
            implantInfo=self.oldImplantInfo,
            position=self.oldPosition,
        )
        return cmd.Do()

    def Do(self):
        """Add the implant; returns False (no state change) on any failure."""
        pyfalog.debug(
            "Doing addition of implant {} to fit {}".format(
                self.newImplantInfo, self.fitID
            )
        )
        fit = Fit.getInstance().getFit(self.fitID)
        # Implants are unique per fit; adding a duplicate is a no-op.
        if any(self.newImplantInfo.itemID == i.itemID for i in fit.implants):
            pyfalog.debug("Skipping as such implant is already on the fit")
            return False
        newImplant = self.newImplantInfo.toImplant()
        if newImplant is None:
            return False
        # makeRoom() may drop a conflicting implant; remember it for rollback.
        self.oldPosition, self.oldImplantInfo = fit.implants.makeRoom(newImplant)
        if self.newPosition is not None:
            fit.implants.insert(self.newPosition, newImplant)
            if newImplant not in fit.implants:
                pyfalog.warning("Failed to insert to list")
                self._restoreDisplaced()
                return False
        else:
            fit.implants.append(newImplant)
            if newImplant not in fit.implants:
                pyfalog.warning("Failed to append to list")
                self._restoreDisplaced()
                return False
        self.newPosition = fit.implants.index(newImplant)
        return True

    def Undo(self):
        """Remove the added implant, or re-add the one it displaced."""
        pyfalog.debug(
            "Undo addition of implant {} to fit {}".format(
                self.newImplantInfo, self.fitID
            )
        )
        if self.oldImplantInfo is not None and self.oldPosition is not None:
            # Re-adding the displaced implant also evicts the new one.
            return self._restoreDisplaced()
        # Imported here to avoid a circular import with the remove command.
        from .remove import CalcRemoveImplantCommand
        cmd = CalcRemoveImplantCommand(fitID=self.fitID, position=self.newPosition)
        return cmd.Do()
|
gui | colorpreview | # This file is part of MyPaint.
# Copyright (C) 2013-2018 by the MyPaint Development Team
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
"""Color preview widget / current color indicator, for the status bar."""
# TODO: This *might* evolve to a color preview + alpha selector, possibly
# TODO: with a history row taking up the bottom. For now let's draw it at
# TODO: an aspect ratio of about 1:5 and see how users like it.
from __future__ import division, print_function
from .colors import PreviousCurrentColorAdjuster
class BrushColorIndicator(PreviousCurrentColorAdjuster):
    """Previous/Current color adjuster bound to app.brush_color_manager"""
    __gtype_name__ = "MyPaintBrushColorIndicator"
    # No separate details dialog: clicks are handled by _clicked_cb instead.
    HAS_DETAILS_DIALOG = False
    def __init__(self):
        PreviousCurrentColorAdjuster.__init__(self)
        # The color manager only exists once the app is up, so bind lazily
        # when the widget is realized.
        self.connect("realize", self._init_color_manager)
        self._app = None
        self.clicked += self._clicked_cb
    def _init_color_manager(self, widget):
        # Imported here to avoid a circular import at module load time.
        from gui.application import get_app
        self._app = get_app()
        mgr = self._app.brush_color_manager
        assert mgr is not None
        self.set_color_manager(mgr)
    def _clicked_cb(self, adj, event, pos):
        """Toggle the color chooser when the widget's left half is clicked."""
        x0, y0 = pos
        w = self.get_allocated_width()
        # Ignore clicks on the right half.  NOTE(review): presumably that
        # half shows the previous color -- confirm against the base class.
        if x0 > w // 2:
            return
        chooser = self._app.drawWindow.color_chooser
        if chooser.get_visible():
            chooser.hide()
        else:
            chooser.popup(
                widget=self,
                above=True,
                textwards=True,
                event=event,
            )
|
Gui | Boundary | # -*- coding: utf-8 -*-
# ***************************************************************************
# * Copyright (c) 2019 sliptonic <shopinthewoods@gmail.com> *
# * *
# * This program is free software; you can redistribute it and/or modify *
# * it under the terms of the GNU Lesser General Public License (LGPL) *
# * as published by the Free Software Foundation; either version 2 of *
# * the License, or (at your option) any later version. *
# * for detail see the LICENCE text file. *
# * *
# * This program is distributed in the hope that it will be useful, *
# * but WITHOUT ANY WARRANTY; without even the implied warranty of *
# * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *
# * GNU Library General Public License for more details. *
# * *
# * You should have received a copy of the GNU Library General Public *
# * License along with this program; if not, write to the Free Software *
# * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 *
# * USA *
# * *
# ***************************************************************************
import FreeCAD
import FreeCADGui
import Path
import Path.Dressup.Boundary as PathDressupPathBoundary
import PathGui
from PySide import QtGui
from PySide.QtCore import QT_TRANSLATE_NOOP
if False:
Path.Log.setLevel(Path.Log.Level.DEBUG, Path.Log.thisModule())
Path.Log.trackModule(Path.Log.thisModule())
else:
Path.Log.setLevel(Path.Log.Level.INFO, Path.Log.thisModule())
translate = FreeCAD.Qt.translate
class TaskPanel(object):
    """Task panel editing a Boundary dress-up's stock and Inside flag.

    The panel lives inside a document transaction opened by the command
    that created it: accept() commits, reject()/abort() roll back.
    """
    def __init__(self, obj, viewProvider):
        self.obj = obj
        self.viewProvider = viewProvider
        self.form = FreeCADGui.PySideUic.loadUi(":/panels/DressupPathBoundary.ui")
        if obj.Stock:
            # Remember the stock's visibility so cleanup() can restore it;
            # show it while editing.
            self.visibilityBoundary = obj.Stock.ViewObject.Visibility
            obj.Stock.ViewObject.Visibility = True
        else:
            self.visibilityBoundary = False
        self.buttonBox = None
        self.isDirty = False
        # Cached stock editor widgets, created on demand by updateStockEditor().
        self.stockFromBase = None
        self.stockFromExisting = None
        self.stockCreateBox = None
        self.stockCreateCylinder = None
        self.stockEdit = None
    def getStandardButtons(self):
        # Ok / Apply / Cancel are handled by accept(), clicked() and reject().
        return int(
            QtGui.QDialogButtonBox.Ok
            | QtGui.QDialogButtonBox.Apply
            | QtGui.QDialogButtonBox.Cancel
        )
    def modifyStandardButtons(self, buttonBox):
        # Keep a handle on the button box so set{Dirty,Clean} can toggle Apply.
        self.buttonBox = buttonBox
    def setDirty(self):
        """Mark unsaved edits and enable the Apply button."""
        self.isDirty = True
        self.buttonBox.button(QtGui.QDialogButtonBox.Apply).setEnabled(True)
    def setClean(self):
        """Mark the panel saved and disable the Apply button."""
        self.isDirty = False
        self.buttonBox.button(QtGui.QDialogButtonBox.Apply).setEnabled(False)
    def clicked(self, button):
        # callback for standard buttons
        if button == QtGui.QDialogButtonBox.Apply:
            self.updateDressup()
            FreeCAD.ActiveDocument.recompute()
    def abort(self):
        """Roll back without touching the GUI edit state (e.g. on unsetEdit)."""
        FreeCAD.ActiveDocument.abortTransaction()
        self.cleanup(False)
    def reject(self):
        """Cancel: roll back the transaction and close the dialog."""
        FreeCAD.ActiveDocument.abortTransaction()
        self.cleanup(True)
    def accept(self):
        """Ok: push pending edits, commit the transaction and close."""
        if self.isDirty:
            self.updateDressup()
        FreeCAD.ActiveDocument.commitTransaction()
        self.cleanup(True)
    def cleanup(self, gui):
        """Detach from the view provider; *gui* also closes the dialog."""
        self.viewProvider.clearTaskPanel()
        if gui:
            FreeCADGui.ActiveDocument.resetEdit()
            FreeCADGui.Control.closeDialog()
            FreeCAD.ActiveDocument.recompute()
        if self.obj.Stock:
            # Restore the visibility recorded in __init__.
            self.obj.Stock.ViewObject.Visibility = self.visibilityBoundary
    def updateDressup(self):
        """Write the form values back into the dress-up object."""
        if self.obj.Inside != self.form.stockInside.isChecked():
            self.obj.Inside = self.form.stockInside.isChecked()
        self.stockEdit.getFields(self.obj)
        self.setClean()
    def updateStockEditor(self, index, force=False):
        """Activate the stock editor for *index*; -1 selects the editor
        matching the object's current stock (used during panel setup)."""
        # Imported lazily; these pull in the heavyweight Job GUI modules.
        import Path.Main.Gui.Job as PathJobGui
        import Path.Main.Stock as PathStock
        # NOTE(review): PathStock appears unused in this method -- confirm.
        def setupFromBaseEdit():
            # Editor deriving stock from the base object's bound box.
            Path.Log.track(index, force)
            if force or not self.stockFromBase:
                self.stockFromBase = PathJobGui.StockFromBaseBoundBoxEdit(
                    self.obj, self.form, force
                )
            self.stockEdit = self.stockFromBase
        def setupCreateBoxEdit():
            # Editor for an explicit box-shaped stock.
            Path.Log.track(index, force)
            if force or not self.stockCreateBox:
                self.stockCreateBox = PathJobGui.StockCreateBoxEdit(
                    self.obj, self.form, force
                )
            self.stockEdit = self.stockCreateBox
        def setupCreateCylinderEdit():
            # Editor for an explicit cylinder-shaped stock.
            Path.Log.track(index, force)
            if force or not self.stockCreateCylinder:
                self.stockCreateCylinder = PathJobGui.StockCreateCylinderEdit(
                    self.obj, self.form, force
                )
            self.stockEdit = self.stockCreateCylinder
        def setupFromExisting():
            # Editor using an existing solid; returns False when there are
            # no candidate solids to choose from.
            Path.Log.track(index, force)
            if force or not self.stockFromExisting:
                self.stockFromExisting = PathJobGui.StockFromExistingEdit(
                    self.obj, self.form, force
                )
            if self.stockFromExisting.candidates(self.obj):
                self.stockEdit = self.stockFromExisting
                return True
            return False
        if index == -1:
            # Infer the editor from the object's current stock type.
            if self.obj.Stock is None or PathJobGui.StockFromBaseBoundBoxEdit.IsStock(
                self.obj
            ):
                setupFromBaseEdit()
            elif PathJobGui.StockCreateBoxEdit.IsStock(self.obj):
                setupCreateBoxEdit()
            elif PathJobGui.StockCreateCylinderEdit.IsStock(self.obj):
                setupCreateCylinderEdit()
            elif PathJobGui.StockFromExistingEdit.IsStock(self.obj):
                setupFromExisting()
            else:
                Path.Log.error(
                    translate("PathJob", "Unsupported stock object %s")
                    % self.obj.Stock.Label
                )
        else:
            # Editor explicitly chosen in the combo box.
            if index == PathJobGui.StockFromBaseBoundBoxEdit.Index:
                setupFromBaseEdit()
            elif index == PathJobGui.StockCreateBoxEdit.Index:
                setupCreateBoxEdit()
            elif index == PathJobGui.StockCreateCylinderEdit.Index:
                setupCreateCylinderEdit()
            elif index == PathJobGui.StockFromExistingEdit.Index:
                if not setupFromExisting():
                    # No existing solids available: fall back to bound box.
                    setupFromBaseEdit()
                    index = -1
            else:
                Path.Log.error(
                    translate("PathJob", "Unsupported stock type %s (%d)")
                    % (self.form.stock.currentText(), index)
                )
        self.stockEdit.activate(self.obj, index == -1)
    def setupUi(self):
        """Populate the form from the object and wire up change signals."""
        self.updateStockEditor(-1, False)
        self.form.stockInside.setChecked(self.obj.Inside)
        self.form.stock.currentIndexChanged.connect(self.updateStockEditor)
        # Any edit in these widgets marks the panel dirty (enables Apply).
        self.form.stockInside.stateChanged.connect(self.setDirty)
        self.form.stockExtXneg.textChanged.connect(self.setDirty)
        self.form.stockExtXpos.textChanged.connect(self.setDirty)
        self.form.stockExtYneg.textChanged.connect(self.setDirty)
        self.form.stockExtYpos.textChanged.connect(self.setDirty)
        self.form.stockExtZneg.textChanged.connect(self.setDirty)
        self.form.stockExtZpos.textChanged.connect(self.setDirty)
        self.form.stockBoxLength.textChanged.connect(self.setDirty)
        self.form.stockBoxWidth.textChanged.connect(self.setDirty)
        self.form.stockBoxHeight.textChanged.connect(self.setDirty)
        self.form.stockCylinderRadius.textChanged.connect(self.setDirty)
        self.form.stockCylinderHeight.textChanged.connect(self.setDirty)
class DressupPathBoundaryViewProvider(object):
    """View provider for the Boundary dress-up; manages its task panel."""
    def __init__(self, vobj):
        self.attach(vobj)
    def dumps(self):
        # Nothing to serialize; all state is rebuilt by attach().
        return None
    def loads(self, state):
        return None
    def attach(self, vobj):
        """Bind to the view object (also called on document restore)."""
        self.vobj = vobj
        self.obj = vobj.Object
        self.panel = None
    def claimChildren(self):
        # Group the wrapped path and the stock under this object in the tree.
        return [self.obj.Base, self.obj.Stock]
    def onDelete(self, vobj, args=None):
        # Delegate deletion handling to the document object's proxy.
        if vobj.Object and vobj.Object.Proxy:
            vobj.Object.Proxy.onDelete(vobj.Object, args)
        return True
    def setEdit(self, vobj, mode=0):
        """Open the task panel when the object enters edit mode."""
        panel = TaskPanel(vobj.Object, self)
        self.setupTaskPanel(panel)
        return True
    def unsetEdit(self, vobj, mode=0):
        # Leaving edit mode without accept(): roll back pending changes.
        if self.panel:
            self.panel.abort()
    def setupTaskPanel(self, panel):
        """Show *panel* in the task view, replacing any open dialog."""
        self.panel = panel
        FreeCADGui.Control.closeDialog()
        FreeCADGui.Control.showDialog(panel)
        panel.setupUi()
    def clearTaskPanel(self):
        # Called by the panel's cleanup(); drop our reference to it.
        self.panel = None
def Create(base, name="DressupPathBoundary"):
    """Create a Boundary dress-up wrapping *base* and open it for editing.

    Creation happens in its own transaction; afterwards the view object is
    put into edit mode, which opens the TaskPanel.
    """
    FreeCAD.ActiveDocument.openTransaction("Create a Boundary dressup")
    obj = PathDressupPathBoundary.Create(base, name)
    obj.ViewObject.Proxy = DressupPathBoundaryViewProvider(obj.ViewObject)
    # The dress-up shows the combined result, so hide its inputs.
    obj.Base.ViewObject.Visibility = False
    obj.Stock.ViewObject.Visibility = False
    FreeCAD.ActiveDocument.commitTransaction()
    obj.ViewObject.Document.setEdit(obj.ViewObject, 0)
    return obj
class CommandPathDressupPathBoundary:
    """GUI command creating a Boundary dress-up for the selected path."""

    def GetResources(self):
        """Icon, menu label and tooltip shown by the FreeCAD GUI."""
        return {
            "Pixmap": "Path_Dressup",
            "MenuText": QT_TRANSLATE_NOOP("Path_DressupPathBoundary", "Boundary"),
            "ToolTip": QT_TRANSLATE_NOOP(
                "Path_DressupPathBoundary",
                "Creates a Path Boundary Dress-up from a selected path",
            ),
        }

    def IsActive(self):
        """Enable the command only while the active document has a Job."""
        doc = FreeCAD.ActiveDocument
        if doc is None:
            return False
        return any(obj.Name[:3] == "Job" for obj in doc.Objects)

    def Activated(self):
        """Wrap the single selected path object in a new Boundary dress-up."""
        selection = FreeCADGui.Selection.getSelection()
        # The dress-up wraps exactly one path object.
        if len(selection) != 1:
            Path.Log.error(
                translate("Path_DressupPathBoundary", "Please select one path object")
                + "\n"
            )
            return
        baseObject = selection[0]
        FreeCAD.ActiveDocument.openTransaction("Create Path Boundary Dress-up")
        FreeCADGui.addModule("Path.Dressup.Gui.Boundary")
        FreeCADGui.doCommand(
            "Path.Dressup.Gui.Boundary.Create(App.ActiveDocument.%s)" % baseObject.Name
        )
        # The transaction is committed later via TaskPanel.accept().
        FreeCAD.ActiveDocument.recompute()
if FreeCAD.GuiUp:
    # Register the command with the FreeCAD GUI so it appears in menus/toolbars.
    FreeCADGui.addCommand("Path_DressupPathBoundary", CommandPathDressupPathBoundary())
Path.Log.notice("Loading PathDressupPathBoundaryGui... done\n")
|
plugins | rtvs | """
$description Live TV channels from RTVS, a Slovak public, state-owned broadcaster.
$url rtvs.sk
$type live
$region Slovakia
"""
import re
from urllib.parse import urlparse
from streamlink.plugin import Plugin, pluginmatcher
from streamlink.plugin.api import validate
from streamlink.stream.hls import HLSStream
@pluginmatcher(
    re.compile(
        r"https?://www\.rtvs\.sk/televizia/(?:live-|sport)",
    )
)
class Rtvs(Plugin):
    """Streamlink plugin for RTVS live TV channels."""
    def _get_streams(self):
        # Scrape the live-player iframe and strip the "/embed/live/" prefix
        # to obtain the channel identifier.
        channel = self.session.http.get(
            self.url,
            schema=validate.Schema(
                validate.parse_html(),
                validate.xml_xpath_string(".//iframe[@id='player_live']//@src"),
                validate.url(path=validate.startswith("/embed/live/")),
                validate.transform(lambda embed: urlparse(embed).path[len("/embed/live/") :]),
            ),
        )
        if not channel:
            return
        # Query the playlist endpoint for that channel and keep only the
        # HLS ("application/x-mpegurl") sources.
        videos = self.session.http.get(
            "https://www.rtvs.sk/json/live5f.json",
            params={
                "c": channel,
                "b": "mozilla",
                "p": "win",
                "f": "0",
                "d": "1",
            },
            schema=validate.Schema(
                validate.parse_json(),
                {
                    "clip": {
                        "sources": [
                            {
                                "src": validate.url(),
                                "type": str,
                            }
                        ],
                    },
                },
                validate.get(("clip", "sources")),
                validate.filter(lambda n: n["type"] == "application/x-mpegurl"),
            ),
        )
        # Only the first HLS source is used; its variant playlist provides
        # the individual quality streams.
        for video in videos:
            return HLSStream.parse_variant_playlist(self.session, video["src"])
# Entry point looked up by streamlink's plugin loader.
__plugin__ = Rtvs
|
controllers | zoom_ctrl | # -*- coding: utf-8 -*-
#
# Copyright (C) 2013 by Ihor E. Novikov
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
from generic import AbstractController
from sk1 import modes
ZOOM_IN = 1.25
ZOOM_OUT = 0.8
class ZoomController(AbstractController):
    """Canvas controller implementing the interactive zoom tool."""
    mode = modes.ZOOM_MODE
    def __init__(self, canvas, presenter):
        AbstractController.__init__(self, canvas, presenter)
    def escape_pressed(self):
        # Leave zoom mode only when no click/drag is in progress.
        if not self.start:
            self.canvas.set_mode()
    def mouse_right_down(self, event):
        # Right button zooms out; show the zoom-out cursor while it is held.
        self.start = event.get_point()
        cursor = self.canvas.app.cursors[modes.ZOOM_OUT_MODE]
        self.canvas.set_temp_cursor(cursor)
    def mouse_right_up(self, event):
        # NOTE(review): self.draw presumably flags an active rubber-band
        # drag maintained by the base class -- confirm.
        if not self.draw:
            self.canvas.zoom_at_point(self.start, ZOOM_OUT)
        self.canvas.restore_cursor()
    def do_action(self, event):
        """Zoom at the click point, or into the dragged rectangle."""
        if self.start and self.end:
            change_x = abs(self.end[0] - self.start[0])
            change_y = abs(self.end[1] - self.start[1])
            # A drag smaller than 5px on both axes counts as a plain click:
            # zoom in, or out when Ctrl is held.
            if change_x < 5 and change_y < 5:
                zoom = ZOOM_OUT if event.is_ctrl() else ZOOM_IN
                self.canvas.zoom_at_point(self.start, zoom)
            else:
                self.canvas.zoom_to_rectangle(self.start, self.end)
            self.start = []
            self.end = []
        return False
|
downloaders | RPNetBiz | # -*- coding: utf-8 -*-
import json
from ..base.multi_downloader import MultiDownloader
class RPNetBiz(MultiDownloader):
    __name__ = "RPNetBiz"
    __type__ = "downloader"
    __version__ = "0.22"
    __status__ = "testing"
    __pattern__ = r"https?://.+rpnet\.biz"
    __config__ = [
        ("enabled", "bool", "Activated", True),
        ("use_premium", "bool", "Use premium account if available", True),
        ("fallback", "bool", "Fallback to free download if premium fails", False),
        ("chk_filesize", "bool", "Check file size", True),
        ("max_wait", "int", "Reconnect if waiting time is greater than minutes", 10),
        ("revert_failed", "bool", "Revert to standard download if fails", True),
    ]
    __description__ = """RPNet.biz multi-downloader plugin"""
    __license__ = "GPLv3"
    __authors__ = [("Dman", "dmanugm@gmail.com")]
    def setup(self):
        # -1: no chunk limit for this hoster.
        self.chunk_limit = -1
    def handle_premium(self, pyfile):
        """Generate a premium link via the rpnet API, polling while rpnet
        caches the file on its HDD if necessary."""
        user, info = self.account.select()
        # Ask the API to generate a direct link for the requested URL.
        res = self.load(
            "https://premium.rpnet.biz/client_api.php",
            get={
                "username": user,
                "password": info["login"]["password"],
                "action": "generate",
                "links": pyfile.url,
            },
        )
        self.log_debug(f"JSON data: {res}")
        #: Get the first link... since we only queried one
        link_status = json.loads(res)["links"][0]
        #: Check if we only have an id as a HDD link
        if "id" in link_status:
            self.log_debug("Need to wait at least 30 seconds before requery")
            self.wait(30)  #: Wait for 30 seconds
            #: Lets query the server again asking for the status on the link,
            #: We need to keep doing this until we reach 100
            # NOTE(review): my_try runs 0..attempts inclusive, i.e. up to 31
            # polls of 30s (~15.5 min) -- likely intended as 30; confirm.
            attempts = 30
            my_try = 0
            while my_try <= attempts:
                self.log_debug(f"Try: {my_try}; Max Tries: {attempts}")
                res = self.load(
                    "https://premium.rpnet.biz/client_api.php",
                    get={
                        "username": user,
                        "password": info["login"]["password"],
                        "action": "downloadInformation",
                        "id": link_status["id"],
                    },
                )
                self.log_debug(f"JSON data hdd query: {res}")
                download_status = json.loads(res)["download"]
                dl_status = download_status["status"]
                if dl_status == "100":
                    # Caching finished: promote the HDD link to "generated".
                    lk_status = link_status["generated"] = download_status["rpnet_link"]
                    self.log_debug(f"Successfully downloaded to rpnet HDD: {lk_status}")
                    break
                else:
                    self.log_debug(f"At {dl_status}% for the file download")
                self.wait(30)
                my_try += 1
            if my_try > attempts:  #: We went over the limit!
                self.fail(
                    self._(
                        "Waited for about 15 minutes for download to finish but failed"
                    )
                )
        if "generated" in link_status:
            self.link = link_status["generated"]
            return
        elif "error" in link_status:
            self.fail(link_status["error"])
        else:
            self.fail(self._("Something went wrong, not supposed to enter here"))
|
services | validators | # -*- coding: utf-8 -*-
"""
flaskbb.user.services.validators
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Validators for use with user services.
:copyright: (c) 2018 the Flaskbb Team.
:license: BSD, see LICENSE for more details
"""
import attr
from flask_babelplus import gettext as _
from requests.exceptions import RequestException
from sqlalchemy import func
from ...core.changesets import ChangeSetValidator
from ...core.exceptions import StopValidation, ValidationError
from ...utils.helpers import check_image
@attr.s(eq=False, order=False, hash=False, frozen=True, repr=True)
class CantShareEmailValidator(ChangeSetValidator):
    """
    Validates that the new email for the user isn't currently registered by
    another user.
    """

    # User model/repository used to query existing accounts.
    users = attr.ib()

    def validate(self, model, changeset):
        """Raise ValidationError when another user already owns new_email."""
        # func.lower() makes the stored side case-insensitive; lower the
        # incoming address as well so e.g. 'Foo@Bar.com' matches 'foo@bar.com'
        # (previously only already-lowercase input was caught).
        others = self.users.query.filter(
            self.users.id != model.id,
            func.lower(self.users.email) == changeset.new_email.lower(),
        ).count()
        if others != 0:
            raise ValidationError(
                "new_email",
                _("%(email)s is already registered", email=changeset.new_email),
            )
class OldEmailMustMatch(ChangeSetValidator):
    """
    Validates that the email entered by the user is the current email of the user.
    """

    def validate(self, model, changeset):
        # Stop the whole validation chain on a mismatch.
        if model.email == changeset.old_email:
            return
        raise StopValidation([("old_email", _("Old email does not match"))])
class EmailsMustBeDifferent(ChangeSetValidator):
    """
    Validates that the new email entered by the user isn't the same as the
    current email for the user.
    """

    def validate(self, model, changeset):
        if model.email != changeset.new_email:
            return
        raise ValidationError("new_email", _("New email address must be different"))
class PasswordsMustBeDifferent(ChangeSetValidator):
    """
    Validates that the new password entered by the user isn't the same as the
    current password for the user.
    """

    def validate(self, model, changeset):
        # check_password returning True means it matches the current one.
        if not model.check_password(changeset.new_password):
            return
        raise ValidationError("new_password", _("New password must be different"))
class OldPasswordMustMatch(ChangeSetValidator):
    """
    Validates that the old password entered by the user is the current password
    for the user.
    """

    def validate(self, model, changeset):
        # Stop the whole validation chain when the password check fails.
        if model.check_password(changeset.old_password):
            return
        raise StopValidation([("old_password", _("Old password is wrong"))])
class ValidateAvatarURL(ChangeSetValidator):
    """
    Validates that the target avatar url currently meets constraints like
    height and width.
    .. warning::
        This validator only checks the **current** state of the image however
        if the image at the URL changes then this isn't re-run and the new
        image could break these contraints.
    """

    def validate(self, user, details_change):
        avatar = details_change.avatar
        if not avatar:
            # No avatar supplied: nothing to check.
            return
        # Keep the try-body minimal: only the HTTP fetch can raise here.
        try:
            error, ignored = check_image(avatar)
        except RequestException:
            raise ValidationError("avatar", _("Could not retrieve avatar"))
        if error:
            raise ValidationError("avatar", error)
|
extractor | slutload | from __future__ import unicode_literals
from .common import InfoExtractor
class SlutloadIE(InfoExtractor):
    """Extractor for slutload.com videos (desktop, mobile and embed URLs)."""
    _VALID_URL = r"https?://(?:\w+\.)?slutload\.com/(?:video/[^/]+|embed_player|watch)/(?P<id>[^/]+)"
    _TESTS = [
        {
            "url": "http://www.slutload.com/video/virginie-baisee-en-cam/TD73btpBqSxc/",
            "md5": "868309628ba00fd488cf516a113fd717",
            "info_dict": {
                "id": "TD73btpBqSxc",
                "ext": "mp4",
                "title": "virginie baisee en cam",
                "age_limit": 18,
                "thumbnail": r"re:https?://.*?\.jpg",
            },
        },
        {
            # mobile site
            "url": "http://mobile.slutload.com/video/masturbation-solo/fviFLmc6kzJ/",
            "only_matching": True,
        },
        {
            "url": "http://www.slutload.com/embed_player/TD73btpBqSxc/",
            "only_matching": True,
        },
        {
            "url": "http://www.slutload.com/watch/TD73btpBqSxc/Virginie-Baisee-En-Cam.html",
            "only_matching": True,
        },
    ]
    def _real_extract(self, url):
        video_id = self._match_id(url)
        # Try the lightweight embed page first; fall back to the full video
        # page below when it is unavailable or lacks a direct URL.
        embed_page = self._download_webpage(
            "http://www.slutload.com/embed_player/%s" % video_id,
            video_id,
            "Downloading embed page",
            fatal=False,
        )
        if embed_page:
            def extract(what):
                # Pull a data-video-<what> attribute value from the embed page.
                return self._html_search_regex(
                    r'data-video-%s=(["\'])(?P<url>(?:(?!\1).)+)\1' % what,
                    embed_page,
                    "video %s" % what,
                    default=None,
                    group="url",
                )
            video_url = extract("url")
            if video_url:
                title = self._html_search_regex(
                    r"<title>([^<]+)", embed_page, "title", default=video_id
                )
                return {
                    "id": video_id,
                    "url": video_url,
                    "title": title,
                    "thumbnail": extract("preview"),
                    "age_limit": 18,
                }
        # Fallback: parse the HTML5 <video> entry on the regular video page.
        webpage = self._download_webpage(
            "http://www.slutload.com/video/_/%s/" % video_id, video_id
        )
        title = self._html_search_regex(
            r"<h1><strong>([^<]+)</strong>", webpage, "title"
        ).strip()
        info = self._parse_html5_media_entries(url, webpage, video_id)[0]
        info.update(
            {
                "id": video_id,
                "title": title,
                "age_limit": 18,
            }
        )
        return info
|
requests | auth | # -*- coding: utf-8 -*-
"""
requests.auth
~~~~~~~~~~~~~
This module contains the authentication handlers for Requests.
"""
import hashlib
import os
import re
import time
from base64 import b64encode
from .compat import str, urlparse
from .cookies import extract_cookies_to_jar
from .status_codes import codes
from .utils import parse_dict_header, to_native_string
CONTENT_TYPE_FORM_URLENCODED = "application/x-www-form-urlencoded"
CONTENT_TYPE_MULTI_PART = "multipart/form-data"
def _basic_auth_str(username, password):
    """Returns a Basic Auth string ('Basic <base64(user:pass)>')."""
    credentials = ("%s:%s" % (username, password)).encode("latin1")
    token = b64encode(credentials).strip()
    return "Basic " + to_native_string(token)
class AuthBase(object):
    """Base class that all auth implementations derive from"""

    def __call__(self, r):
        # Subclasses must attach credentials to the prepared request *r*
        # and return it; the base class deliberately refuses to be used.
        raise NotImplementedError("Auth hooks must be callable.")
class HTTPBasicAuth(AuthBase):
    """Attaches HTTP Basic Authentication to the given Request object."""

    def __init__(self, username, password):
        self.username = username
        self.password = password

    def __eq__(self, other):
        # Auth objects with identical credentials are interchangeable;
        # getattr() keeps comparisons against arbitrary objects safe.
        # (Matches the upstream requests implementation.)
        return all(
            [
                self.username == getattr(other, "username", None),
                self.password == getattr(other, "password", None),
            ]
        )

    def __ne__(self, other):
        return not self == other

    def __call__(self, r):
        """Attach the Authorization header to request *r* and return it."""
        r.headers["Authorization"] = _basic_auth_str(self.username, self.password)
        return r
class HTTPProxyAuth(HTTPBasicAuth):
    """Attaches HTTP Proxy Authentication to a given Request object."""

    def __call__(self, r):
        # Same credential scheme as basic auth, but on the proxy header.
        r.headers["Proxy-Authorization"] = _basic_auth_str(self.username, self.password)
        return r
class HTTPDigestAuth(AuthBase):
    """Attaches HTTP Digest Authentication to the given Request object."""

    def __init__(self, username, password):
        self.username = username
        self.password = password
        # State carried across requests so nonce counts increment correctly.
        self.last_nonce = ""
        self.nonce_count = 0
        self.chal = {}
        self.pos = None
        self.num_401_calls = 1

    def build_digest_header(self, method, url):
        """Build the Authorization header value from the stored challenge.

        Returns None when the challenge uses an unsupported algorithm or
        qop, so the caller simply doesn't authenticate.
        """
        realm = self.chal["realm"]
        nonce = self.chal["nonce"]
        qop = self.chal.get("qop")
        algorithm = self.chal.get("algorithm")
        opaque = self.chal.get("opaque")
        # Initialize so an unknown algorithm hits the `is None` guard below
        # instead of raising UnboundLocalError.
        hash_utf8 = None
        if algorithm is None:
            _algorithm = "MD5"
        else:
            _algorithm = algorithm.upper()
        # lambdas assume digest modules are imported at the top level
        if _algorithm == "MD5" or _algorithm == "MD5-SESS":
            def md5_utf8(x):
                if isinstance(x, str):
                    x = x.encode("utf-8")
                return hashlib.md5(x).hexdigest()
            hash_utf8 = md5_utf8
        elif _algorithm == "SHA":
            def sha_utf8(x):
                if isinstance(x, str):
                    x = x.encode("utf-8")
                return hashlib.sha1(x).hexdigest()
            hash_utf8 = sha_utf8
        elif _algorithm == "SHA-256":
            def sha256_utf8(x):
                if isinstance(x, str):
                    x = x.encode("utf-8")
                return hashlib.sha256(x).hexdigest()
            hash_utf8 = sha256_utf8
        elif _algorithm == "SHA-512":
            def sha512_utf8(x):
                if isinstance(x, str):
                    x = x.encode("utf-8")
                return hashlib.sha512(x).hexdigest()
            hash_utf8 = sha512_utf8
        KD = lambda s, d: hash_utf8("%s:%s" % (s, d))
        if hash_utf8 is None:
            # Unsupported algorithm: we cannot answer this challenge.
            return None
        # XXX not implemented yet
        entdig = None
        p_parsed = urlparse(url)
        #: path is request-uri defined in RFC 2616 which should not be empty
        path = p_parsed.path or "/"
        if p_parsed.query:
            path += "?" + p_parsed.query
        A1 = "%s:%s:%s" % (self.username, realm, self.password)
        A2 = "%s:%s" % (method, path)
        HA1 = hash_utf8(A1)
        HA2 = hash_utf8(A2)
        # nc (nonce count) must increase for each reuse of the same nonce.
        if nonce == self.last_nonce:
            self.nonce_count += 1
        else:
            self.nonce_count = 1
        ncvalue = "%08x" % self.nonce_count
        # Build a client nonce from count, server nonce, time and randomness.
        s = str(self.nonce_count).encode("utf-8")
        s += nonce.encode("utf-8")
        s += time.ctime().encode("utf-8")
        s += os.urandom(8)
        cnonce = hashlib.sha1(s).hexdigest()[:16]
        if _algorithm == "MD5-SESS":
            HA1 = hash_utf8("%s:%s:%s" % (HA1, nonce, cnonce))
        if qop is None:
            respdig = KD(HA1, "%s:%s" % (nonce, HA2))
        elif qop == "auth" or "auth" in qop.split(","):
            noncebit = "%s:%s:%s:%s:%s" % (nonce, ncvalue, cnonce, "auth", HA2)
            respdig = KD(HA1, noncebit)
        else:
            # XXX handle auth-int.
            return None
        self.last_nonce = nonce
        # XXX should the partial digests be encoded too?
        base = 'username="%s", realm="%s", nonce="%s", uri="%s", ' 'response="%s"' % (
            self.username,
            realm,
            nonce,
            path,
            respdig,
        )
        if opaque:
            base += ', opaque="%s"' % opaque
        if algorithm:
            base += ', algorithm="%s"' % algorithm
        if entdig:
            base += ', digest="%s"' % entdig
        if qop:
            base += ', qop="auth", nc=%s, cnonce="%s"' % (ncvalue, cnonce)
        return "Digest %s" % (base)

    def handle_redirect(self, r, **kwargs):
        """Reset num_401_calls counter on redirects."""
        if r.is_redirect:
            self.num_401_calls = 1

    def handle_401(self, r, **kwargs):
        """Takes the given response and tries digest-auth, if needed."""
        if self.pos is not None:
            # Rewind the file position indicator of the body to where
            # it was to resend the request.
            r.request.body.seek(self.pos)
        num_401_calls = getattr(self, "num_401_calls", 1)
        s_auth = r.headers.get("www-authenticate", "")
        # Only retry once per request; a second 401 means bad credentials.
        if "digest" in s_auth.lower() and num_401_calls < 2:
            self.num_401_calls += 1
            pat = re.compile(r"digest ", flags=re.IGNORECASE)
            self.chal = parse_dict_header(pat.sub("", s_auth, count=1))
            # Consume content and release the original connection
            # to allow our new request to reuse the same one.
            r.content
            r.raw.release_conn()
            prep = r.request.copy()
            extract_cookies_to_jar(prep._cookies, r.request, r.raw)
            prep.prepare_cookies(prep._cookies)
            prep.headers["Authorization"] = self.build_digest_header(
                prep.method, prep.url
            )
            _r = r.connection.send(prep, **kwargs)
            _r.history.append(r)
            _r.request = prep
            return _r
        self.num_401_calls = 1
        return r

    def __call__(self, r):
        # If we have a saved nonce, skip the 401
        if self.last_nonce:
            r.headers["Authorization"] = self.build_digest_header(r.method, r.url)
        try:
            self.pos = r.body.tell()
        except AttributeError:
            # In the case of HTTPDigestAuth being reused and the body of
            # the previous request was a file-like object, pos has the
            # file position of the previous body. Ensure it's set to
            # None.
            self.pos = None
        r.register_hook("response", self.handle_401)
        r.register_hook("response", self.handle_redirect)
        return r
|
plugins | huya | """
$description Chinese live-streaming platform for live video game broadcasts and individual live streams.
$url huya.com
$type live
$metadata id
$metadata author
$metadata title
"""
import base64
import logging
import re
from html import unescape as html_unescape
from typing import Dict
from streamlink.plugin import Plugin, pluginmatcher
from streamlink.plugin.api import validate
from streamlink.stream.http import HTTPStream
from streamlink.utils.url import update_scheme
log = logging.getLogger(__name__)
@pluginmatcher(
    re.compile(
        r"https?://(?:www\.)?huya\.com/(?P<channel>[^/]+)",
    )
)
class Huya(Plugin):
    """Streamlink plugin for huya.com live channels."""
    # Maps stream names ("source_<cdn>") to the CDN priority reported by the
    # page; populated in _get_streams and consulted by stream_weight.
    QUALITY_WEIGHTS: Dict[str, int] = {}
    @classmethod
    def stream_weight(cls, key):
        weight = cls.QUALITY_WEIGHTS.get(key)
        if weight:
            return weight, "huya"
        return super().stream_weight(key)
    def _get_streams(self):
        # The page embeds `var hyPlayerConfig = {...}` whose `stream` field is
        # either a base64 string or inline JSON; the schema below accepts both,
        # parses it and extracts the room metadata plus the per-CDN FLV info.
        data = self.session.http.get(
            self.url,
            schema=validate.Schema(
                validate.parse_html(),
                validate.xml_xpath_string(".//script[contains(text(),'var hyPlayerConfig = {')][1]/text()"),
                validate.none_or_all(
                    re.compile(r"""(?P<q>"?)stream(?P=q)\s*:\s*(?:"(?P<base64>.+?)"|(?P<json>\{.+?})\s*}\s*;)"""),
                ),
                validate.none_or_all(
                    validate.any(
                        validate.all(
                            validate.get("base64"),
                            str,
                            validate.transform(base64.b64decode),
                        ),
                        validate.all(
                            validate.get("json"),
                            str,
                        ),
                    ),
                    validate.parse_json(),
                    {
                        "data": [
                            {
                                "gameLiveInfo": {
                                    "liveId": str,
                                    "nick": str,
                                    "roomName": str,
                                },
                                "gameStreamInfoList": [
                                    validate.all(
                                        {
                                            "sCdnType": str,
                                            "iPCPriorityRate": int,
                                            "sStreamName": str,
                                            "sFlvUrl": str,
                                            "sFlvUrlSuffix": str,
                                            "sFlvAntiCode": validate.all(str, validate.transform(lambda v: html_unescape(v))),
                                        },
                                        validate.union_get(
                                            "sCdnType",
                                            "iPCPriorityRate",
                                            "sStreamName",
                                            "sFlvUrl",
                                            "sFlvUrlSuffix",
                                            "sFlvAntiCode",
                                        ),
                                    ),
                                ],
                            }
                        ],
                    },
                    validate.get(("data", 0)),
                    validate.union_get(
                        ("gameLiveInfo", "liveId"),
                        ("gameLiveInfo", "nick"),
                        ("gameLiveInfo", "roomName"),
                        "gameStreamInfoList",
                    ),
                ),
            ),
        )
        if not data:
            return
        self.id, self.author, self.title, streamdata = data
        # One HTTP-FLV stream per CDN; remember each CDN's priority so
        # stream_weight can rank them.
        for cdntype, priority, streamname, flvurl, suffix, anticode in streamdata:
            name = f"source_{cdntype.lower()}"
            self.QUALITY_WEIGHTS[name] = priority
            yield name, HTTPStream(self.session, update_scheme("https://", f"{flvurl}/{streamname}.{suffix}?{anticode}"))
        log.debug(f"QUALITY_WEIGHTS: {self.QUALITY_WEIGHTS!r}")
# Entry point looked up by streamlink's plugin loader.
__plugin__ = Huya
|
webkit | cache | # SPDX-FileCopyrightText: Florian Bruhin (The Compiler) <mail@qutebrowser.org>
#
# SPDX-License-Identifier: GPL-3.0-or-later
"""HTTP network cache."""
import os.path
from typing import cast
from qutebrowser.config import config
from qutebrowser.qt.network import QNetworkDiskCache
from qutebrowser.utils import standarddir, utils
# Module-global DiskCache instance, created by init(). cast() lets static
# type checkers treat it as DiskCache even though it is None until init() runs.
diskcache = cast("DiskCache", None)
class DiskCache(QNetworkDiskCache):

    """QNetworkDiskCache subclass wired into qutebrowser's config.

    Stores its data under ``<cache_dir>/http`` and keeps its maximum size
    in sync with the ``content.cache.size`` setting.
    """

    def __init__(self, cache_dir, parent=None):
        super().__init__(parent)
        self.setCacheDirectory(os.path.join(cache_dir, "http"))
        self._set_cache_size()
        # Re-apply the size whenever the relevant setting changes.
        config.instance.changed.connect(self._set_cache_size)

    def __repr__(self):
        return utils.get_repr(self, size=self.cacheSize(),
                              maxsize=self.maximumCacheSize(),
                              path=self.cacheDirectory())

    @config.change_filter("content.cache.size")
    def _set_cache_size(self):
        """Apply the configured maximum cache size."""
        configured = config.val.content.cache.size
        # 50 MB mirrors the QNetworkDiskCachePrivate built-in default.
        fallback = 1024 * 1024 * 50
        self.setMaximumCacheSize(fallback if configured is None else configured)
def init(parent):
    """Initialize the global cache.

    Args:
        parent: Qt parent object passed to the DiskCache so Qt manages
            the cache object's lifetime.
    """
    global diskcache
    # Cache lives under the standard cache directory for this application.
    diskcache = DiskCache(standarddir.cache(), parent=parent)
|
youtube-dl | options | from __future__ import unicode_literals
import optparse
import os.path
import re
import sys
from .compat import (
compat_expanduser,
compat_get_terminal_size,
compat_getenv,
compat_kwargs,
)
from .compat import compat_open as open
from .compat import compat_shlex_split
from .downloader.external import list_external_downloaders
from .utils import preferredencoding, write_string
from .version import __version__
def _hide_login_info(opts):
PRIVATE_OPTS = set(
[
"-p",
"--password",
"-u",
"--username",
"--video-password",
"--ap-password",
"--ap-username",
]
)
eqre = re.compile(
"^(?P<key>" + ("|".join(re.escape(po) for po in PRIVATE_OPTS)) + ")=.+$"
)
def _scrub_eq(o):
m = eqre.match(o)
if m:
return m.group("key") + "=PRIVATE"
else:
return o
opts = list(map(_scrub_eq, opts))
for idx, opt in enumerate(opts):
if opt in PRIVATE_OPTS and idx + 1 < len(opts):
opts[idx + 1] = "PRIVATE"
return opts
def parseOpts(overrideArguments=None):
def _readOptions(filename_bytes, default=[]):
try:
optionf = open(filename_bytes, encoding=preferredencoding())
except IOError:
return default # silently skip if file is not present
try:
contents = optionf.read()
res = compat_shlex_split(contents, comments=True)
finally:
optionf.close()
return res
def _readUserConf():
xdg_config_home = compat_getenv("XDG_CONFIG_HOME")
if xdg_config_home:
userConfFile = os.path.join(xdg_config_home, "youtube-dl", "config")
if not os.path.isfile(userConfFile):
userConfFile = os.path.join(xdg_config_home, "youtube-dl.conf")
else:
userConfFile = os.path.join(
compat_expanduser("~"), ".config", "youtube-dl", "config"
)
if not os.path.isfile(userConfFile):
userConfFile = os.path.join(
compat_expanduser("~"), ".config", "youtube-dl.conf"
)
userConf = _readOptions(userConfFile, None)
if userConf is None:
appdata_dir = compat_getenv("appdata")
if appdata_dir:
userConf = _readOptions(
os.path.join(appdata_dir, "youtube-dl", "config"), default=None
)
if userConf is None:
userConf = _readOptions(
os.path.join(appdata_dir, "youtube-dl", "config.txt"),
default=None,
)
if userConf is None:
userConf = _readOptions(
os.path.join(compat_expanduser("~"), "youtube-dl.conf"), default=None
)
if userConf is None:
userConf = _readOptions(
os.path.join(compat_expanduser("~"), "youtube-dl.conf.txt"),
default=None,
)
if userConf is None:
userConf = []
return userConf
def _format_option_string(option):
"""('-o', '--option') -> -o, --format METAVAR"""
opts = []
if option._short_opts:
opts.append(option._short_opts[0])
if option._long_opts:
opts.append(option._long_opts[0])
if len(opts) > 1:
opts.insert(1, ", ")
if option.takes_value():
opts.append(" %s" % option.metavar)
return "".join(opts)
def _comma_separated_values_options_callback(option, opt_str, value, parser):
setattr(parser.values, option.dest, value.split(","))
# No need to wrap help messages if we're on a wide console
columns = compat_get_terminal_size().columns
max_width = columns if columns else 80
max_help_position = 80
fmt = optparse.IndentedHelpFormatter(
width=max_width, max_help_position=max_help_position
)
fmt.format_option_strings = _format_option_string
kw = {
"version": __version__,
"formatter": fmt,
"usage": "%prog [OPTIONS] URL [URL...]",
"conflict_handler": "resolve",
}
parser = optparse.OptionParser(**compat_kwargs(kw))
general = optparse.OptionGroup(parser, "General Options")
general.add_option(
"-h", "--help", action="help", help="Print this help text and exit"
)
general.add_option(
"--version", action="version", help="Print program version and exit"
)
general.add_option(
"-U",
"--update",
action="store_true",
dest="update_self",
help="Update this program to latest version. Make sure that you have sufficient permissions (run with sudo if needed)",
)
general.add_option(
"-i",
"--ignore-errors",
action="store_true",
dest="ignoreerrors",
default=False,
help="Continue on download errors, for example to skip unavailable videos in a playlist",
)
general.add_option(
"--abort-on-error",
action="store_false",
dest="ignoreerrors",
help="Abort downloading of further videos (in the playlist or the command line) if an error occurs",
)
general.add_option(
"--dump-user-agent",
action="store_true",
dest="dump_user_agent",
default=False,
help="Display the current browser identification",
)
general.add_option(
"--list-extractors",
action="store_true",
dest="list_extractors",
default=False,
help="List all supported extractors",
)
general.add_option(
"--extractor-descriptions",
action="store_true",
dest="list_extractor_descriptions",
default=False,
help="Output descriptions of all supported extractors",
)
general.add_option(
"--force-generic-extractor",
action="store_true",
dest="force_generic_extractor",
default=False,
help="Force extraction to use the generic extractor",
)
general.add_option(
"--default-search",
dest="default_search",
metavar="PREFIX",
help='Use this prefix for unqualified URLs. For example "gvsearch2:" downloads two videos from google videos for youtube-dl "large apple". Use the value "auto" to let youtube-dl guess ("auto_warning" to emit a warning when guessing). "error" just throws an error. The default value "fixup_error" repairs broken URLs, but emits an error if this is not possible instead of searching.',
)
general.add_option(
"--ignore-config",
action="store_true",
help="Do not read configuration files. "
"When given in the global configuration file /etc/youtube-dl.conf: "
"Do not read the user configuration in ~/.config/youtube-dl/config "
"(%APPDATA%/youtube-dl/config.txt on Windows)",
)
general.add_option(
"--config-location",
dest="config_location",
metavar="PATH",
help="Location of the configuration file; either the path to the config or its containing directory.",
)
general.add_option(
"--flat-playlist",
action="store_const",
dest="extract_flat",
const="in_playlist",
default=False,
help="Do not extract the videos of a playlist, only list them.",
)
general.add_option(
"--mark-watched",
action="store_true",
dest="mark_watched",
default=False,
help="Mark videos watched (YouTube only)",
)
general.add_option(
"--no-mark-watched",
action="store_false",
dest="mark_watched",
default=False,
help="Do not mark videos watched (YouTube only)",
)
general.add_option(
"--no-color",
"--no-colors",
action="store_true",
dest="no_color",
default=False,
help="Do not emit color codes in output",
)
network = optparse.OptionGroup(parser, "Network Options")
network.add_option(
"--proxy",
dest="proxy",
default=None,
metavar="URL",
help="Use the specified HTTP/HTTPS/SOCKS proxy. To enable "
"SOCKS proxy, specify a proper scheme. For example "
'socks5://127.0.0.1:1080/. Pass in an empty string (--proxy "") '
"for direct connection",
)
network.add_option(
"--socket-timeout",
dest="socket_timeout",
type=float,
default=None,
metavar="SECONDS",
help="Time to wait before giving up, in seconds",
)
network.add_option(
"--source-address",
metavar="IP",
dest="source_address",
default=None,
help="Client-side IP address to bind to",
)
network.add_option(
"-4",
"--force-ipv4",
action="store_const",
const="0.0.0.0",
dest="source_address",
help="Make all connections via IPv4",
)
network.add_option(
"-6",
"--force-ipv6",
action="store_const",
const="::",
dest="source_address",
help="Make all connections via IPv6",
)
geo = optparse.OptionGroup(parser, "Geo Restriction")
geo.add_option(
"--geo-verification-proxy",
dest="geo_verification_proxy",
default=None,
metavar="URL",
help="Use this proxy to verify the IP address for some geo-restricted sites. "
"The default proxy specified by --proxy (or none, if the option is not present) is used for the actual downloading.",
)
geo.add_option(
"--cn-verification-proxy",
dest="cn_verification_proxy",
default=None,
metavar="URL",
help=optparse.SUPPRESS_HELP,
)
geo.add_option(
"--geo-bypass",
action="store_true",
dest="geo_bypass",
default=True,
help="Bypass geographic restriction via faking X-Forwarded-For HTTP header",
)
geo.add_option(
"--no-geo-bypass",
action="store_false",
dest="geo_bypass",
default=True,
help="Do not bypass geographic restriction via faking X-Forwarded-For HTTP header",
)
geo.add_option(
"--geo-bypass-country",
metavar="CODE",
dest="geo_bypass_country",
default=None,
help="Force bypass geographic restriction with explicitly provided two-letter ISO 3166-2 country code",
)
geo.add_option(
"--geo-bypass-ip-block",
metavar="IP_BLOCK",
dest="geo_bypass_ip_block",
default=None,
help="Force bypass geographic restriction with explicitly provided IP block in CIDR notation",
)
selection = optparse.OptionGroup(parser, "Video Selection")
selection.add_option(
"--playlist-start",
dest="playliststart",
metavar="NUMBER",
default=1,
type=int,
help="Playlist video to start at (default is %default)",
)
selection.add_option(
"--playlist-end",
dest="playlistend",
metavar="NUMBER",
default=None,
type=int,
help="Playlist video to end at (default is last)",
)
selection.add_option(
"--playlist-items",
dest="playlist_items",
metavar="ITEM_SPEC",
default=None,
help='Playlist video items to download. Specify indices of the videos in the playlist separated by commas like: "--playlist-items 1,2,5,8" if you want to download videos indexed 1, 2, 5, 8 in the playlist. You can specify range: "--playlist-items 1-3,7,10-13", it will download the videos at index 1, 2, 3, 7, 10, 11, 12 and 13.',
)
selection.add_option(
"--match-title",
dest="matchtitle",
metavar="REGEX",
help="Download only matching titles (case-insensitive regex or alphanumeric sub-string)",
)
selection.add_option(
"--reject-title",
dest="rejecttitle",
metavar="REGEX",
help="Skip download for matching titles (case-insensitive regex or alphanumeric sub-string)",
)
selection.add_option(
"--max-downloads",
dest="max_downloads",
metavar="NUMBER",
type=int,
default=None,
help="Abort after downloading NUMBER files",
)
selection.add_option(
"--min-filesize",
metavar="SIZE",
dest="min_filesize",
default=None,
help="Do not download any videos smaller than SIZE (e.g. 50k or 44.6m)",
)
selection.add_option(
"--max-filesize",
metavar="SIZE",
dest="max_filesize",
default=None,
help="Do not download any videos larger than SIZE (e.g. 50k or 44.6m)",
)
selection.add_option(
"--date",
metavar="DATE",
dest="date",
default=None,
help="Download only videos uploaded in this date",
)
selection.add_option(
"--datebefore",
metavar="DATE",
dest="datebefore",
default=None,
help="Download only videos uploaded on or before this date (i.e. inclusive)",
)
selection.add_option(
"--dateafter",
metavar="DATE",
dest="dateafter",
default=None,
help="Download only videos uploaded on or after this date (i.e. inclusive)",
)
selection.add_option(
"--min-views",
metavar="COUNT",
dest="min_views",
default=None,
type=int,
help="Do not download any videos with less than COUNT views",
)
selection.add_option(
"--max-views",
metavar="COUNT",
dest="max_views",
default=None,
type=int,
help="Do not download any videos with more than COUNT views",
)
selection.add_option(
"--match-filter",
metavar="FILTER",
dest="match_filter",
default=None,
help=(
"Generic video filter. "
'Specify any key (see the "OUTPUT TEMPLATE" for a list of available keys) to '
"match if the key is present, "
"!key to check if the key is not present, "
'key > NUMBER (like "comment_count > 12", also works with '
">=, <, <=, !=, =) to compare against a number, "
"key = 'LITERAL' (like \"uploader = 'Mike Smith'\", also works with !=) "
"to match against a string literal "
"and & to require multiple matches. "
"Values which are not known are excluded unless you "
"put a question mark (?) after the operator. "
"For example, to only match videos that have been liked more than "
"100 times and disliked less than 50 times (or the dislike "
"functionality is not available at the given service), but who "
"also have a description, use --match-filter "
'"like_count > 100 & dislike_count <? 50 & description" .'
),
)
selection.add_option(
"--no-playlist",
action="store_true",
dest="noplaylist",
default=False,
help="Download only the video, if the URL refers to a video and a playlist.",
)
selection.add_option(
"--yes-playlist",
action="store_false",
dest="noplaylist",
default=False,
help="Download the playlist, if the URL refers to a video and a playlist.",
)
selection.add_option(
"--age-limit",
metavar="YEARS",
dest="age_limit",
default=None,
type=int,
help="Download only videos suitable for the given age",
)
selection.add_option(
"--download-archive",
metavar="FILE",
dest="download_archive",
help="Download only videos not listed in the archive file. Record the IDs of all downloaded videos in it.",
)
selection.add_option(
"--include-ads",
dest="include_ads",
action="store_true",
help="Download advertisements as well (experimental)",
)
authentication = optparse.OptionGroup(parser, "Authentication Options")
authentication.add_option(
"-u",
"--username",
dest="username",
metavar="USERNAME",
help="Login with this account ID",
)
authentication.add_option(
"-p",
"--password",
dest="password",
metavar="PASSWORD",
help="Account password. If this option is left out, youtube-dl will ask interactively.",
)
authentication.add_option(
"-2",
"--twofactor",
dest="twofactor",
metavar="TWOFACTOR",
help="Two-factor authentication code",
)
authentication.add_option(
"-n",
"--netrc",
action="store_true",
dest="usenetrc",
default=False,
help="Use .netrc authentication data",
)
authentication.add_option(
"--video-password",
dest="videopassword",
metavar="PASSWORD",
help="Video password (vimeo, youku)",
)
adobe_pass = optparse.OptionGroup(parser, "Adobe Pass Options")
adobe_pass.add_option(
"--ap-mso",
dest="ap_mso",
metavar="MSO",
help="Adobe Pass multiple-system operator (TV provider) identifier, use --ap-list-mso for a list of available MSOs",
)
adobe_pass.add_option(
"--ap-username",
dest="ap_username",
metavar="USERNAME",
help="Multiple-system operator account login",
)
adobe_pass.add_option(
"--ap-password",
dest="ap_password",
metavar="PASSWORD",
help="Multiple-system operator account password. If this option is left out, youtube-dl will ask interactively.",
)
adobe_pass.add_option(
"--ap-list-mso",
action="store_true",
dest="ap_list_mso",
default=False,
help="List all supported multiple-system operators",
)
video_format = optparse.OptionGroup(parser, "Video Format Options")
video_format.add_option(
"-f",
"--format",
action="store",
dest="format",
metavar="FORMAT",
default=None,
help='Video format code, see the "FORMAT SELECTION" for all the info',
)
video_format.add_option(
"--all-formats",
action="store_const",
dest="format",
const="all",
help="Download all available video formats",
)
video_format.add_option(
"--prefer-free-formats",
action="store_true",
dest="prefer_free_formats",
default=False,
help="Prefer free video formats unless a specific one is requested",
)
video_format.add_option(
"-F",
"--list-formats",
action="store_true",
dest="listformats",
help="List all available formats of requested videos",
)
video_format.add_option(
"--youtube-include-dash-manifest",
action="store_true",
dest="youtube_include_dash_manifest",
default=True,
help=optparse.SUPPRESS_HELP,
)
video_format.add_option(
"--youtube-skip-dash-manifest",
action="store_false",
dest="youtube_include_dash_manifest",
help="Do not download the DASH manifests and related data on YouTube videos",
)
video_format.add_option(
"--merge-output-format",
action="store",
dest="merge_output_format",
metavar="FORMAT",
default=None,
help=(
"If a merge is required (e.g. bestvideo+bestaudio), "
"output to given container format. One of mkv, mp4, ogg, webm, flv. "
"Ignored if no merge is required"
),
)
subtitles = optparse.OptionGroup(parser, "Subtitle Options")
subtitles.add_option(
"--write-sub",
"--write-srt",
action="store_true",
dest="writesubtitles",
default=False,
help="Write subtitle file",
)
subtitles.add_option(
"--write-auto-sub",
"--write-automatic-sub",
action="store_true",
dest="writeautomaticsub",
default=False,
help="Write automatically generated subtitle file (YouTube only)",
)
subtitles.add_option(
"--all-subs",
action="store_true",
dest="allsubtitles",
default=False,
help="Download all the available subtitles of the video",
)
subtitles.add_option(
"--list-subs",
action="store_true",
dest="listsubtitles",
default=False,
help="List all available subtitles for the video",
)
subtitles.add_option(
"--sub-format",
action="store",
dest="subtitlesformat",
metavar="FORMAT",
default="best",
help='Subtitle format, accepts formats preference, for example: "srt" or "ass/srt/best"',
)
subtitles.add_option(
"--sub-lang",
"--sub-langs",
"--srt-lang",
action="callback",
dest="subtitleslangs",
metavar="LANGS",
type="str",
default=[],
callback=_comma_separated_values_options_callback,
help="Languages of the subtitles to download (optional) separated by commas, use --list-subs for available language tags",
)
downloader = optparse.OptionGroup(parser, "Download Options")
downloader.add_option(
"-r",
"--limit-rate",
"--rate-limit",
dest="ratelimit",
metavar="RATE",
help="Maximum download rate in bytes per second (e.g. 50K or 4.2M)",
)
downloader.add_option(
"-R",
"--retries",
dest="retries",
metavar="RETRIES",
default=10,
help='Number of retries (default is %default), or "infinite".',
)
downloader.add_option(
"--fragment-retries",
dest="fragment_retries",
metavar="RETRIES",
default=10,
help='Number of retries for a fragment (default is %default), or "infinite" (DASH, hlsnative and ISM)',
)
downloader.add_option(
"--skip-unavailable-fragments",
action="store_true",
dest="skip_unavailable_fragments",
default=True,
help="Skip unavailable fragments (DASH, hlsnative and ISM)",
)
downloader.add_option(
"--abort-on-unavailable-fragment",
action="store_false",
dest="skip_unavailable_fragments",
help="Abort downloading when some fragment is not available",
)
downloader.add_option(
"--keep-fragments",
action="store_true",
dest="keep_fragments",
default=False,
help="Keep downloaded fragments on disk after downloading is finished; fragments are erased by default",
)
downloader.add_option(
"--buffer-size",
dest="buffersize",
metavar="SIZE",
default="1024",
help="Size of download buffer (e.g. 1024 or 16K) (default is %default)",
)
downloader.add_option(
"--no-resize-buffer",
action="store_true",
dest="noresizebuffer",
default=False,
help="Do not automatically adjust the buffer size. By default, the buffer size is automatically resized from an initial value of SIZE.",
)
downloader.add_option(
"--http-chunk-size",
dest="http_chunk_size",
metavar="SIZE",
default=None,
help="Size of a chunk for chunk-based HTTP downloading (e.g. 10485760 or 10M) (default is disabled). "
"May be useful for bypassing bandwidth throttling imposed by a webserver (experimental)",
)
downloader.add_option(
"--test",
action="store_true",
dest="test",
default=False,
help=optparse.SUPPRESS_HELP,
)
downloader.add_option(
"--playlist-reverse",
action="store_true",
help="Download playlist videos in reverse order",
)
downloader.add_option(
"--playlist-random",
action="store_true",
help="Download playlist videos in random order",
)
downloader.add_option(
"--xattr-set-filesize",
dest="xattr_set_filesize",
action="store_true",
help="Set file xattribute ytdl.filesize with expected file size",
)
downloader.add_option(
"--hls-prefer-native",
dest="hls_prefer_native",
action="store_true",
default=None,
help="Use the native HLS downloader instead of ffmpeg",
)
downloader.add_option(
"--hls-prefer-ffmpeg",
dest="hls_prefer_native",
action="store_false",
default=None,
help="Use ffmpeg instead of the native HLS downloader",
)
downloader.add_option(
"--hls-use-mpegts",
dest="hls_use_mpegts",
action="store_true",
help="Use the mpegts container for HLS videos, allowing to play the "
"video while downloading (some players may not be able to play it)",
)
downloader.add_option(
"--external-downloader",
dest="external_downloader",
metavar="COMMAND",
help="Use the specified external downloader. "
"Currently supports %s" % ",".join(list_external_downloaders()),
)
downloader.add_option(
"--external-downloader-args",
dest="external_downloader_args",
metavar="ARGS",
help="Give these arguments to the external downloader",
)
workarounds = optparse.OptionGroup(parser, "Workarounds")
workarounds.add_option(
"--encoding",
dest="encoding",
metavar="ENCODING",
help="Force the specified encoding (experimental)",
)
workarounds.add_option(
"--no-check-certificate",
action="store_true",
dest="no_check_certificate",
default=False,
help="Suppress HTTPS certificate validation",
)
workarounds.add_option(
"--prefer-insecure",
"--prefer-unsecure",
action="store_true",
dest="prefer_insecure",
help="Use an unencrypted connection to retrieve information about the video. (Currently supported only for YouTube)",
)
workarounds.add_option(
"--user-agent",
metavar="UA",
dest="user_agent",
help="Specify a custom user agent",
)
workarounds.add_option(
"--referer",
metavar="URL",
dest="referer",
default=None,
help="Specify a custom Referer: use if the video access is restricted to one domain",
)
workarounds.add_option(
"--add-header",
metavar="FIELD:VALUE",
dest="headers",
action="append",
help=(
"Specify a custom HTTP header and its value, separated by a colon ':'. You can use this option multiple times. "
"NB Use --cookies rather than adding a Cookie header if its contents may be sensitive; "
"data from a Cookie header will be sent to all domains, not just the one intended"
),
)
workarounds.add_option(
"--bidi-workaround",
dest="bidi_workaround",
action="store_true",
help="Work around terminals that lack bidirectional text support. Requires bidiv or fribidi executable in PATH",
)
workarounds.add_option(
"--sleep-interval",
"--min-sleep-interval",
metavar="SECONDS",
dest="sleep_interval",
type=float,
help=(
"Number of seconds to sleep before each download when used alone "
"or a lower bound of a range for randomized sleep before each download "
"(minimum possible number of seconds to sleep) when used along with "
"--max-sleep-interval."
),
)
workarounds.add_option(
"--max-sleep-interval",
metavar="SECONDS",
dest="max_sleep_interval",
type=float,
help=(
"Upper bound of a range for randomized sleep before each download "
"(maximum possible number of seconds to sleep). Must only be used "
"along with --min-sleep-interval."
),
)
verbosity = optparse.OptionGroup(parser, "Verbosity / Simulation Options")
verbosity.add_option(
"-q",
"--quiet",
action="store_true",
dest="quiet",
default=False,
help="Activate quiet mode",
)
verbosity.add_option(
"--no-warnings",
dest="no_warnings",
action="store_true",
default=False,
help="Ignore warnings",
)
verbosity.add_option(
"-s",
"--simulate",
action="store_true",
dest="simulate",
default=False,
help="Do not download the video and do not write anything to disk",
)
verbosity.add_option(
"--skip-download",
action="store_true",
dest="skip_download",
default=False,
help="Do not download the video",
)
verbosity.add_option(
"-g",
"--get-url",
action="store_true",
dest="geturl",
default=False,
help="Simulate, quiet but print URL",
)
verbosity.add_option(
"-e",
"--get-title",
action="store_true",
dest="gettitle",
default=False,
help="Simulate, quiet but print title",
)
verbosity.add_option(
"--get-id",
action="store_true",
dest="getid",
default=False,
help="Simulate, quiet but print id",
)
verbosity.add_option(
"--get-thumbnail",
action="store_true",
dest="getthumbnail",
default=False,
help="Simulate, quiet but print thumbnail URL",
)
verbosity.add_option(
"--get-description",
action="store_true",
dest="getdescription",
default=False,
help="Simulate, quiet but print video description",
)
verbosity.add_option(
"--get-duration",
action="store_true",
dest="getduration",
default=False,
help="Simulate, quiet but print video length",
)
verbosity.add_option(
"--get-filename",
action="store_true",
dest="getfilename",
default=False,
help="Simulate, quiet but print output filename",
)
verbosity.add_option(
"--get-format",
action="store_true",
dest="getformat",
default=False,
help="Simulate, quiet but print output format",
)
verbosity.add_option(
"-j",
"--dump-json",
action="store_true",
dest="dumpjson",
default=False,
help='Simulate, quiet but print JSON information. See the "OUTPUT TEMPLATE" for a description of available keys.',
)
verbosity.add_option(
"-J",
"--dump-single-json",
action="store_true",
dest="dump_single_json",
default=False,
help="Simulate, quiet but print JSON information for each command-line argument. If the URL refers to a playlist, dump the whole playlist information in a single line.",
)
verbosity.add_option(
"--print-json",
action="store_true",
dest="print_json",
default=False,
help="Be quiet and print the video information as JSON (video is still being downloaded).",
)
verbosity.add_option(
"--newline",
action="store_true",
dest="progress_with_newline",
default=False,
help="Output progress bar as new lines",
)
verbosity.add_option(
"--no-progress",
action="store_true",
dest="noprogress",
default=False,
help="Do not print progress bar",
)
verbosity.add_option(
"--console-title",
action="store_true",
dest="consoletitle",
default=False,
help="Display progress in console titlebar",
)
verbosity.add_option(
"-v",
"--verbose",
action="store_true",
dest="verbose",
default=False,
help="Print various debugging information",
)
verbosity.add_option(
"--dump-pages",
"--dump-intermediate-pages",
action="store_true",
dest="dump_intermediate_pages",
default=False,
help="Print downloaded pages encoded using base64 to debug problems (very verbose)",
)
verbosity.add_option(
"--write-pages",
action="store_true",
dest="write_pages",
default=False,
help="Write downloaded intermediary pages to files in the current directory to debug problems",
)
verbosity.add_option(
"--youtube-print-sig-code",
action="store_true",
dest="youtube_print_sig_code",
default=False,
help=optparse.SUPPRESS_HELP,
)
verbosity.add_option(
"--print-traffic",
"--dump-headers",
dest="debug_printtraffic",
action="store_true",
default=False,
help="Display sent and read HTTP traffic",
)
verbosity.add_option(
"-C",
"--call-home",
dest="call_home",
action="store_true",
default=False,
help="Contact the youtube-dl server for debugging",
)
verbosity.add_option(
"--no-call-home",
dest="call_home",
action="store_false",
default=False,
help="Do NOT contact the youtube-dl server for debugging",
)
filesystem = optparse.OptionGroup(parser, "Filesystem Options")
filesystem.add_option(
"-a",
"--batch-file",
dest="batchfile",
metavar="FILE",
help="File containing URLs to download ('-' for stdin), one URL per line. "
"Lines starting with '#', ';' or ']' are considered as comments and ignored.",
)
filesystem.add_option(
"--id",
default=False,
action="store_true",
dest="useid",
help="Use only video ID in file name",
)
filesystem.add_option(
"-o",
"--output",
dest="outtmpl",
metavar="TEMPLATE",
help=('Output filename template, see the "OUTPUT TEMPLATE" for all the info'),
)
filesystem.add_option(
"--output-na-placeholder",
dest="outtmpl_na_placeholder",
metavar="PLACEHOLDER",
default="NA",
help=(
'Placeholder value for unavailable meta fields in output filename template (default is "%default")'
),
)
filesystem.add_option(
"--autonumber-size",
dest="autonumber_size",
metavar="NUMBER",
type=int,
help=optparse.SUPPRESS_HELP,
)
filesystem.add_option(
"--autonumber-start",
dest="autonumber_start",
metavar="NUMBER",
default=1,
type=int,
help="Specify the start value for %(autonumber)s (default is %default)",
)
filesystem.add_option(
"--restrict-filenames",
action="store_true",
dest="restrictfilenames",
default=False,
help='Restrict filenames to only ASCII characters, and avoid "&" and spaces in filenames',
)
filesystem.add_option(
"-A",
"--auto-number",
action="store_true",
dest="autonumber",
default=False,
help=optparse.SUPPRESS_HELP,
)
filesystem.add_option(
"-t",
"--title",
action="store_true",
dest="usetitle",
default=False,
help=optparse.SUPPRESS_HELP,
)
filesystem.add_option(
"-l",
"--literal",
default=False,
action="store_true",
dest="usetitle",
help=optparse.SUPPRESS_HELP,
)
filesystem.add_option(
"-w",
"--no-overwrites",
action="store_true",
dest="nooverwrites",
default=False,
help="Do not overwrite files",
)
filesystem.add_option(
"-c",
"--continue",
action="store_true",
dest="continue_dl",
default=True,
help="Force resume of partially downloaded files. By default, youtube-dl will resume downloads if possible.",
)
filesystem.add_option(
"--no-continue",
action="store_false",
dest="continue_dl",
help="Do not resume partially downloaded files (restart from beginning)",
)
filesystem.add_option(
"--no-part",
action="store_true",
dest="nopart",
default=False,
help="Do not use .part files - write directly into output file",
)
filesystem.add_option(
"--mtime",
action="store_true",
dest="updatetime",
default=True,
help="Use the Last-modified header to set the file modification time (default)",
)
filesystem.add_option(
"--no-mtime",
action="store_false",
dest="updatetime",
help="Do not use the Last-modified header to set the file modification time",
)
filesystem.add_option(
"--write-description",
action="store_true",
dest="writedescription",
default=False,
help="Write video description to a .description file",
)
filesystem.add_option(
"--write-info-json",
action="store_true",
dest="writeinfojson",
default=False,
help="Write video metadata to a .info.json file",
)
filesystem.add_option(
"--write-annotations",
action="store_true",
dest="writeannotations",
default=False,
help="Write video annotations to a .annotations.xml file",
)
filesystem.add_option(
"--load-info-json",
"--load-info",
dest="load_info_filename",
metavar="FILE",
help='JSON file containing the video information (created with the "--write-info-json" option)',
)
filesystem.add_option(
"--cookies",
dest="cookiefile",
metavar="FILE",
help="File to read cookies from and dump cookie jar in",
)
filesystem.add_option(
"--cache-dir",
dest="cachedir",
default=None,
metavar="DIR",
help="Location in the filesystem where youtube-dl can store some downloaded information permanently. By default $XDG_CACHE_HOME/youtube-dl or ~/.cache/youtube-dl . At the moment, only YouTube player files (for videos with obfuscated signatures) are cached, but that may change.",
)
filesystem.add_option(
"--no-cache-dir",
action="store_const",
const=False,
dest="cachedir",
help="Disable filesystem caching",
)
filesystem.add_option(
"--rm-cache-dir",
action="store_true",
dest="rm_cachedir",
help="Delete all filesystem cache files",
)
thumbnail = optparse.OptionGroup(parser, "Thumbnail Options")
thumbnail.add_option(
"--write-thumbnail",
action="store_true",
dest="writethumbnail",
default=False,
help="Write thumbnail image to disk",
)
thumbnail.add_option(
"--write-all-thumbnails",
action="store_true",
dest="write_all_thumbnails",
default=False,
help="Write all thumbnail image formats to disk",
)
thumbnail.add_option(
"--list-thumbnails",
action="store_true",
dest="list_thumbnails",
default=False,
help="Simulate and list all available thumbnail formats",
)
postproc = optparse.OptionGroup(parser, "Post-processing Options")
postproc.add_option(
"-x",
"--extract-audio",
action="store_true",
dest="extractaudio",
default=False,
help="Convert video files to audio-only files (requires ffmpeg/avconv and ffprobe/avprobe)",
)
postproc.add_option(
"--audio-format",
metavar="FORMAT",
dest="audioformat",
default="best",
help='Specify audio format: "best", "aac", "flac", "mp3", "m4a", "opus", "vorbis", or "wav"; "%default" by default; No effect without -x',
)
postproc.add_option(
"--audio-quality",
metavar="QUALITY",
dest="audioquality",
default="5",
help="Specify ffmpeg/avconv audio quality, insert a value between 0 (better) and 9 (worse) for VBR or a specific bitrate like 128K (default %default)",
)
postproc.add_option(
"--recode-video",
metavar="FORMAT",
dest="recodevideo",
default=None,
help="Encode the video to another format if necessary (currently supported: mp4|flv|ogg|webm|mkv|avi)",
)
postproc.add_option(
"--postprocessor-args",
dest="postprocessor_args",
metavar="ARGS",
help="Give these arguments to the postprocessor (if postprocessing is required)",
)
postproc.add_option(
"-k",
"--keep-video",
action="store_true",
dest="keepvideo",
default=False,
help="Keep the video file on disk after the post-processing; the video is erased by default",
)
postproc.add_option(
"--no-post-overwrites",
action="store_true",
dest="nopostoverwrites",
default=False,
help="Do not overwrite post-processed files; the post-processed files are overwritten by default",
)
postproc.add_option(
"--embed-subs",
action="store_true",
dest="embedsubtitles",
default=False,
help="Embed subtitles in the video (only for mp4, webm and mkv videos)",
)
postproc.add_option(
"--embed-thumbnail",
action="store_true",
dest="embedthumbnail",
default=False,
help="Embed thumbnail in the audio as cover art",
)
postproc.add_option(
"--add-metadata",
action="store_true",
dest="addmetadata",
default=False,
help="Write metadata to the video file",
)
postproc.add_option(
"--metadata-from-title",
metavar="FORMAT",
dest="metafromtitle",
help="Parse additional metadata like song title / artist from the video title. "
"The format syntax is the same as --output. Regular expression with "
"named capture groups may also be used. "
"The parsed parameters replace existing values. "
'Example: --metadata-from-title "%(artist)s - %(title)s" matches a title like '
'"Coldplay - Paradise". '
'Example (regex): --metadata-from-title "(?P<artist>.+?) - (?P<title>.+)"',
)
postproc.add_option(
"--xattrs",
action="store_true",
dest="xattrs",
default=False,
help="Write metadata to the video file's xattrs (using dublin core and xdg standards)",
)
postproc.add_option(
"--fixup",
metavar="POLICY",
dest="fixup",
default="detect_or_warn",
help="Automatically correct known faults of the file. "
"One of never (do nothing), warn (only emit a warning), "
"detect_or_warn (the default; fix file if we can, warn otherwise)",
)
postproc.add_option(
"--prefer-avconv",
action="store_false",
dest="prefer_ffmpeg",
help="Prefer avconv over ffmpeg for running the postprocessors",
)
postproc.add_option(
"--prefer-ffmpeg",
action="store_true",
dest="prefer_ffmpeg",
help="Prefer ffmpeg over avconv for running the postprocessors (default)",
)
postproc.add_option(
"--ffmpeg-location",
"--avconv-location",
metavar="PATH",
dest="ffmpeg_location",
help="Location of the ffmpeg/avconv binary; either the path to the binary or its containing directory.",
)
postproc.add_option(
"--exec",
metavar="CMD",
dest="exec_cmd",
help="Execute a command on the file after downloading and post-processing, similar to find's -exec syntax. Example: --exec 'adb push {} /sdcard/Music/ && rm {}'",
)
postproc.add_option(
"--convert-subs",
"--convert-subtitles",
metavar="FORMAT",
dest="convertsubtitles",
default=None,
help="Convert the subtitles to other format (currently supported: srt|ass|vtt|lrc)",
)
parser.add_option_group(general)
parser.add_option_group(network)
parser.add_option_group(geo)
parser.add_option_group(selection)
parser.add_option_group(downloader)
parser.add_option_group(filesystem)
parser.add_option_group(thumbnail)
parser.add_option_group(verbosity)
parser.add_option_group(workarounds)
parser.add_option_group(video_format)
parser.add_option_group(subtitles)
parser.add_option_group(authentication)
parser.add_option_group(adobe_pass)
parser.add_option_group(postproc)
if overrideArguments is not None:
opts, args = parser.parse_args(overrideArguments)
if opts.verbose:
write_string("[debug] Override config: " + repr(overrideArguments) + "\n")
else:
def compat_conf(conf):
if sys.version_info < (3,):
return [a.decode(preferredencoding(), "replace") for a in conf]
return conf
command_line_conf = compat_conf(sys.argv[1:])
opts, args = parser.parse_args(command_line_conf)
system_conf = user_conf = custom_conf = []
if "--config-location" in command_line_conf:
location = compat_expanduser(opts.config_location)
if os.path.isdir(location):
location = os.path.join(location, "youtube-dl.conf")
if not os.path.exists(location):
parser.error("config-location %s does not exist." % location)
custom_conf = _readOptions(location)
elif "--ignore-config" in command_line_conf:
pass
else:
system_conf = _readOptions("/etc/youtube-dl.conf")
if "--ignore-config" not in system_conf:
user_conf = _readUserConf()
argv = system_conf + user_conf + custom_conf + command_line_conf
opts, args = parser.parse_args(argv)
if opts.verbose:
for conf_label, conf in (
("System config", system_conf),
("User config", user_conf),
("Custom config", custom_conf),
("Command-line args", command_line_conf),
):
write_string(
"[debug] %s: %s\n" % (conf_label, repr(_hide_login_info(conf)))
)
return parser, opts, args
|
BOPTools | JoinFeatures | # /***************************************************************************
# * Copyright (c) 2016 Victor Titov (DeepSOIC) <vv.titov@gmail.com> *
# * *
# * This file is part of the FreeCAD CAx development system. *
# * *
# * This library is free software; you can redistribute it and/or *
# * modify it under the terms of the GNU Library General Public *
# * License as published by the Free Software Foundation; either *
# * version 2 of the License, or (at your option) any later version. *
# * *
# * This library is distributed in the hope that it will be useful, *
# * but WITHOUT ANY WARRANTY; without even the implied warranty of *
# * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *
# * GNU Library General Public License for more details. *
# * *
# * You should have received a copy of the GNU Library General Public *
# * License along with this library; see the file COPYING.LIB. If not, *
# * write to the Free Software Foundation, Inc., 59 Temple Place, *
# * Suite 330, Boston, MA 02111-1307, USA *
# * *
# ***************************************************************************/
# Module metadata. Fixed typo in __doc__: "ebmed" -> "embed".
__title__ = "BOPTools.JoinFeatures module"
__author__ = "DeepSOIC"
__url__ = "http://www.freecad.org"
__doc__ = "Implementation of document objects (features) for connect, embed and cutout operations."
import FreeCAD
import Part
from . import JoinAPI
if FreeCAD.GuiUp:
import FreeCADGui
from PySide import QtCore, QtGui
# -------------------------- common stuff -------------------------------------
# -------------------------- translation-related code -------------------------
# Thanks, yorik! (see forum thread "A new Part tool is being born... JoinFeatures!"
# http://forum.freecad.org/viewtopic.php?f=22&t=11112&start=30#p90239 )
# Compatibility shim: older Qt bindings exposed QString.fromUtf8; when it is
# absent (newer Qt/PySide), fall back to an identity function.
try:
    _fromUtf8 = QtCore.QString.fromUtf8
except Exception:
    def _fromUtf8(s):
        # Modern bindings: str is already unicode, pass through unchanged.
        return s
# Translation entry point used by all user-visible strings in this module.
translate = FreeCAD.Qt.translate
# --------------------------/translation-related code -------------------------
def getParamRefine():
    """Return the user's Part workbench "RefineModel" preference as a bool."""
    params = FreeCAD.ParamGet(
        "User parameter:BaseApp/Preferences/Mod/Part/Boolean"
    )
    return params.GetBool("RefineModel")
def cmdCreateJoinFeature(name, mode):
    """cmdCreateJoinFeature(name, mode): generalized implementation of GUI commands.

    name: label for the new document object.
    mode: "Connect", "Embed" or "Cutout"; selects which make* factory is
    invoked and how the current selection is bound to its properties.
    Raises ValueError for any other mode.
    """
    sel = FreeCADGui.Selection.getSelectionEx()
    FreeCAD.ActiveDocument.openTransaction("Create " + mode)
    FreeCADGui.addModule("BOPTools.JoinFeatures")
    # All model changes go through doCommand() so they are recorded in the
    # Python console / macro history.
    FreeCADGui.doCommand(
        "j = BOPTools.JoinFeatures.make{mode}(name='{name}')".format(
            mode=mode, name=name
        )
    )
    if mode == "Embed" or mode == "Cutout":
        # Two-object modes: first selected object is the Base, second the Tool.
        FreeCADGui.doCommand("j.Base = App.ActiveDocument." + sel[0].Object.Name)
        FreeCADGui.doCommand("j.Tool = App.ActiveDocument." + sel[1].Object.Name)
    elif mode == "Connect":
        # Connect takes the entire selection as a list of objects.
        FreeCADGui.doCommand(
            "j.Objects = {sel}".format(
                sel="["
                + ", ".join(["App.ActiveDocument." + so.Object.Name for so in sel])
                + "]"
            )
        )
    else:
        raise ValueError(
            "cmdCreateJoinFeature: Unexpected mode {mode}".format(mode=repr(mode))
        )
    try:
        FreeCADGui.doCommand("j.Proxy.execute(j)")
        FreeCADGui.doCommand("j.purgeTouched()")
    except Exception as err:
        # Recompute failed: let the user choose between keeping the feature
        # anyway and aborting the whole transaction.
        mb = QtGui.QMessageBox()
        mb.setIcon(mb.Icon.Warning)
        error_text1 = translate(
            "Part_JoinFeatures", "Computing the result failed with an error:"
        )
        error_text2 = translate(
            "Part_JoinFeatures",
            "Click 'Continue' to create the feature anyway, or 'Abort' to cancel.",
        )
        mb.setText(error_text1 + "\n\n" + str(err) + "\n\n" + error_text2)
        mb.setWindowTitle(translate("Part_JoinFeatures", "Bad selection", None))
        btnAbort = mb.addButton(QtGui.QMessageBox.StandardButton.Abort)
        btnOK = mb.addButton(
            translate("Part_JoinFeatures", "Continue", None),
            QtGui.QMessageBox.ButtonRole.ActionRole,
        )
        mb.setDefaultButton(btnOK)
        mb.exec_()
        if mb.clickedButton() is btnAbort:
            FreeCAD.ActiveDocument.abortTransaction()
            return
    # Hide the source objects; the join result replaces them visually.
    FreeCADGui.doCommand(
        "for obj in j.ViewObject.Proxy.claimChildren():\n" " obj.ViewObject.hide()"
    )
    FreeCAD.ActiveDocument.commitTransaction()
def getIconPath(icon_dot_svg):
    """Resolve an icon file name for the GUI.

    Currently a pass-through: the bare name is returned unchanged.
    """
    resolved = icon_dot_svg
    return resolved
# -------------------------- /common stuff ------------------------------------
# -------------------------- Connect ------------------------------------------
def makeConnect(name):
    """makeConnect(name): makes an Connect object.

    Creates a Part::FeaturePython document object, attaches the
    FeatureConnect proxy and, when the GUI is up, its view provider.
    Returns the new document object.
    """
    obj = FreeCAD.ActiveDocument.addObject("Part::FeaturePython", name)
    FeatureConnect(obj)
    if FreeCAD.GuiUp:
        ViewProviderConnect(obj.ViewObject)
    return obj
class FeatureConnect:
    """The PartJoinFeature object.

    Document-object proxy implementing the "Connect" join: fuses the shapes
    of the linked Objects via JoinAPI.connect.
    """

    def __init__(self, obj):
        obj.addProperty(
            "App::PropertyLinkList", "Objects", "Connect", "Object to be connected."
        )
        obj.addProperty(
            "App::PropertyBool",
            "Refine",
            "Connect",
            "True = refine resulting shape. False = output as is.",
        )
        # Default follows the user's Part workbench "RefineModel" preference.
        obj.Refine = getParamRefine()
        obj.addProperty(
            "App::PropertyLength",
            "Tolerance",
            "Connect",
            "Tolerance when intersecting (fuzzy value). "
            "In addition to tolerances of the shapes.",
        )
        obj.Proxy = self
        self.Type = "FeatureConnect"

    def execute(self, selfobj):
        # Recompute: join all linked shapes; optionally remove the splitter
        # faces/edges left behind by the boolean operation.
        rst = JoinAPI.connect([obj.Shape for obj in selfobj.Objects], selfobj.Tolerance)
        if selfobj.Refine:
            rst = rst.removeSplitter()
        selfobj.Shape = rst
class ViewProviderConnect:
    """A View Provider for the Part Connect feature."""

    def __init__(self, vobj):
        vobj.Proxy = self

    def getIcon(self):
        return getIconPath("Part_JoinConnect.svg")

    def attach(self, vobj):
        # Cache both the view object and the underlying document object.
        self.ViewObject = vobj
        self.Object = vobj.Object

    def dumps(self):
        # No state to persist.
        return None

    def loads(self, state):
        # No state to restore.
        return None

    def claimChildren(self):
        # The joined source objects appear as children in the model tree.
        return self.Object.Objects

    def onDelete(self, feature, subelements):
        # Make the source objects visible again; never veto the deletion.
        try:
            for child in self.claimChildren():
                child.ViewObject.show()
        except Exception as err:
            FreeCAD.Console.PrintError("Error in onDelete: " + str(err))
        return True

    def canDragObjects(self):
        return True

    def canDropObjects(self):
        return True

    def canDragObject(self, dragged_object):
        return True

    def canDropObject(self, incoming_object):
        # Only shape-bearing objects can participate in the join.
        return hasattr(incoming_object, "Shape")

    def dragObject(self, selfvp, dragged_object):
        # Removing a child: rebuild the Objects list without it.
        remaining = self.Object.Objects
        remaining.remove(dragged_object)
        self.Object.Objects = remaining

    def dropObject(self, selfvp, incoming_object):
        # Dropping appends the object to the join list.
        self.Object.Objects = self.Object.Objects + [incoming_object]
class CommandConnect:
    """Command to create Connect feature."""

    def GetResources(self):
        # Icon, menu text and tooltip shown in the Part menu / toolbar.
        return {
            "Pixmap": getIconPath("Part_JoinConnect.svg"),
            "MenuText": QtCore.QT_TRANSLATE_NOOP("Part_JoinConnect", "Connect objects"),
            "Accel": "",
            "ToolTip": QtCore.QT_TRANSLATE_NOOP(
                "Part_JoinConnect", "Fuses objects, taking care to preserve voids."
            ),
        }

    def Activated(self):
        # At least one selection is enough: a single compound is a valid input.
        if len(FreeCADGui.Selection.getSelectionEx()) >= 1:
            cmdCreateJoinFeature(name="Connect", mode="Connect")
        else:
            mb = QtGui.QMessageBox()
            mb.setIcon(mb.Icon.Warning)
            mb.setText(
                translate(
                    "Part_JoinFeatures",
                    "Select at least two objects, or one or more compounds",
                    None,
                )
            )
            mb.setWindowTitle(translate("Part_JoinFeatures", "Bad selection", None))
            mb.exec_()

    def IsActive(self):
        # Available whenever a document is open.
        if FreeCAD.ActiveDocument:
            return True
        else:
            return False
# -------------------------- /Connect -----------------------------------------
# -------------------------- Embed --------------------------------------------
def makeEmbed(name):
    """makeEmbed(name): makes an Embed object.

    Creates a Part::FeaturePython document object, attaches the FeatureEmbed
    proxy and, when the GUI is up, its view provider. Returns the new object.
    """
    obj = FreeCAD.ActiveDocument.addObject("Part::FeaturePython", name)
    FeatureEmbed(obj)
    if FreeCAD.GuiUp:
        ViewProviderEmbed(obj.ViewObject)
    return obj
class FeatureEmbed:
    """The Part Embed object.

    Document-object proxy implementing the "Embed" join: fuses the Tool
    shape into the Base shape via JoinAPI.embed_legacy.
    """

    def __init__(self, obj):
        obj.addProperty("App::PropertyLink", "Base", "Embed", "Object to embed into.")
        obj.addProperty("App::PropertyLink", "Tool", "Embed", "Object to be embedded.")
        obj.addProperty(
            "App::PropertyBool",
            "Refine",
            "Embed",
            "True = refine resulting shape. False = output as is.",
        )
        # Default follows the user's Part workbench "RefineModel" preference.
        obj.Refine = getParamRefine()
        obj.addProperty(
            "App::PropertyLength",
            "Tolerance",
            "Embed",
            "Tolerance when intersecting (fuzzy value). "
            "In addition to tolerances of the shapes.",
        )
        obj.Proxy = self
        self.Type = "FeatureEmbed"

    def execute(self, selfobj):
        # Recompute: embed Tool into Base; optionally clean splitter faces.
        rst = JoinAPI.embed_legacy(
            selfobj.Base.Shape, selfobj.Tool.Shape, selfobj.Tolerance
        )
        if selfobj.Refine:
            rst = rst.removeSplitter()
        selfobj.Shape = rst
class ViewProviderEmbed:
    """A View Provider for the Part Embed feature."""

    def __init__(self, vobj):
        vobj.Proxy = self

    def getIcon(self):
        return getIconPath("Part_JoinEmbed.svg")

    def attach(self, vobj):
        # Cache both the view object and the underlying document object.
        self.ViewObject = vobj
        self.Object = vobj.Object

    def dumps(self):
        # No state to persist.
        return None

    def loads(self, state):
        # No state to restore.
        return None

    def claimChildren(self):
        # Base and Tool show up as children of the embed feature.
        return [self.Object.Base, self.Object.Tool]

    def onDelete(self, feature, subelements):
        # Restore visibility of both operands; never veto the deletion.
        try:
            for child in (self.Object.Base, self.Object.Tool):
                child.ViewObject.show()
        except Exception as err:
            FreeCAD.Console.PrintError("Error in onDelete: " + str(err))
        return True
class CommandEmbed:
    """Command to create Part Embed feature."""

    def GetResources(self):
        # Icon, menu text and tooltip shown in the Part menu / toolbar.
        return {
            "Pixmap": getIconPath("Part_JoinEmbed.svg"),
            "MenuText": QtCore.QT_TRANSLATE_NOOP("Part_JoinEmbed", "Embed object"),
            "Accel": "",
            "ToolTip": QtCore.QT_TRANSLATE_NOOP(
                "Part_JoinEmbed",
                "Fuses one object into another, taking care to preserve voids.",
            ),
        }

    def Activated(self):
        # Exactly two objects are required: Base first, then Tool.
        if len(FreeCADGui.Selection.getSelectionEx()) == 2:
            cmdCreateJoinFeature(name="Embed", mode="Embed")
        else:
            mb = QtGui.QMessageBox()
            mb.setIcon(mb.Icon.Warning)
            mb.setText(
                translate(
                    "Part_JoinFeatures",
                    "Select base object, then the object to embed, and then invoke this tool.",
                    None,
                )
            )
            mb.setWindowTitle(translate("Part_JoinFeatures", "Bad selection", None))
            mb.exec_()

    def IsActive(self):
        # Available whenever a document is open.
        if FreeCAD.ActiveDocument:
            return True
        else:
            return False
# -------------------------- /Embed -------------------------------------------
# -------------------------- Cutout -------------------------------------------
def makeCutout(name):
    """makeCutout(name): makes an Cutout object.

    Creates a Part::FeaturePython document object, attaches the FeatureCutout
    proxy and, when the GUI is up, its view provider. Returns the new object.
    """
    obj = FreeCAD.ActiveDocument.addObject("Part::FeaturePython", name)
    FeatureCutout(obj)
    if FreeCAD.GuiUp:
        ViewProviderCutout(obj.ViewObject)
    return obj
class FeatureCutout:
    """The Part Cutout object.

    Document-object proxy implementing the "Cutout" join: carves space for
    the Tool shape out of the Base shape via JoinAPI.cutout_legacy.
    """

    def __init__(self, obj):
        obj.addProperty("App::PropertyLink", "Base", "Cutout", "Object to be cut.")
        obj.addProperty(
            "App::PropertyLink", "Tool", "Cutout", "Object to make cutout for."
        )
        obj.addProperty(
            "App::PropertyBool",
            "Refine",
            "Cutout",
            "True = refine resulting shape. False = output as is.",
        )
        # Default follows the user's Part workbench "RefineModel" preference.
        obj.Refine = getParamRefine()
        obj.addProperty(
            "App::PropertyLength",
            "Tolerance",
            "Cutout",
            "Tolerance when intersecting (fuzzy value). In addition to tolerances of the shapes.",
        )
        obj.Proxy = self
        self.Type = "FeatureCutout"

    def execute(self, selfobj):
        # Recompute: cut a pocket for Tool in Base; optionally clean splitters.
        rst = JoinAPI.cutout_legacy(
            selfobj.Base.Shape, selfobj.Tool.Shape, selfobj.Tolerance
        )
        if selfobj.Refine:
            rst = rst.removeSplitter()
        selfobj.Shape = rst
class ViewProviderCutout:
    """A View Provider for the Part Cutout feature."""

    def __init__(self, vobj):
        vobj.Proxy = self

    def getIcon(self):
        return getIconPath("Part_JoinCutout.svg")

    def attach(self, vobj):
        # Cache both the view object and the underlying document object.
        self.ViewObject = vobj
        self.Object = vobj.Object

    def dumps(self):
        # No state to persist.
        return None

    def loads(self, state):
        # No state to restore.
        return None

    def claimChildren(self):
        # Base and Tool show up as children of the cutout feature.
        return [self.Object.Base, self.Object.Tool]

    def onDelete(self, feature, subelements):
        # Restore visibility of both operands; never veto the deletion.
        try:
            for child in (self.Object.Base, self.Object.Tool):
                child.ViewObject.show()
        except Exception as err:
            FreeCAD.Console.PrintError("Error in onDelete: " + str(err))
        return True
class CommandCutout:
    """Command to create PartJoinFeature in Cutout mode."""

    def GetResources(self):
        # Icon, menu text and tooltip shown in the Part menu / toolbar.
        return {
            "Pixmap": getIconPath("Part_JoinCutout.svg"),
            "MenuText": QtCore.QT_TRANSLATE_NOOP(
                "Part_JoinCutout", "Cutout for object"
            ),
            "Accel": "",
            "ToolTip": QtCore.QT_TRANSLATE_NOOP(
                "Part_JoinCutout", "Makes a cutout in one object to fit another object."
            ),
        }

    def Activated(self):
        # Exactly two objects are required: the object to cut, then the tool.
        if len(FreeCADGui.Selection.getSelectionEx()) == 2:
            cmdCreateJoinFeature(name="Cutout", mode="Cutout")
        else:
            mb = QtGui.QMessageBox()
            mb.setIcon(mb.Icon.Warning)
            mb.setText(
                translate(
                    "Part_JoinFeatures",
                    "Select the object to make a cutout in, then the object that should fit into the cutout, and then invoke this tool.",
                    None,
                )
            )
            mb.setWindowTitle(translate("Part_JoinFeatures", "Bad selection", None))
            mb.exec_()

    def IsActive(self):
        # Available whenever a document is open.
        if FreeCAD.ActiveDocument:
            return True
        else:
            return False
# -------------------------- /Cutout ------------------------------------------
def addCommands():
    """Register the three Join commands (Cutout, Embed, Connect) with the GUI."""
    FreeCADGui.addCommand("Part_JoinCutout", CommandCutout())
    FreeCADGui.addCommand("Part_JoinEmbed", CommandEmbed())
    FreeCADGui.addCommand("Part_JoinConnect", CommandConnect())
|
neubot | utils | # neubot/utils.py
#
# Copyright (c) 2010-2011 Simone Basso <bassosimone@gmail.com>,
# NEXA Center for Internet & Society at Politecnico di Torino
#
# This file is part of Neubot <http://www.neubot.org/>.
#
# Neubot is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Neubot is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Neubot. If not, see <http://www.gnu.org/licenses/>.
#
""" Miscellaneous utility functions """
import os
import sys
import time
import types
import uuid
def safe_seek(afile, offset, whence=os.SEEK_SET):
    """Seek() implementation that does not throw IOError when
    @afile is a console device."""
    #
    # Console devices (stdin/stdout/stderr attached to a terminal) cannot
    # be rewound, so seek() raises IOError on them.  Swallow the error for
    # the three standard streams and propagate it for anything else.
    #
    try:
        afile.seek(offset, whence)
    except IOError:
        if afile in (sys.stdin, sys.stdout, sys.stderr):
            return
        raise
#
# Unit formatter
#
# base 2
KIBI = (1024.0, "Ki")
MEBI = (1048576.0, "Mi")
GIBI = (1073741824.0, "Gi")
# base 10
KILO = (1000.0, "K")
MEGA = (1000000.0, "M")
GIGA = (1000000000.0, "G")
def _unit_formatter(number, unit_info, unit_name):
"""Internal unit formatter"""
for scale, suffix in unit_info:
if number >= scale:
number /= scale
return "%.1f %s%s" % (number, suffix, unit_name)
return "%.1f %s" % (number, unit_name)
def unit_formatter(number, base10=False, unit=""):
"""Unit formatter"""
if base10:
return _unit_formatter(number, (GIGA, MEGA, KILO), unit)
else:
return _unit_formatter(number, (GIBI, MEBI, KIBI), unit)
def speed_formatter(speed, base10=True, bytez=False):
"""Speed formatter"""
unit = "Byte/s"
if not bytez:
speed = speed * 8
unit = "bit/s"
return unit_formatter(speed, base10, unit)
def time_formatter(number):
    """Time formatter: render seconds as s, ms or us with one decimal."""
    if number >= 1.0:
        return "%.1f s" % number
    if number >= 0.001:
        return "%.1f ms" % (number * 1000)
    if number >= 0.000001:
        return "%.1f us" % (number * 1000000)
    # Below one microsecond fall back to scientific notation.
    return "%e us" % (number * 1000000)
# Coerce types
def asciiify(string):
    """Convert something to ASCII string"""
    # NOTE(review): under Python 3 str.encode() returns bytes, not str --
    # this module is Python 2 era code.
    return string.encode("ascii")
def stringify(value):
    """Convert something to string"""
    # NOTE(review): relies on Python 2 type constants (types.UnicodeType /
    # types.StringType) which no longer exist in Python 3.
    if type(value) == types.UnicodeType:
        # Unicode text is encoded to a UTF-8 byte string.
        return value.encode("utf-8")
    elif type(value) == types.StringType:
        return value
    else:
        return str(value)
def unicodize(value):
    """Convert something to unicode"""
    # NOTE(review): Python 2 only (types.UnicodeType / types.StringType and
    # the unicode() builtin are gone in Python 3).
    if type(value) == types.UnicodeType:
        return value
    elif type(value) == types.StringType:
        # Byte strings are assumed to be UTF-8 encoded.
        return value.decode("utf-8")
    else:
        return unicode(value)
def intify(string):
    """Convert something to integer

    Recognizes common boolean keywords: "off"/"false"/"no" map to 0 and
    "on"/"true"/"yes" map to 1; anything else goes through int().
    """
    # NOTE(review): Python 2 only (types.StringType / types.UnicodeType).
    if type(string) == types.StringType or type(string) == types.UnicodeType:
        if string.lower() in ("off", "false", "no"):
            return 0
        elif string.lower() in ("on", "true", "yes"):
            return 1
    return int(string)
def smart_cast(value):
    """Return the proper cast depending on value

    Maps the runtime type of *value* to one of the coercion functions in
    this module (stringify / unicodize / intify / float). Raises TypeError
    for unsupported types.
    """
    # NOTE(review): Python 2 only (types.StringType and friends are gone in
    # Python 3).
    if type(value) == types.StringType:
        return stringify
    elif type(value) == types.UnicodeType:
        return unicodize
    elif type(value) == types.BooleanType:
        # Booleans are normalized to 0/1 integers.
        return intify
    elif type(value) == types.IntType:
        return intify
    elif type(value) == types.LongType:
        return intify
    elif type(value) == types.FloatType:
        return float
    else:
        raise TypeError("No such cast for this type")
def timestamp():
    """Returns an integer representing the number of seconds elapsed
    since the EPOCH in UTC"""
    now = time.time()
    return int(now)
# Pick the highest-resolution clock available for the platform; used by
# ticks() below.
# NOTE(review): time.clock() was removed in Python 3.8 -- on Windows this
# would need time.perf_counter() under Python 3; this is Python 2 era code.
if os.name == "nt":
    __TICKS = time.clock
elif os.name == "posix":
    __TICKS = time.time
else:
    raise RuntimeError("Operating system not supported")
def ticks():
    """Returns a real representing the most precise clock available
    on the current platform. Note that, depending on the platform,
    the returned value MIGHT NOT be a timestamp. So, you MUST
    use this clock to calculate the time elapsed between two events
    ONLY, and you must not use it with timestamp semantics."""
    # Thin wrapper around the platform clock selected at import time.
    return __TICKS()
def T():
    """Return the opaque time used to identify events by the web UI.

    Computed as ``int(10^6 * ticks())``.  Since ticks() is not necessarily
    a timestamp, the same caveat applies here: use the value to order and
    compare events only, never with timestamp semantics.
    """
    # Was previously a lambda bound to a name (PEP 8 E731); a def with a
    # docstring is equivalent and self-documenting.
    return int(1000000 * ticks())
def get_uuid():
    """Returns per-Neubot random unique identifier.

    Each Neubot is identified by an anonymous unique random ID,
    which allows to perform time series analysis."""
    random_id = uuid.uuid4()
    return str(random_id)
|
builders | matcher_builder_test | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for matcher_builder."""
import tensorflow as tf
from app.object_detection.builders import matcher_builder
from app.object_detection.matchers import argmax_matcher, bipartite_matcher
from app.object_detection.protos import matcher_pb2
from google.protobuf import text_format
class MatcherBuilderTest(tf.test.TestCase):
    """Tests for matcher_builder.build with argmax and bipartite configs.

    Uses assertIsInstance/assertIsNone instead of the weaker
    assertTrue(isinstance(...)) / assertEqual(..., None) idioms so failures
    report the actual type/value instead of just "False is not true".
    """

    def test_build_arg_max_matcher_with_defaults(self):
        matcher_text_proto = """
      argmax_matcher {
      }
    """
        matcher_proto = matcher_pb2.Matcher()
        text_format.Merge(matcher_text_proto, matcher_proto)
        matcher_object = matcher_builder.build(matcher_proto)
        self.assertIsInstance(matcher_object, argmax_matcher.ArgMaxMatcher)
        self.assertAlmostEqual(matcher_object._matched_threshold, 0.5)
        self.assertAlmostEqual(matcher_object._unmatched_threshold, 0.5)
        self.assertTrue(matcher_object._negatives_lower_than_unmatched)
        self.assertFalse(matcher_object._force_match_for_each_row)

    def test_build_arg_max_matcher_without_thresholds(self):
        matcher_text_proto = """
      argmax_matcher {
        ignore_thresholds: true
      }
    """
        matcher_proto = matcher_pb2.Matcher()
        text_format.Merge(matcher_text_proto, matcher_proto)
        matcher_object = matcher_builder.build(matcher_proto)
        self.assertIsInstance(matcher_object, argmax_matcher.ArgMaxMatcher)
        self.assertIsNone(matcher_object._matched_threshold)
        self.assertIsNone(matcher_object._unmatched_threshold)
        self.assertTrue(matcher_object._negatives_lower_than_unmatched)
        self.assertFalse(matcher_object._force_match_for_each_row)

    def test_build_arg_max_matcher_with_non_default_parameters(self):
        matcher_text_proto = """
      argmax_matcher {
        matched_threshold: 0.7
        unmatched_threshold: 0.3
        negatives_lower_than_unmatched: false
        force_match_for_each_row: true
      }
    """
        matcher_proto = matcher_pb2.Matcher()
        text_format.Merge(matcher_text_proto, matcher_proto)
        matcher_object = matcher_builder.build(matcher_proto)
        self.assertIsInstance(matcher_object, argmax_matcher.ArgMaxMatcher)
        self.assertAlmostEqual(matcher_object._matched_threshold, 0.7)
        self.assertAlmostEqual(matcher_object._unmatched_threshold, 0.3)
        self.assertFalse(matcher_object._negatives_lower_than_unmatched)
        self.assertTrue(matcher_object._force_match_for_each_row)

    def test_build_bipartite_matcher(self):
        matcher_text_proto = """
      bipartite_matcher {
      }
    """
        matcher_proto = matcher_pb2.Matcher()
        text_format.Merge(matcher_text_proto, matcher_proto)
        matcher_object = matcher_builder.build(matcher_proto)
        self.assertIsInstance(
            matcher_object, bipartite_matcher.GreedyBipartiteMatcher
        )

    def test_raise_error_on_empty_matcher(self):
        matcher_text_proto = """
    """
        matcher_proto = matcher_pb2.Matcher()
        text_format.Merge(matcher_text_proto, matcher_proto)
        with self.assertRaises(ValueError):
            matcher_builder.build(matcher_proto)
# Allow running this test module directly: `python matcher_builder_test.py`.
if __name__ == "__main__":
    tf.test.main()
|
builtinContextMenus | droneAddStack | import gui.fitCommands as cmd
import gui.mainFrame
import wx
from gui.contextMenu import ContextMenuSingle
from gui.fitCommands.helpers import droneStackLimit
from service.fit import Fit
_t = wx.GetTranslation
class DroneAddStack(ContextMenuSingle):
    """Context-menu entry that adds a full stack of the selected drone
    to the active fit's drone bay."""

    def __init__(self):
        self.mainFrame = gui.mainFrame.MainFrame.getInstance()

    def display(self, callingWindow, srcContext, mainItem):
        # Shown only in the market browser contexts, with an active fit and
        # a drone item whose stack limit allows at least one drone.
        if srcContext not in ("marketItemGroup", "marketItemMisc"):
            return False
        if self.mainFrame.getActiveFit() is None:
            return False
        if mainItem is None:
            return False
        if mainItem.category.name != "Drone":
            return False
        fitID = self.mainFrame.getActiveFit()
        fit = Fit.getInstance().getFit(fitID)
        amount = droneStackLimit(fit, mainItem)
        if amount < 1:
            return False
        # Cache the computed stack size for getText()/activate() below;
        # display() is always called before them by the menu framework.
        self.amount = amount
        return True

    def getText(self, callingWindow, itmContext, mainItem):
        # e.g. "Add Hobgoblin II to Drone Bay (x5)"; the suffix is omitted
        # when only a single drone fits.
        return _t("Add {} to Drone Bay{}").format(
            itmContext, "" if self.amount == 1 else " (x{})".format(self.amount)
        )

    def activate(self, callingWindow, fullContext, mainItem, i):
        # NOTE(review): parameter `i` is unused here; presumably required by
        # the ContextMenuSingle.activate signature -- confirm against base.
        command = cmd.GuiAddLocalDroneCommand(
            fitID=self.mainFrame.getActiveFit(),
            itemID=int(mainItem.ID),
            amount=self.amount,
        )
        if self.mainFrame.command.Submit(command):
            # Switch the additions pane to the Drones tab without stealing focus.
            self.mainFrame.additionsPane.select("Drones", focus=False)
DroneAddStack.register()
|
femexamples | constraint_transform_torque | # ***************************************************************************
# * Copyright (c) 2020 Sudhanshu Dubey <sudhanshu.thethunder@gmail.com> *
# * Copyright (c) 2020 Bernd Hahnebach <bernd@bimstatik.org> *
# * *
# * This file is part of the FreeCAD CAx development system. *
# * *
# * This program is free software; you can redistribute it and/or modify *
# * it under the terms of the GNU Lesser General Public License (LGPL) *
# * as published by the Free Software Foundation; either version 2 of *
# * the License, or (at your option) any later version. *
# * for detail see the LICENCE text file. *
# * *
# * This program is distributed in the hope that it will be useful, *
# * but WITHOUT ANY WARRANTY; without even the implied warranty of *
# * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *
# * GNU Library General Public License for more details. *
# * *
# * You should have received a copy of the GNU Library General Public *
# * License along with this program; if not, write to the Free Software *
# * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 *
# * USA *
# * *
# ***************************************************************************
# to run the example use:
"""
from femexamples.constraint_transform_torque import setup
setup()
"""
# constraint transform with a constraint force
# https://forum.freecad.org/viewtopic.php?t=19037
# https://forum.freecad.org/viewtopic.php?t=18970
import Fem
import FreeCAD
import ObjectsFem
from Part import makeLine
from . import manager
from .manager import get_meshname, init_doc
def get_information():
    """Return the metadata dict describing this FEM example."""
    info = {}
    info["name"] = "Constraint Transform Torque"
    info["meshtype"] = "solid"
    info["meshelement"] = "Tet10"
    info["constraints"] = ["fixed", "force", "transform"]
    info["solvers"] = ["calculix", "ccxtools"]
    info["material"] = "solid"
    info["equations"] = ["mechanical"]
    return info
def get_explanation(header=""):
    """Return the example's explanation text, prefixed with *header*."""
    body = """
To run the example from Python console use:
from femexamples.constraint_transform_torque import setup
setup()
See forum topic post:
https://forum.freecad.org/viewtopic.php?f=18&t=19037&start=10#p515447
https://forum.freecad.org/viewtopic.php?t=19037
https://forum.freecad.org/viewtopic.php?t=18970
constraint transform with a constraint force
"""
    return header + body
def setup(doc=None, solvertype="ccxtools"):
    """Set up the constraint-transform-torque example in *doc*.

    doc: target FreeCAD document; a new one is created when None.
    solvertype: "calculix" or "ccxtools"; any other value only prints a
    warning and no solver object is created.
    Returns the recomputed document.
    """
    if doc is None:
        doc = init_doc()
    # explanation object
    # just keep the following line and change text string in get_explanation method
    manager.add_explanation_obj(
        doc, get_explanation(manager.get_header(get_information()))
    )
    # line for load direction
    sh_load_line = makeLine(FreeCAD.Vector(0, 0, 0), FreeCAD.Vector(0, 10, 0))
    load_line = doc.addObject("Part::Feature", "Load_direction_line")
    load_line.Shape = sh_load_line
    doc.recompute()
    if FreeCAD.GuiUp:
        load_line.ViewObject.LineWidth = 5.0
        load_line.ViewObject.LineColor = (1.0, 0.0, 0.0)
    # geometry object: a tube made by cutting a smaller cylinder out of a larger one
    # name is important because the other method in this module use obj name
    cylinder1 = doc.addObject("Part::Cylinder", "Cylinder1")
    cylinder1.Height = "50 mm"
    cylinder1.Radius = "5 mm"
    cylinder2 = doc.addObject("Part::Cylinder", "Cylinder2")
    cylinder2.Height = "50 mm"
    cylinder2.Radius = "4 mm"
    geom_obj = doc.addObject("Part::Cut", "Cut")
    geom_obj.Base = cylinder1
    geom_obj.Tool = cylinder2
    doc.recompute()
    if FreeCAD.GuiUp:
        geom_obj.ViewObject.Document.activeView().viewAxonometric()
        geom_obj.ViewObject.Document.activeView().fitAll()
    # analysis
    analysis = ObjectsFem.makeAnalysis(doc, "Analysis")
    # solver
    if solvertype == "calculix":
        solver_obj = ObjectsFem.makeSolverCalculix(doc, "SolverCalculiX")
    elif solvertype == "ccxtools":
        solver_obj = ObjectsFem.makeSolverCalculixCcxTools(doc, "CalculiXccxTools")
        solver_obj.WorkingDir = ""
    else:
        FreeCAD.Console.PrintWarning(
            "Unknown or unsupported solver type: {}. "
            "No solver object was created.\n".format(solvertype)
        )
    if solvertype == "calculix" or solvertype == "ccxtools":
        solver_obj.AnalysisType = "static"
        solver_obj.GeometricalNonlinearity = "linear"
        solver_obj.ThermoMechSteadyState = False
        solver_obj.MatrixSolverType = "default"
        solver_obj.IterationsControlParameterTimeUse = False
        solver_obj.SplitInputWriter = False
    analysis.addObject(solver_obj)
    # material
    material_obj = ObjectsFem.makeMaterialSolid(doc, "MechanicalMaterial")
    mat = material_obj.Material
    mat["Name"] = "Calculix-Steel"
    mat["YoungsModulus"] = "210000 MPa"
    mat["PoissonRatio"] = "0.30"
    material_obj.Material = mat
    analysis.addObject(material_obj)
    # constraint fixed
    con_fixed = ObjectsFem.makeConstraintFixed(doc, "ConstraintFixed")
    con_fixed.References = [(geom_obj, "Face3")]
    analysis.addObject(con_fixed)
    # constraint force, directed along the load line drawn above
    con_force = ObjectsFem.makeConstraintForce(doc, "ConstraintForce")
    con_force.References = [(geom_obj, "Face1")]
    con_force.Force = 2500.0  # 2500 N = 2.5 kN
    con_force.Direction = (load_line, ["Edge1"])
    con_force.Reversed = True
    analysis.addObject(con_force)
    # constraint transform
    con_transform = ObjectsFem.makeConstraintTransform(doc, name="ConstraintTransform")
    con_transform.References = [(geom_obj, "Face1")]
    con_transform.TransformType = "Cylindrical"
    con_transform.X_rot = 0.0
    con_transform.Y_rot = 0.0
    con_transform.Z_rot = 0.0
    analysis.addObject(con_transform)
    # mesh: use the pre-generated Tet10 mesh shipped with the examples
    from .meshes.mesh_transform_torque_tetra10 import create_elements, create_nodes
    fem_mesh = Fem.FemMesh()
    control = create_nodes(fem_mesh)
    if not control:
        FreeCAD.Console.PrintError("Error on creating nodes.\n")
    control = create_elements(fem_mesh)
    if not control:
        FreeCAD.Console.PrintError("Error on creating elements.\n")
    femmesh_obj = analysis.addObject(ObjectsFem.makeMeshGmsh(doc, get_meshname()))[0]
    femmesh_obj.FemMesh = fem_mesh
    femmesh_obj.Part = geom_obj
    femmesh_obj.SecondOrderLinear = False
    doc.recompute()
    return doc
|
sabnzbd | bpsmeter | #!/usr/bin/python3 -OO
# Copyright 2007-2023 The SABnzbd-Team (sabnzbd.org)
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
"""
sabnzbd.bpsmeter - bpsmeter
"""
import logging
import re
import time
from typing import Dict, List, Optional
import sabnzbd
import sabnzbd.cfg as cfg
from sabnzbd.constants import BYTES_FILE_NAME, KIBI
from sabnzbd.misc import to_units
# One day / one week in seconds, as floats to match time.time() arithmetic.
DAY = float(24 * 60 * 60)
WEEK = DAY * 7
# Days per month, indexed by month number (index 0 unused); February's
# leap-year case is handled separately in last_month_day().
DAYS = (0, 31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31)
# Maximum number of samples kept in the speed (bps) history list.
BPS_LIST_MAX = 275
# Parsers for the quota-day setting: a leading day number, and a trailing "hh:mm".
RE_DAY = re.compile(r"^\s*(\d+)[^:]*")
RE_HHMM = re.compile(r"(\d+):(\d+)\s*$")
def tomorrow(t: float) -> float:
    """Return the timestamp of the first midnight after timestamp *t*."""
    tm = time.localtime(t)
    # Midnight of *today*, then advance by one day.
    midnight = (tm.tm_year, tm.tm_mon, tm.tm_mday, 0, 0, 0, tm.tm_wday, tm.tm_yday, tm.tm_isdst)
    return time.mktime(midnight) + DAY
def this_week(t: float) -> float:
    """Return the timestamp of the start (Monday, 00:00) of the week containing *t*."""
    tm = time.localtime(t)
    # Walk back one day at a time until we land on a Monday (tm_wday == 0).
    while tm.tm_wday != 0:
        t -= DAY
        tm = time.localtime(t)
    start = (tm.tm_year, tm.tm_mon, tm.tm_mday, 0, 0, 0, 0, 0, tm.tm_isdst)
    return time.mktime(start)
def next_week(t: float) -> float:
    """Return the timestamp of the start (Monday, 00:00) of the week after *t*."""
    start_of_this_week = this_week(t)
    return start_of_this_week + WEEK
def this_month(t: float) -> float:
    """Return the timestamp of the start of the current month (the 1st, 00:00).

    Note: the previous docstring claimed "next month"; the code builds the
    first day of the month that contains *t*.
    """
    now = time.localtime(t)
    first = (now.tm_year, now.tm_mon, 1, 0, 0, 0, 0, 0, now.tm_isdst)
    return time.mktime(first)
def last_month_day(tm: time.struct_time) -> int:
    """Return the number of the last day of the month given in *tm*."""
    year, month = tm[:2]
    days = DAYS[month]
    # Deliberately simple leap-year rule (ignores the century exceptions);
    # per the original author it is "good enough" for quota scheduling.
    return 29 if days == 28 and year % 4 == 0 else days
def next_month(t: float) -> float:
    """Return the timestamp of the start of the month after *t* (the 1st, 00:00)."""
    tm = time.localtime(t)
    year, month = tm.tm_year, tm.tm_mon + 1
    # Roll over December into January of the next year.
    if month > 12:
        year, month = year + 1, 1
    first = (year, month, 1, 0, 0, 0, 0, 0, tm.tm_isdst)
    return time.mktime(first)
class BPSMeter:
    """Byte counter and speed meter for SABnzbd downloads.

    Tracks per-server day/week/month/grand byte totals, a per-day timeline,
    per-server article try/fail statistics, a rolling bytes-per-second
    history for the UI graph, and the optional download quota. State is
    persisted via ``sabnzbd.filesystem.save_admin``/``load_admin`` under
    ``BYTES_FILE_NAME``.

    Fixes relative to the previous revision: duplicated ``month_total``
    guard and redundant second ``server_bps`` guard removed from
    ``init_server_stats``; copy-paste comments on ``end_of_week``/
    ``end_of_month`` corrected; loop variable in ``update()`` no longer
    shadows the ``server`` parameter; bare ``except:`` narrowed.
    """

    __slots__ = (
        "start_time",
        "log_time",
        "speed_log_time",
        "last_update",
        "bps",
        "bps_list",
        "server_bps",
        "cached_amount",
        "sum_cached_amount",
        "day_total",
        "week_total",
        "month_total",
        "grand_total",
        "timeline_total",
        "article_stats_tried",
        "article_stats_failed",
        "delayed_assembler",
        "day_label",
        "end_of_day",
        "end_of_week",
        "end_of_month",
        "q_day",
        "q_period",
        "quota",
        "left",
        "have_quota",
        "q_time",
        "q_hour",
        "q_minute",
        "quota_enabled",
    )

    def __init__(self):
        t = time.time()
        self.start_time = t
        self.log_time = t
        self.speed_log_time = t
        self.last_update = t

        self.bps = 0.0
        self.bps_list: List[int] = []
        self.server_bps: Dict[str, float] = {}

        # Bytes received since the last full update(), per server and summed.
        self.cached_amount: Dict[str, int] = {}
        self.sum_cached_amount: int = 0

        self.day_total: Dict[str, int] = {}
        self.week_total: Dict[str, int] = {}
        self.month_total: Dict[str, int] = {}
        self.grand_total: Dict[str, int] = {}
        self.timeline_total: Dict[str, Dict[str, int]] = {}
        self.article_stats_tried: Dict[str, Dict[str, int]] = {}
        self.article_stats_failed: Dict[str, Dict[str, int]] = {}
        self.delayed_assembler: int = 0

        self.day_label: str = time.strftime("%Y-%m-%d")
        self.end_of_day: float = tomorrow(t)  # Time that current day will end
        self.end_of_week: float = next_week(t)  # Time that current week will end
        self.end_of_month: float = next_month(t)  # Time that current month will end

        self.q_day = 1  # Day of quota reset
        self.q_period = "m"  # Daily/Weekly/Monthly quota = d/w/m
        self.quota = 0.0  # Quota
        self.left = 0.0  # Remaining quota
        self.have_quota = False  # Flag for quota active
        self.q_time = 0  # Next reset time for quota
        self.q_hour = 0  # Quota reset hour
        self.q_minute = 0  # Quota reset minute
        self.quota_enabled: bool = True  # Scheduled quota enable/disable

    def save(self):
        """Save admin to disk"""
        sabnzbd.filesystem.save_admin(
            (
                self.last_update,
                self.grand_total,
                self.day_total,
                self.week_total,
                self.month_total,
                self.end_of_day,
                self.end_of_week,
                self.end_of_month,
                self.quota,
                self.left,
                self.q_time,
                self.timeline_total,
                self.article_stats_tried,
                self.article_stats_failed,
            ),
            BYTES_FILE_NAME,
        )

    def defaults(self):
        """Get the latest data from the database and assign to a fake server"""
        logging.debug("Setting default BPS meter values")
        with sabnzbd.database.HistoryDB() as history_db:
            grand, month, week = history_db.get_history_size()
        self.grand_total = {}
        self.month_total = {}
        self.week_total = {}
        self.day_total = {}
        # "x" is the fake server the historic totals are attributed to.
        if grand:
            self.grand_total["x"] = grand
        if month:
            self.month_total["x"] = month
        if week:
            self.week_total["x"] = week
        self.quota = self.left = cfg.quota_size.get_float()

    def read(self):
        """Read admin from disk, return True when pause is needed"""
        res = False
        quota = self.left = cfg.quota_size.get_float()  # Quota for this period
        self.have_quota = bool(cfg.quota_size())
        data = sabnzbd.filesystem.load_admin(BYTES_FILE_NAME)
        try:
            (
                self.last_update,
                self.grand_total,
                self.day_total,
                self.week_total,
                self.month_total,
                self.end_of_day,
                self.end_of_week,
                self.end_of_month,
                self.quota,
                self.left,
                self.q_time,
                self.timeline_total,
            ) = data[:12]

            # Article statistics were only added in 3.2.x
            if len(data) > 12:
                self.article_stats_tried, self.article_stats_failed = data[12:14]

            # Clean the data, it could have invalid values in older versions
            for server in self.timeline_total:
                for day in self.timeline_total[server]:
                    if not isinstance(self.timeline_total[server][day], int):
                        self.timeline_total[server][day] = 0

            # Trigger quota actions
            if abs(quota - self.quota) > 0.5:
                self.change_quota()
            res = self.reset_quota()
        except Exception:
            # Any missing/malformed admin file falls back to defaults
            # (was a bare except:, which also swallowed SystemExit).
            self.defaults()
        return res

    def init_server_stats(self, server: Optional[str] = None):
        """Initialize all missing counters for "server" (idempotent)."""
        if server not in self.cached_amount:
            self.cached_amount[server] = 0
        if server not in self.server_bps:
            self.server_bps[server] = 0.0
        if server not in self.day_total:
            self.day_total[server] = 0
        if server not in self.week_total:
            self.week_total[server] = 0
        if server not in self.month_total:
            self.month_total[server] = 0
        if server not in self.grand_total:
            self.grand_total[server] = 0
        if server not in self.timeline_total:
            self.timeline_total[server] = {}
        if self.day_label not in self.timeline_total[server]:
            self.timeline_total[server][self.day_label] = 0
        # Tried/failed stats are always created and reset together.
        if server not in self.article_stats_tried:
            self.article_stats_tried[server] = {}
            self.article_stats_failed[server] = {}
        if self.day_label not in self.article_stats_tried[server]:
            self.article_stats_tried[server][self.day_label] = 0
            self.article_stats_failed[server][self.day_label] = 0

    def update(self, server: Optional[str] = None, amount: int = 0):
        """Update counters for "server" with "amount" bytes.

        With a server argument this only buffers the amount (cheap hot
        path). Without arguments it folds the buffered amounts into all
        statistics, recomputes the speeds and enforces the quota.
        """
        # Add amount to temporary storage
        if server:
            self.cached_amount[server] += amount
            self.sum_cached_amount += amount
            return

        t = time.time()
        if t > self.end_of_day:
            # Current day passed, get new end of day
            self.day_label = time.strftime("%Y-%m-%d")
            self.end_of_day = tomorrow(t) - 1.0
            self.day_total = {}
            # Reset delayed counters so they don't go too high
            self.delayed_assembler = 0

            # Check end of week and end of month
            if t > self.end_of_week:
                self.week_total = {}
                self.end_of_week = next_week(t) - 1.0
            if t > self.end_of_month:
                self.month_total = {}
                self.end_of_month = next_month(t) - 1.0

            # Need to reset all counters; loop variable renamed so it does
            # not shadow the `server` parameter.
            for server_obj in sabnzbd.Downloader.servers[:]:
                self.init_server_stats(server_obj.id)

        # Add amounts that have been stored temporarily to statistics
        for srv in self.cached_amount:
            if self.cached_amount[srv]:
                self.day_total[srv] += self.cached_amount[srv]
                self.week_total[srv] += self.cached_amount[srv]
                self.month_total[srv] += self.cached_amount[srv]
                self.grand_total[srv] += self.cached_amount[srv]
                self.timeline_total[srv][self.day_label] += self.cached_amount[srv]

                # Update server bps (running average over the current window)
                try:
                    self.server_bps[srv] = (
                        self.server_bps[srv] * (self.last_update - self.start_time)
                        + self.cached_amount[srv]
                    ) / (t - self.start_time)
                except ZeroDivisionError:
                    self.server_bps[srv] = 0.0

                # Reset for next time
                self.cached_amount[srv] = 0

        # Quota check
        if self.have_quota and self.quota_enabled:
            self.left -= self.sum_cached_amount
            if self.left <= 0.0:
                if not sabnzbd.Downloader.paused:
                    sabnzbd.Downloader.pause()
                    logging.warning(T("Quota spent, pausing downloading"))

        # Speedometer
        try:
            self.bps = (
                self.bps * (self.last_update - self.start_time) + self.sum_cached_amount
            ) / (t - self.start_time)
        except ZeroDivisionError:
            self.bps = 0.0

        self.sum_cached_amount = 0
        self.last_update = t

        # Clamp start_time so the average covers at most the last 5 seconds
        check_time = t - 5.0
        if self.start_time < check_time:
            self.start_time = check_time

        if self.bps < 0.01:
            self.reset()
        elif self.log_time < check_time:
            logging.debug("Speed: %sB/s", to_units(self.bps))
            self.log_time = t

        # Record one speed sample per second for the UI graph
        if self.speed_log_time < (t - 1.0):
            self.add_empty_time()
            self.bps_list.append(int(self.bps / KIBI))
            self.speed_log_time = t

    def register_server_article_tried(self, server: str):
        """Keep track how many articles were tried for each server"""
        self.article_stats_tried[server][self.day_label] += 1

    def register_server_article_failed(self, server: str):
        """Keep track how many articles failed for each server"""
        self.article_stats_failed[server][self.day_label] += 1

    def reset(self):
        """Restart the averaging window and zero all speeds."""
        t = time.time()
        self.start_time = t
        self.log_time = t
        self.last_update = t

        # Reset general BPS and the one for all servers
        self.bps = 0.0
        for server in self.server_bps:
            self.server_bps[server] = 0.0

    def add_empty_time(self):
        """Pad the speed history with zeros for seconds without samples."""
        # Extra zeros, but never more than the maximum!
        nr_diffs = min(int(time.time() - self.speed_log_time), BPS_LIST_MAX)
        if nr_diffs > 1:
            self.bps_list.extend([0] * nr_diffs)

        # Always trim the list to the max-length
        if len(self.bps_list) > BPS_LIST_MAX:
            self.bps_list = self.bps_list[len(self.bps_list) - BPS_LIST_MAX :]

    def get_sums(self):
        """return tuple of grand, month, week, day totals"""
        return (
            sum(self.grand_total.values()),
            sum(self.month_total.values()),
            sum(self.week_total.values()),
            sum(self.day_total.values()),
        )

    def amounts(self, server: str):
        """Return grand, month, week, day and article totals for specified server"""
        return (
            self.grand_total.get(server, 0),
            self.month_total.get(server, 0),
            self.week_total.get(server, 0),
            self.day_total.get(server, 0),
            self.timeline_total.get(server, {}),
            self.article_stats_tried.get(server, {}),
            self.article_stats_failed.get(server, {}),
        )

    def clear_server(self, server: str):
        """Clean counters for specified server and persist the result."""
        if server in self.day_total:
            del self.day_total[server]
        if server in self.week_total:
            del self.week_total[server]
        if server in self.month_total:
            del self.month_total[server]
        if server in self.grand_total:
            del self.grand_total[server]
        if server in self.timeline_total:
            del self.timeline_total[server]
        if server in self.article_stats_tried:
            del self.article_stats_tried[server]
        if server in self.article_stats_failed:
            del self.article_stats_failed[server]
        self.init_server_stats(server)
        self.save()

    def get_bps_list(self):
        """Return the speed history thinned down to the user's refresh rate."""
        refresh_rate = int(cfg.refresh_rate()) if cfg.refresh_rate() else 1
        self.add_empty_time()
        # We record every second, but display at the user's refresh-rate
        return self.bps_list[::refresh_rate]

    def reset_quota(self, force: bool = False):
        """Check if it's time to reset the quota, optionally resuming
        Return True, when still paused or should be paused
        """
        if force or (self.have_quota and time.time() > (self.q_time - 50)):
            self.quota = self.left = cfg.quota_size.get_float()
            logging.info("Quota was reset to %s", self.quota)
            if cfg.quota_resume():
                logging.info("Auto-resume due to quota reset")
                sabnzbd.Downloader.resume()
            self.next_reset()
            return False
        else:
            return True

    def next_reset(self, t: Optional[float] = None):
        """Determine next reset time and store it in self.q_time."""
        t = t or time.time()
        tm = time.localtime(t)
        if self.q_period == "d":
            nx = (tm[0], tm[1], tm[2], self.q_hour, self.q_minute, 0, 0, 0, tm[8])
            if (tm.tm_hour * 60 + tm.tm_min) >= (self.q_hour * 60 + self.q_minute):
                # If today's moment has passed, it will happen tomorrow
                t = time.mktime(nx) + 24 * 3600
                tm = time.localtime(t)
        elif self.q_period == "w":
            if self.q_day < tm.tm_wday + 1 or (
                self.q_day == tm.tm_wday + 1
                and (tm.tm_hour * 60 + tm.tm_min) >= (self.q_hour * 60 + self.q_minute)
            ):
                tm = time.localtime(next_week(t))
            dif = abs(self.q_day - tm.tm_wday - 1)
            t = time.mktime(tm) + dif * 24 * 3600
            tm = time.localtime(t)
        elif self.q_period == "m":
            if self.q_day < tm.tm_mday or (
                self.q_day == tm.tm_mday
                and (tm.tm_hour * 60 + tm.tm_min) >= (self.q_hour * 60 + self.q_minute)
            ):
                tm = time.localtime(next_month(t))
            # Never point past the end of the target month.
            day = min(last_month_day(tm), self.q_day)
            tm = (tm[0], tm[1], day, self.q_hour, self.q_minute, 0, 0, 0, tm[8])
        else:
            return
        tm = (tm[0], tm[1], tm[2], self.q_hour, self.q_minute, 0, 0, 0, tm[8])
        self.q_time = time.mktime(tm)
        logging.debug("Will reset quota at %s", tm)

    def change_quota(self, allow_resume: bool = True):
        """Update quota, potentially pausing downloader"""
        if not self.have_quota and self.quota < 0.5:
            # Never set, use last period's size
            per = cfg.quota_period()
            sums = self.get_sums()
            if per == "d":
                self.left = sums[3]
            elif per == "w":
                self.left = sums[2]
            elif per == "m":
                self.left = sums[1]

        self.have_quota = bool(cfg.quota_size())
        if self.have_quota:
            quota = cfg.quota_size.get_float()
            if self.quota:
                # Quota change, recalculate amount left
                self.left = quota - (self.quota - self.left)
            else:
                # If previously no quota, self.left holds this period's usage
                self.left = quota - self.left
            self.quota = quota
        else:
            self.quota = self.left = 0
        self.update()
        self.next_reset()
        if self.left > 0.5 and allow_resume:
            self.resume()

    def get_quota(self):
        """If quota active, return check-function, hour, minute"""
        if self.have_quota:
            self.q_period = cfg.quota_period()[0].lower()
            self.q_day = 1
            self.q_hour = self.q_minute = 0
            # Pattern = <day#> <hh:mm>
            # The <day> and <hh:mm> part can both be optional
            txt = cfg.quota_day().lower()
            if m := RE_DAY.search(txt):
                self.q_day = int(m.group(1))
            if m := RE_HHMM.search(txt):
                self.q_hour = int(m.group(1))
                self.q_minute = int(m.group(2))
            # Clamp the reset day to the valid range for the period.
            if self.q_period == "w":
                self.q_day = max(1, self.q_day)
                self.q_day = min(7, self.q_day)
            elif self.q_period == "m":
                self.q_day = max(1, self.q_day)
                self.q_day = min(31, self.q_day)
            else:
                self.q_day = 1
            self.change_quota(allow_resume=False)
            return quota_handler, self.q_hour, self.q_minute
        else:
            return None, 0, 0

    def set_status(self, status: bool, action: bool = True):
        """Disable/enable quota management"""
        self.quota_enabled = status
        if action and not status:
            self.resume()

    @staticmethod
    def resume():
        """Resume downloading"""
        if cfg.quota_resume() and sabnzbd.Downloader.paused:
            sabnzbd.Downloader.resume()
def quota_handler():
    """Scheduler callback: re-check the quota and reset it when due."""
    logging.debug("Checking quota")
    # Delegates to the singleton meter; pausing/resuming happens there.
    sabnzbd.BPSMeter.reset_quota()
|
quodlibet | exfalso | # Copyright 2004-2005 Joe Wreschnig, Niklas Janlert
# 2012 Christoph Reiter
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
import os
import sys
from quodlibet import _, app, config, const, util
from senf import fsnative
def main(argv=None):
    """Run the Ex Falso tag editor until the UI exits.

    Initialises the shared Quod Libet infrastructure (config, library,
    plugins, session), opens the main window on the requested directory
    and blocks in the GTK main loop. The call order below matters: config
    before library, library before player/plugins, window before run().
    """
    if argv is None:
        argv = sys.argv
    import quodlibet

    # Load the shared Quod Libet configuration file.
    config_file = os.path.join(quodlibet.get_user_dir(), "config")
    quodlibet.init(config_file=config_file)
    from quodlibet.qltk import add_signal_watch

    # Quit the app cleanly on termination signals.
    add_signal_watch(app.quit)
    opts = util.OptionParser(
        "Ex Falso", const.VERSION, _("an audio tag editor"), "[%s]" % _("directory")
    )
    # Always append the current directory as a fallback positional
    # argument, so args[0] exists even when none was given.
    argv.append(os.path.abspath(fsnative(".")))
    opts, args = opts.parse(argv[1:])
    args[0] = os.path.realpath(args[0])
    app.name = "Ex Falso"
    app.description = _("Audio metadata editor")
    app.id = "io.github.quodlibet.ExFalso"
    app.process_name = "exfalso"
    quodlibet.set_application_info(app)
    import quodlibet.library
    import quodlibet.player

    app.library = quodlibet.library.init()
    # "nullbe" — presumably the no-playback backend; Ex Falso only edits
    # tags and never plays audio (TODO confirm backend name semantics).
    app.player = quodlibet.player.init_player("nullbe", app.librarian)
    from quodlibet.qltk.songlist import PlaylistModel

    app.player.setup(PlaylistModel(), None, 0)
    pm = quodlibet.init_plugins()
    pm.rescan()
    from quodlibet.qltk.exfalsowindow import ExFalsoWindow

    dir_ = args[0]
    app.window = ExFalsoWindow(app.library, dir_)
    app.window.init_plugins()
    from quodlibet.util.cover import CoverManager

    app.cover_manager = CoverManager()
    app.cover_manager.init_plugins()
    from quodlibet import session

    session_client = session.init(app)
    quodlibet.enable_periodic_save(save_library=False)
    # Blocks until the window is closed; everything after is shutdown.
    quodlibet.run(app.window)
    quodlibet.finish_first_session("exfalso")
    config.save()
    session_client.close()
    util.print_d("Finished shutdown.")
|
PyObjCTest | test_protocol | import platform
import sys
import warnings
import objc
from PyObjCTools.TestSupport import *
# Most useful systems will at least have 'NSObject'.
NSObject = objc.lookUpClass("NSObject")

# XXX : This is a really dumb way to detect < 10.3
if not NSObject.instancesRespondToSelector_("setValue:forKey:"):
    # Defining protocols in an MH_BUNDLE makes < 10.3 explode
    OC_TestProtocol = None
else:
    from PyObjCTest.protocol import OC_TestProtocol

# Informal protocol used by the tests below: one required selector
# (testMethod) and one optional selector (testMethod2:).
MyProto = objc.informal_protocol(
    "MyProto",
    (
        objc.selector(None, selector=b"testMethod", signature=b"I@:", isRequired=1),
        objc.selector(None, selector=b"testMethod2:", signature=b"v@:i", isRequired=0),
    ),
)
class TestInformalProtocols(TestCase):
    """Behaviour of objc.informal_protocol during class creation."""

    def testMissingProto(self):
        # Even without declaring MyProto in the bases, the informal
        # protocol fixes the method's Objective-C signature.
        class ProtoClass1(NSObject):
            def testMethod(self):
                pass

        self.assertEqual(ProtoClass1.testMethod.signature, b"I@:")

    def doIncompleteClass(self):
        # Declares MyProto but omits the required 'testMethod'; class
        # creation is expected to fail (checked in testIncompleteClass).
        class ProtoClass2(NSObject, MyProto):
            def testMethod2_(self, x):
                pass

    def testIncompleteClass(self):
        self.assertRaises(TypeError, self.doIncompleteClass)

    @onlyIf(sys.version_info[:2] < (3, 2), "not valid for python 3.3 and later")
    def testOptional(self):
        # The optional selector (testMethod2:) may be left out.
        class ProtoClass3(NSObject, MyProto):
            def testMethod(self):
                pass
# Python-defined formal protocols; only constructed on 32-bit builds or
# on macOS >= 10.7, and only under Python 2 in this code path.
if (sys.maxsize < 2**32 or platform.mac_ver()[0] >= "10.7") and sys.version_info[
    0
] == 2:
    EmptyProtocol = objc.formal_protocol("EmptyProtocol", None, ())

    MyProtocol = objc.formal_protocol(
        "MyProtocol",
        None,
        (
            objc.selector(None, selector=b"protoMethod", signature=b"I@:"),
            objc.selector(None, selector=b"anotherProto:with:", signature=b"v@:ii"),
        ),
    )

    # Inherits from MyProtocol and adds one selector.
    MyOtherProtocol = objc.formal_protocol(
        "MyOtherProtocol",
        (MyProtocol,),
        [objc.selector(None, selector=b"yetAnother:", signature=b"i@:I")],
    )

    # Mixes an instance method and a class method.
    MyClassProtocol = objc.formal_protocol(
        "MyClassProtocol",
        None,
        [
            objc.selector(None, selector=b"anAnotherOne:", signature=b"i@:i"),
            objc.selector(
                None, selector=b"aClassOne:", signature=b"@@:i", isClassMethod=1
            ),
        ],
    )
if OC_TestProtocol is not None:

    class TestFormalOCProtocols(TestCase):
        """Tests against the Objective-C-defined OC_TestProtocol."""

        def testMethodInfo(self):
            # The protocol exposes exactly two required instance methods.
            actual = OC_TestProtocol.instanceMethods()
            actual.sort(key=lambda item: item["selector"])
            expected = [
                {"required": True, "selector": b"method1", "typestr": b"i@:"},
                {"required": True, "selector": b"method2:", "typestr": b"v@:i"},
            ]
            self.assertEqual(actual, expected)
            self.assertEqual(OC_TestProtocol.classMethods(), [])

            self.assertEqual(
                OC_TestProtocol.descriptionForInstanceMethod_(b"method1"),
                (b"method1", b"i@:"),
            )
            self.assertEqual(
                OC_TestProtocol.descriptionForInstanceMethod_(b"method2:"),
                (b"method2:", b"v@:i"),
            )

        def testImplementFormalProtocol(self):
            # A class that never declared the protocol does not conform.
            class MyClassNotImplementingProtocol(NSObject):
                pass

            self.assertFalse(
                MyClassNotImplementingProtocol.pyobjc_classMethods.conformsToProtocol_(
                    OC_TestProtocol
                )
            )

            # Declaring the protocol but implementing only half of it
            # must fail at class-creation time.
            try:

                class MyClassNotAlsoImplementingProtocol(NSObject, OC_TestProtocol):
                    def method1(self):
                        pass

                self.fail("class not implementing protocol, yet created")
            except TypeError:
                pass

            class MyClassImplementingProtocol(NSObject, OC_TestProtocol):
                def method1(self):
                    pass

                def method2_(self, a):
                    pass

            self.assertTrue(
                MyClassImplementingProtocol.pyobjc_classMethods.conformsToProtocol_(
                    OC_TestProtocol
                )
            )

            # The PyObjC implementation of formal protocols is slightly looser
            # than Objective-C itself: you can inherit part of the protocol
            # from the superclass.
            # XXX: not really: you won't inherit the right signatures by default
            class MyClassImplementingHalfOfProtocol(NSObject):
                def method1(self):
                    pass

                method1 = objc.selector(method1, signature=b"i@:")

            self.assertFalse(
                MyClassImplementingHalfOfProtocol.pyobjc_classMethods.conformsToProtocol_(
                    OC_TestProtocol
                )
            )

            class MyClassImplementingAllOfProtocol(
                MyClassImplementingHalfOfProtocol, OC_TestProtocol
            ):
                def method2_(self, v):
                    pass

            self.assertTrue(
                MyClassImplementingAllOfProtocol.pyobjc_classMethods.conformsToProtocol_(
                    OC_TestProtocol
                )
            )
class TestFormalProtocols(TestCase):
    # Implement unittests for formal protocols here.
    #

    def testImplementAnotherObject(self):
        # Only protocols/classes are valid next to NSObject in the bases
        # list; instances and non-class objects must raise TypeError.
        anObject = NSObject.alloc().init()

        try:

            class MyClassImplementingAnotherObject(NSObject, anObject):
                pass

            self.fail()
        except TypeError:
            pass

        try:

            class MyClassImplementingAnotherObject(NSObject, 10):
                pass

            self.fail()
        except TypeError:
            pass

        try:

            class MyClassImplementingAnotherObject(NSObject, int):
                pass

            self.fail()
        except TypeError:
            pass

    def dont_testDefiningingProtocols(self):
        # Disabled ("dont_" prefix): depends on the Python-created formal
        # protocols, which only exist under the version guard above.

        # Pretty useless, but should work
        self.assertTrue(MyOtherProtocol.conformsTo_(MyProtocol))

        try:

            class MyClassImplementingMyProtocol(NSObject, MyProtocol):
                pass

            # Declare to implement a protocol, but don't do it?
            self.fail()
        except TypeError:
            pass

        class MyClassImplementingMyProtocol(NSObject, MyProtocol):
            def protoMethod(self):
                return 1

            def anotherProto_with_(self, a1, a2):
                pass

        # Method signatures are taken from the protocol definition.
        self.assertEqual(
            MyClassImplementingMyProtocol.protoMethod.signature, b"I@:"
        )
        self.assertEqual(
            MyClassImplementingMyProtocol.anotherProto_with_.signature, b"v@:ii"
        )
        self.assertTrue(
            MyClassImplementingMyProtocol.pyobjc_classMethods.conformsToProtocol_(
                MyProtocol
            )
        )

        class MyClassImplementingMyOtherProtocol(NSObject, MyOtherProtocol):
            def protoMethod(self):
                pass

            def anotherProto_with_(self, a1, a2):
                pass

            def yetAnother_(self, a):
                pass

        self.assertEqual(
            MyClassImplementingMyOtherProtocol.protoMethod.signature, b"I@:"
        )
        self.assertEqual(
            MyClassImplementingMyOtherProtocol.anotherProto_with_.signature,
            b"v@:ii",
        )
        self.assertEqual(
            MyClassImplementingMyOtherProtocol.yetAnother_.signature, b"i@:I"
        )
        # Conformance covers the inherited protocol as well.
        self.assertTrue(
            MyClassImplementingMyOtherProtocol.pyobjc_classMethods.conformsToProtocol_(
                MyProtocol
            )
        )
        self.assertTrue(
            MyClassImplementingMyOtherProtocol.pyobjc_classMethods.conformsToProtocol_(
                MyOtherProtocol
            )
        )

        try:

            class ImplementingMyClassProtocol(NSObject, MyClassProtocol):
                pass

            self.fail()
        except TypeError:
            pass

        class ImplementingMyClassProtocol(NSObject, MyClassProtocol):
            def anAnotherOne_(self, a):
                pass

            def aClassOne_(self, a):
                pass

            aClassOne_ = classmethod(aClassOne_)

        self.assertEqual(
            ImplementingMyClassProtocol.anAnotherOne_.signature, b"i@:i"
        )
        self.assertEqual(ImplementingMyClassProtocol.aClassOne_.isClassMethod, True)
        self.assertEqual(ImplementingMyClassProtocol.aClassOne_.signature, b"@@:i")

        # TODO: protocol with class and instance method with different
        # signatures.
        # TODO: should not need to specify classmethod() if it can be
        # deduced from the protocol

    def testIncorrectlyDefiningFormalProtocols(self):
        # Some bad calls to objc.formal_protocol
        self.assertRaises(TypeError, objc.formal_protocol, [], None, ())
        self.assertRaises(
            TypeError, objc.formal_protocol, "supers", (NSObject,), ()
        )
        self.assertRaises(
            TypeError,
            objc.formal_protocol,
            "supers",
            objc.protocolNamed("NSLocking"),
            (),
        )
        self.assertRaises(
            TypeError,
            objc.formal_protocol,
            "supers",
            [
                objc.protocolNamed("NSLocking"),
                "hello",
            ],
            (),
        )
        self.assertRaises(
            TypeError,
            objc.formal_protocol,
            "supers",
            None,
            [
                objc.selector(None, selector=b"fooMethod:", signature=b"v@:i"),
                "hello",
            ],
        )

    def testMethodInfo(self):
        # Introspection of the Python-created formal protocols.
        self.assertEqual(
            MyProtocol.instanceMethods(),
            [
                {"typestr": b"I@:", "required": True, "selector": b"protoMethod"},
                {
                    "typestr": b"v@:ii",
                    "required": True,
                    "selector": b"anotherProto:with:",
                },
            ],
        )
        self.assertEqual(MyProtocol.classMethods(), [])
        self.assertEqual(
            MyProtocol.descriptionForInstanceMethod_(b"protoMethod"),
            ("protoMethod", "I@:"),
        )
        self.assertEqual(
            MyProtocol.descriptionForInstanceMethod_(b"nosuchmethod"), None
        )

        self.assertEqual(
            MyClassProtocol.classMethods(),
            [{"required": True, "selector": "aClassOne:", "typestr": "@@:i"}],
        )
        self.assertEqual(MyProtocol.classMethods(), [])

        self.assertEqual(
            MyClassProtocol.descriptionForClassMethod_(b"aClassOne:"),
            ("aClassOne:", "@@:i"),
        )
        self.assertEqual(
            MyClassProtocol.descriptionForClassMethod_(b"nosuchmethod"), None
        )

    def dont_testObjCInterface(self):
        # TODO: tests that access the Objective-C interface of protocols
        # (those methods should be forwarded to the underlying object, as
        # with objc.pyobjc_unicode).
        # NOTE: This is not very important, the only methods that are not
        # explicitly wrapped should be compatibility methods that will
        # cause a warning when called.
        self.assertEqual(1, 0)
if __name__ == "__main__":
    # main() comes from the star import of PyObjCTools.TestSupport above.
    main()
|
events | discord_status | # discord_status: Set Discord status as current song.
#
# Copyright (c) 2022 Aditi K <105543244+teeleafs@users.noreply.github.com>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
from gi.repository import Gtk
from quodlibet import _, app
from quodlibet.pattern import Pattern
from quodlibet.plugins import ConfProp, PluginConfig
from quodlibet.plugins.events import EventPlugin
# pypresence is an optional dependency; abort plugin loading with the
# standard "missing module" plugin exception when it is absent.
try:
    from pypresence import DiscordNotFound, InvalidID, Presence
except ImportError:
    from quodlibet.plugins import MissingModulePluginException

    raise MissingModulePluginException("pypresence")

# The below resources are from/uploaded-to the Discord Application portal.
QL_DISCORD_RP_ID = "974521025356242984"
QL_LARGE_IMAGE = "io-github-quodlibet-quodlibet"

VERSION = "1.0"

# Default Rich Presence status lines (Quod Libet tag patterns).
CONFIG_DEFAULT_RP_LINE1 = "<artist> / <title>"
CONFIG_DEFAULT_RP_LINE2 = "<album>"
class DiscordStatusConfig:
    """Plugin settings: the two user-editable Rich Presence patterns."""

    # Backing store in Quod Libet's plugin configuration.
    _config = PluginConfig(__name__)

    # Pattern strings for the two status lines shown on Discord.
    rp_line1 = ConfProp(_config, "rp_line1", CONFIG_DEFAULT_RP_LINE1)
    rp_line2 = ConfProp(_config, "rp_line2", CONFIG_DEFAULT_RP_LINE2)


# Module-level singleton used by the plugin and its preference callbacks.
discord_status_config = DiscordStatusConfig()
class DiscordStatusMessage(EventPlugin):
    """Mirror the currently playing song as a Discord Rich Presence status.

    Reacts to Quod Libet player events (song started, paused, unpaused)
    and pushes the rendered patterns to Discord via pypresence.
    """

    PLUGIN_ID = _("Discord status message")
    PLUGIN_NAME = _("Discord Status Message")
    PLUGIN_DESC = _(
        "Change your Discord status message according to what "
        "you're currently listening to."
    )
    VERSION = VERSION

    def __init__(self):
        # Current song; None when nothing is known yet.
        self.song = None
        # Lazily-connected pypresence client; None while disconnected.
        self.discordrp = None

    def update_discordrp(self, details, state=None):
        """Push *details*/*state* to Discord, (re)connecting on demand."""
        if not self.discordrp:
            # Discord may not be running at all — stay disconnected then.
            try:
                self.discordrp = Presence(QL_DISCORD_RP_ID, pipe=0)
                self.discordrp.connect()
            except (DiscordNotFound, ConnectionRefusedError):
                self.discordrp = None

        if self.discordrp:
            try:
                self.discordrp.update(
                    details=details, state=state, large_image=QL_LARGE_IMAGE
                )
            except InvalidID:
                # XXX Discord was closed?
                self.discordrp = None

    def handle_play(self):
        """Render the configured patterns for the current song and send them."""
        if self.song:
            details = Pattern(discord_status_config.rp_line1) % self.song
            state = Pattern(discord_status_config.rp_line2) % self.song
            # The details and state fields must be at least 2 characters.
            if len(details) < 2:
                details = None
            if len(state) < 2:
                state = None
            self.update_discordrp(details, state)

    def handle_paused(self):
        self.update_discordrp(details=_("Paused"))

    def handle_unpaused(self):
        # After enabling mid-song there may be no song recorded yet.
        if not self.song:
            self.song = app.player.song
        self.handle_play()

    def plugin_on_song_started(self, song):
        self.song = song
        if not app.player.paused:
            self.handle_play()

    def plugin_on_paused(self):
        self.handle_paused()

    def plugin_on_unpaused(self):
        self.handle_unpaused()

    def enabled(self):
        # Reflect the current player state immediately on activation.
        if app.player.paused:
            self.handle_paused()
        else:
            self.handle_unpaused()

    def disabled(self):
        # Clear the presence and drop the connection on deactivation.
        if self.discordrp:
            self.discordrp.clear()
            self.discordrp.close()
            self.discordrp = None
            self.song = None

    def PluginPreferences(self, parent):
        """Build the preferences pane: two entries, one per status line."""
        vb = Gtk.VBox(spacing=6)

        def rp_line1_changed(entry):
            discord_status_config.rp_line1 = entry.get_text()
            if not app.player.paused:
                self.handle_play()

        def rp_line2_changed(entry):
            discord_status_config.rp_line2 = entry.get_text()
            if not app.player.paused:
                self.handle_play()

        status_line1_box = Gtk.HBox(spacing=6)
        status_line1_box.set_border_width(3)

        status_line1 = Gtk.Entry()
        status_line1.set_text(discord_status_config.rp_line1)
        status_line1.connect("changed", rp_line1_changed)

        status_line1_box.pack_start(
            Gtk.Label(label=_("Status Line #1")), False, True, 0
        )
        status_line1_box.pack_start(status_line1, True, True, 0)

        status_line2_box = Gtk.HBox(spacing=3)
        status_line2_box.set_border_width(3)

        status_line2 = Gtk.Entry()
        status_line2.set_text(discord_status_config.rp_line2)
        status_line2.connect("changed", rp_line2_changed)

        status_line2_box.pack_start(
            Gtk.Label(label=_("Status Line #2")), False, True, 0
        )
        status_line2_box.pack_start(status_line2, True, True, 0)

        vb.pack_start(status_line1_box, True, True, 0)
        vb.pack_start(status_line2_box, True, True, 0)

        return vb
|
songsmenu | ifp | # Copyright 2004-2005 Joe Wreschnig
# 2016 Nick Boultbee
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
import os
from quodlibet import _, qltk, util
from quodlibet.plugins.songshelpers import each_song, is_a_file
from quodlibet.plugins.songsmenu import SongsMenuPlugin
from quodlibet.qltk import Icons
from quodlibet.qltk.wlw import WaitLoadWindow
class IFPUpload(SongsMenuPlugin):
    """Upload the selected songs to an iRiver iFP device.

    Shells out to the external ``ifp`` tool (ifp-line); device presence
    and every transfer are checked through the commands' exit codes.
    """

    PLUGIN_ID = "Send to iFP"
    PLUGIN_NAME = _("Send to iFP")
    PLUGIN_DESC = _("Uploads songs to an iRiver iFP device.")
    PLUGIN_ICON = Icons.MULTIMEDIA_PLAYER

    # Only offered when every selected song is a local file.
    plugin_handles = each_song(is_a_file)

    def plugin_songs(self, songs):
        """Upload *songs* one by one; return True when aborted or failed."""
        # `ifp typestring` exits non-zero when no device can be contacted.
        if os.system("ifp typestring"):
            qltk.ErrorMessage(
                None,
                _("No iFP device found"),
                _(
                    "Unable to contact your iFP device. Check "
                    "that the device is powered on and plugged "
                    "in, and that you have ifp-line "
                    "(http://ifp-driver.sf.net) installed."
                ),
            ).run()
            return True

        self.__madedir = []
        w = WaitLoadWindow(None, len(songs), _("Uploading %(current)d/%(total)d"))
        w.show()

        for song in songs:
            # Stop on an upload error, or when the user cancels the dialog.
            if self.__upload(song) or w.step():
                w.destroy()
                return True
        w.destroy()

    def __upload(self, song):
        """Upload a single song; return True on failure."""
        import shlex

        filename = song["~filename"]
        basename = song("~basename")
        dirname = os.path.basename(os.path.dirname(filename))
        target = os.path.join(dirname, basename)

        # Avoid spurious calls to ifp mkdir; this can take a long time
        # on a noisy USB line.
        if dirname not in self.__madedir:
            # shlex.quote instead of %r: repr() quoting is not shell-safe
            # for names containing quotes or backslashes.
            os.system("ifp mkdir %s > /dev/null 2>/dev/null" % shlex.quote(dirname))
            self.__madedir.append(dirname)

        if os.system(
            "ifp upload %s %s > /dev/null"
            % (shlex.quote(filename), shlex.quote(target))
        ):
            # Fixed: the two fragments previously concatenated without a
            # separating space ("...%s.The device...").
            tmpl = _(
                "Unable to upload %s. "
                "The device may be out of space, or turned off."
            )
            qltk.ErrorMessage(
                None,
                _("Error uploading"),
                tmpl % util.bold(filename),
                escape_desc=False,
            ).run()
            return True
|
slow-coro-detection | patch | from __future__ import annotations
import time
from asyncio import Handle
from tribler.core.utilities.slow_coro_detection import logger
from tribler.core.utilities.slow_coro_detection.utils import format_info
from tribler.core.utilities.slow_coro_detection.watching_thread import (
SLOW_CORO_DURATION_THRESHOLD,
current,
lock,
)
# pylint: disable=protected-access
_original_handle_run = Handle._run
def patch_asyncio():
    """
    Replace ``asyncio.Handle._run`` with an instrumented version so that the
    coroutine step currently executed by the event loop can be observed.

    You also need to call `start_watching_thread()` to run a separate thread that detects and reports slow coroutines.
    """
    with lock:
        already_patched = getattr(Handle._run, "patched", False)
        if not already_patched:
            # Mark the replacement so repeated calls are idempotent.
            patched_handle_run.patched = True
            Handle._run = patched_handle_run
def patched_handle_run(self: Handle):
    """
    Instrumented replacement for ``Handle._run``: publishes the handle being
    executed and its start timestamp (under `lock`) so that a separate thread
    can inspect them and detect slow coroutines.
    """
    began_at = time.time()
    with lock:
        current.handle = self
        current.start_time = began_at
    try:
        _original_handle_run(self)
    finally:
        with lock:
            current.handle = None
            current.start_time = None
        elapsed = time.time() - began_at
        if elapsed > SLOW_CORO_DURATION_THRESHOLD:
            # The step finished (no freeze), but its execution took too long.
            _report_long_duration(self, elapsed)
    self = None  # Needed to break cycles when an exception occurs (copied from the original Handle._run method)
def _report_long_duration(handle: Handle, duration: float):
    """Log an error describing a coroutine step that took too long to run."""
    details = format_info(handle)
    message = f"Slow coroutine step execution (duration={duration:.3f} seconds): {details}"
    logger.error(message)
|
models | phone_call | from django.db import models
# Duplicate to avoid circular import to provide values for status field
class TwilioCallStatuses:
    """Integer status codes for Twilio phone calls.

    Duplicated here (rather than imported from the twilioapp application) to
    avoid a circular import while still providing values for the legacy
    ``PhoneCallRecord.status`` field.
    """

    # Integer codes persisted in the database.
    QUEUED = 10
    RINGING = 20
    IN_PROGRESS = 30
    COMPLETED = 40
    BUSY = 50
    FAILED = 60
    NO_ANSWER = 70
    CANCELED = 80

    # Maps each code to Twilio's wire-format status string.
    CHOICES = (
        (QUEUED, "queued"),
        (RINGING, "ringing"),
        (IN_PROGRESS, "in-progress"),
        (COMPLETED, "completed"),
        (BUSY, "busy"),
        (FAILED, "failed"),
        (NO_ANSWER, "no-answer"),
        (CANCELED, "canceled"),
    )
class PhoneCallRecord(models.Model):
    """Record of one outgoing notification phone call."""

    class Meta:
        # Legacy table name kept from the original twilioapp application.
        db_table = "twilioapp_phonecall"

    # presumably True when the call was suppressed because a calls limit was
    # exceeded — TODO confirm against the caller that sets it
    exceeded_limit = models.BooleanField(null=True, default=None)
    represents_alert = models.ForeignKey(
        "alerts.Alert", on_delete=models.SET_NULL, null=True, default=None
    )  # deprecated
    represents_alert_group = models.ForeignKey(
        "alerts.AlertGroup", on_delete=models.SET_NULL, null=True, default=None
    )
    notification_policy = models.ForeignKey(
        "base.UserNotificationPolicy",
        on_delete=models.SET_NULL,
        null=True,
        default=None,
    )
    # The user who received the call.
    receiver = models.ForeignKey(
        "user_management.User", on_delete=models.CASCADE, null=True, default=None
    )
    created_at = models.DateTimeField(auto_now_add=True)
    grafana_cloud_notification = models.BooleanField(default=False)  # rename
    # deprecated. It's here for backward compatibility for calls made during or shortly before migration.
    # Should be removed soon after migration
    # NOTE(review): the deprecation note above appears to cover the Twilio
    # `status`/`sid` fields below — confirm before removing them.
    status = models.PositiveSmallIntegerField(
        blank=True,
        null=True,
        choices=TwilioCallStatuses.CHOICES,
    )
    sid = models.CharField(
        blank=True,
        max_length=50,
    )
class ProviderPhoneCall(models.Model):
    """
    ProviderPhoneCall is an interface between PhoneCallRecord and the call data returned from a PhoneProvider.

    Concrete provider phone calls should inherit from ProviderPhoneCall.
    Some phone providers allow tracking the status of a call or gathering pressed digits (used to ack/resolve an
    alert group). This model links a phone call to an alert group without exposing the internals of a concrete
    phone provider to PhoneBackend.
    """

    class Meta:
        abstract = True

    # One provider-specific record per PhoneCallRecord; the related names are
    # parameterized so every concrete subclass gets its own reverse accessor.
    phone_call_record = models.OneToOneField(
        "phone_notifications.PhoneCallRecord",
        on_delete=models.CASCADE,
        related_name="%(app_label)s_%(class)s_related",
        related_query_name="%(app_label)s_%(class)ss",
        null=False,
    )

    def link_and_save(self, phone_call_record: PhoneCallRecord):
        """Attach this provider call to *phone_call_record* and persist it."""
        self.phone_call_record = phone_call_record
        self.save()
|
workflows | clickhouse | import collections.abc
import contextlib
import datetime as dt
import json
import typing
import uuid
import aiohttp
import pyarrow as pa
import requests
from django.conf import settings
def encode_clickhouse_data(data: typing.Any) -> bytes:
    """Encode a single Python value into the bytes ClickHouse expects.

    The encoding depends on the value's type: NULL for None, bare digits for
    ints, toDateTime/toDateTime64 expressions for datetimes, bracketed or
    parenthesized element lists for lists/tuples, JSON for dicts, and a
    single-quoted escaped string for anything else.

    Returns:
        The encoded bytes.
    """
    if data is None:
        return b"NULL"
    if isinstance(data, uuid.UUID):
        return f"'{data}'".encode("utf-8")
    if isinstance(data, int):
        return b"%d" % data
    if isinstance(data, dt.datetime):
        # Pass the timezone name along only for aware datetimes.
        tz_suffix = f", '{data:%Z}'" if data.tzinfo else ""
        if data.microsecond == 0:
            rendered = f"toDateTime('{data:%Y-%m-%d %H:%M:%S}'{tz_suffix})"
        else:
            # Sub-second precision requires DateTime64 with 6 digits.
            rendered = f"toDateTime64('{data:%Y-%m-%d %H:%M:%S.%f}', 6{tz_suffix})"
        return rendered.encode("utf-8")
    if isinstance(data, list):
        return b"[" + b",".join(encode_clickhouse_data(item) for item in data) + b"]"
    if isinstance(data, tuple):
        return b"(" + b",".join(encode_clickhouse_data(item) for item in data) + b")"
    if isinstance(data, dict):
        return json.dumps(data).encode("utf-8")
    # Fallback: render as a quoted string, escaping backslashes and quotes.
    escaped = str(data).replace("\\", "\\\\").replace("'", "\\'")
    return f"'{escaped}'".encode("utf-8")
class ClickHouseError(Exception):
    """Raised for anything going wrong when talking to ClickHouse.

    Attributes:
        query: The query whose execution triggered the error.
    """

    def __init__(self, query, error_message):
        super().__init__(error_message)
        self.query = query
class ClickHouseClient:
    """An asynchronous client to access ClickHouse via HTTP.

    Attributes:
        session: The underlying aiohttp.ClientSession used for HTTP communication.
        url: The URL of the ClickHouse cluster.
        headers: Headers sent to ClickHouse in an HTTP request. Includes authentication details.
        params: Parameters passed as query arguments in the HTTP request. Common ones include the
            ClickHouse database and the 'max_execution_time'.
    """

    def __init__(
        self,
        session: aiohttp.ClientSession | None = None,
        url: str = "http://localhost:8123",
        user: str = "default",
        password: str = "",
        database: str = "default",
        **kwargs,
    ):
        # A session created here is owned by this client and closed in
        # __aexit__, same as a caller-provided one.
        if session is None:
            self.session = aiohttp.ClientSession()
        else:
            self.session = session
        self.url = url
        self.headers = {}
        self.params = {}

        if user:
            self.headers["X-ClickHouse-User"] = user
        if password:
            self.headers["X-ClickHouse-Key"] = password
        if database:
            self.params["database"] = database

        # Any extra keyword arguments are forwarded to ClickHouse as query
        # parameters (e.g. max_execution_time, max_block_size).
        self.params.update(kwargs)

    @classmethod
    def from_posthog_settings(cls, session, settings, **kwargs):
        """Initialize a ClickHouseClient from PostHog settings."""
        return cls(
            session=session,
            url=settings.CLICKHOUSE_URL,
            user=settings.CLICKHOUSE_USER,
            password=settings.CLICKHOUSE_PASSWORD,
            database=settings.CLICKHOUSE_DATABASE,
            **kwargs,
        )

    async def is_alive(self) -> bool:
        """Check if the connection is alive by sending a SELECT 1 query.

        Returns:
            A boolean indicating whether the connection is alive.
        """
        try:
            # Use the response as a context manager so the underlying
            # connection is released (previously the response was leaked).
            async with self.session.get(
                url=self.url,
                params={**self.params, "query": "SELECT 1"},
                headers=self.headers,
                raise_for_status=True,
            ):
                pass
        except aiohttp.ClientError:
            # ClientError also covers connection-level failures; catching
            # only ClientResponseError let a refused connection propagate
            # instead of reporting the server as not alive.
            return False
        return True

    def prepare_query(
        self, query: str, query_parameters: None | dict[str, typing.Any] = None
    ) -> str:
        """Prepare the query being sent by encoding and formatting it with the provided parameters.

        Returns:
            The formatted query.
        """
        if query_parameters:
            format_parameters = {
                k: encode_clickhouse_data(v).decode("utf-8")
                for k, v in query_parameters.items()
            }
        else:
            format_parameters = {}
        query = query.format(**format_parameters)

        return query

    def prepare_request_data(
        self, data: collections.abc.Sequence[typing.Any]
    ) -> bytes | None:
        """Prepare the request data sent by encoding it.

        Returns:
            The request data to be passed as the body of the request, or
            None when there is no data.
        """
        if len(data) > 0:
            request_data = b",".join(encode_clickhouse_data(value) for value in data)
        else:
            request_data = None
        return request_data

    async def acheck_response(self, response, query) -> None:
        """Asynchronously check the HTTP response received from ClickHouse.

        Raises:
            ClickHouseError: If the status code is not 200.
        """
        if response.status != 200:
            error_message = await response.text()
            raise ClickHouseError(query, error_message)

    def check_response(self, response, query) -> None:
        """Check the (requests) HTTP response received from ClickHouse.

        Raises:
            ClickHouseError: If the status code is not 200.
        """
        if response.status_code != 200:
            error_message = response.text
            raise ClickHouseError(query, error_message)

    @contextlib.asynccontextmanager
    async def apost_query(
        self, query, *data, query_parameters, query_id
    ) -> collections.abc.AsyncIterator[aiohttp.ClientResponse]:
        """POST a query to the ClickHouse HTTP interface.

        The context manager protocol is used to control when to release the response.

        Arguments:
            query: The query to POST.
            *data: Iterable of values to include in the body of the request. For example, the tuples of VALUES for an INSERT query.
            query_parameters: Parameters to be formatted in the query.
            query_id: A query ID to pass to ClickHouse.

        Returns:
            The response received from the ClickHouse HTTP interface.
        """
        params = {**self.params}
        if query_id is not None:
            params["query_id"] = query_id

        query = self.prepare_query(query, query_parameters)
        request_data = self.prepare_request_data(data)

        # With body data, the query goes in the URL; otherwise the query
        # itself is the request body.
        if request_data:
            params["query"] = query
        else:
            request_data = query.encode("utf-8")

        async with self.session.post(
            url=self.url, params=params, headers=self.headers, data=request_data
        ) as response:
            await self.acheck_response(response, query)
            yield response

    @contextlib.contextmanager
    def post_query(
        self, query, *data, query_parameters, query_id
    ) -> collections.abc.Iterator:
        """POST a query to the ClickHouse HTTP interface (synchronous variant).

        The context manager protocol is used to control when to release the response.

        Arguments:
            query: The query to POST.
            *data: Iterable of values to include in the body of the request. For example, the tuples of VALUES for an INSERT query.
            query_parameters: Parameters to be formatted in the query.
            query_id: A query ID to pass to ClickHouse.

        Returns:
            The response received from the ClickHouse HTTP interface.
        """
        params = {**self.params}
        if query_id is not None:
            params["query_id"] = query_id

        query = self.prepare_query(query, query_parameters)
        request_data = self.prepare_request_data(data)

        if request_data:
            params["query"] = query
        else:
            request_data = query.encode("utf-8")

        with requests.Session() as s:
            # NOTE(review): verify=False disables TLS certificate
            # verification for this request — acceptable only for trusted
            # internal endpoints; see the SSL TODO in get_client().
            response = s.post(
                url=self.url,
                params=params,
                headers=self.headers,
                data=request_data,
                stream=True,
                verify=False,
            )
            self.check_response(response, query)
            yield response

    async def execute_query(
        self, query, *data, query_parameters=None, query_id: str | None = None
    ) -> None:
        """Execute the given query in ClickHouse.

        This method doesn't return any response.
        """
        async with self.apost_query(
            query, *data, query_parameters=query_parameters, query_id=query_id
        ):
            return None

    async def read_query(
        self, query, *data, query_parameters=None, query_id: str | None = None
    ) -> bytes:
        """Execute the given query in ClickHouse and read the response in full.

        As the entire payload will be read at once, use this method when expecting a small payload, like
        when running a 'count(*)' query.
        """
        async with self.apost_query(
            query, *data, query_parameters=query_parameters, query_id=query_id
        ) as response:
            return await response.content.read()

    async def stream_query_as_jsonl(
        self,
        query,
        *data,
        query_parameters=None,
        query_id: str | None = None,
        line_separator=b"\n",
    ) -> typing.AsyncGenerator[dict[typing.Any, typing.Any], None]:
        """Execute the given query in ClickHouse and stream back the response as one JSON per line.

        This method makes sense when running with FORMAT JSONEachRow, although we currently do not enforce this.
        """
        buffer = b""
        async with self.apost_query(
            query, *data, query_parameters=query_parameters, query_id=query_id
        ) as response:
            async for chunk in response.content.iter_any():
                # Accumulate bytes until at least one complete line is
                # available. The previous implementation assumed every chunk
                # contained a separator and that its first segment completed
                # a pending line; it crashed on a chunk without a separator
                # (or starting with one) and dropped a trailing line that
                # lacked a final separator.
                buffer += chunk
                if line_separator not in buffer:
                    continue
                *complete_lines, buffer = buffer.split(line_separator)
                for line in complete_lines:
                    if line:
                        yield json.loads(line)
            if buffer:
                # Flush a final line that was not terminated by a separator.
                yield json.loads(buffer)

    def stream_query_as_arrow(
        self,
        query,
        *data,
        query_parameters=None,
        query_id: str | None = None,
    ) -> typing.Generator[pa.RecordBatch, None, None]:
        """Execute the given query in ClickHouse and stream back the response as Arrow record batches.

        This method makes sense when running with FORMAT ArrowStreaming, although we currently do not enforce this.
        As pyarrow doesn't support async/await buffers, this method is sync and utilizes requests instead of aiohttp.
        """
        with self.post_query(
            query, *data, query_parameters=query_parameters, query_id=query_id
        ) as response:
            with pa.ipc.open_stream(pa.PythonFile(response.raw)) as reader:
                for batch in reader:
                    yield batch

    async def __aenter__(self):
        """Enter method part of the AsyncContextManager protocol."""
        return self

    async def __aexit__(self, exc_type, exc_value, tb):
        """Exit method part of the AsyncContextManager protocol.

        Closes the session, including a caller-provided one.
        """
        await self.session.close()
@contextlib.asynccontextmanager
async def get_client() -> collections.abc.AsyncIterator[ClickHouseClient]:
    """
    Yield a ClickHouseClient configured from Django settings. This is an
    async context manager.

    Usage:

        async with get_client() as client:
            await client.execute_query("SELECT 1")

    Note that this is not a connection pool, so you should not use this for
    queries that are run frequently.

    Note that we setup the SSL context here, allowing for custom CA certs to be
    used. I couldn't see a simple way to do this with `aiochclient` so we
    explicitly use `aiohttp` to create the client session with an ssl_context
    and pass that to `aiochclient`.
    """
    # Set up SSL context, roughly based on how `clickhouse_driver` does it.
    # TODO: figure out why this is not working when we set CERT_REQUIRED. We
    # include a custom CA cert in the Docker image and set the path to it in
    # the settings, but I can't get this to work as expected.
    #
    # ssl_context = ssl.SSLContext(ssl.PROTOCOL_TLS)
    # ssl_context.verify_mode = ssl.CERT_REQUIRED if settings.CLICKHOUSE_VERIFY else ssl.CERT_NONE
    # if ssl_context.verify_mode is ssl.CERT_REQUIRED:
    #    if settings.CLICKHOUSE_CA:
    #        ssl_context.load_verify_locations(settings.CLICKHOUSE_CA)
    #    elif ssl_context.verify_mode is ssl.CERT_REQUIRED:
    #        ssl_context.load_default_certs(ssl.Purpose.SERVER_AUTH)
    # No client-side timeouts: long-running export queries are expected.
    timeout = aiohttp.ClientTimeout(
        total=None, connect=None, sock_connect=None, sock_read=None
    )
    # NOTE(review): ssl=False disables certificate verification entirely (see
    # the TODO above). Also confirm that plain `with` (not `async with`) is
    # supported for TCPConnector in the pinned aiohttp version.
    with aiohttp.TCPConnector(ssl=False) as connector:
        async with aiohttp.ClientSession(
            connector=connector, timeout=timeout
        ) as session:
            async with ClickHouseClient(
                session,
                url=settings.CLICKHOUSE_OFFLINE_HTTP_URL,
                user=settings.CLICKHOUSE_USER,
                password=settings.CLICKHOUSE_PASSWORD,
                database=settings.CLICKHOUSE_DATABASE,
                # TODO: make this a setting.
                max_execution_time=0,
                max_block_size=10000,
            ) as client:
                yield client
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.