text
stringlengths
29
850k
# Copyright (c) 2019 J. Alvarez-Jarreta and C.J. Brasher
#
# This file is part of the LipidFinder software tool and governed by the
# 'MIT License'. Please see the LICENSE file that should have been
# included as part of this software.
"""Graphical User Interface (GUI) to manage the parameters' collection.
"""

from collections import OrderedDict
import os

from IPython.display import display
from ipywidgets import widgets, Layout
import pandas

from LipidFinder.Configuration import LFParameters
from LipidFinder._utils import normalise_path


class _TaggedToggleButton(widgets.ToggleButton):
    """Add "tag" attribute to widgets.ToggleButton class."""

    def __init__(self, tag, **kwargs):
        widgets.ToggleButton.__init__(self, **kwargs)
        self.tag = tag


class _TaggedCheckbox(widgets.Checkbox):
    """Add "tag" attribute to widgets.Checkbox class."""

    def __init__(self, tag, **kwargs):
        widgets.Checkbox.__init__(self, **kwargs)
        self.tag = tag


class _TaggedButton(widgets.Button):
    """Add "tag" attribute to widgets.Button class."""

    def __init__(self, tag, **kwargs):
        widgets.Button.__init__(self, **kwargs)
        self.tag = tag


class LFParametersGUI(LFParameters):
    """A LFParametersGUI object stores a set of LipidFinder parameters
    to be used in the specified module.

    This subclass of LFParameters implements a graphical interface
    using jupyter notebook's widgets, executed during the object
    creation. It allows the user to check, change and save each active
    parameter's value interactively.

    Attributes:
        _parameters (Private[collections.OrderedDict])
            Dictionary where the parameters and their associated
            information are stored.
        _floatPointPrecision (Private[int])
            Number of digits after the radix point in floats.
        _floatStep (Private[float])
            Minimum difference between two consecutive float numbers.
        _style (Private[dict])
            Dictionary with the default style settings for widgets.
        _inputWidth (Private[str])
            String representation of the default width of input
            widgets.
        _widgets (Private[collections.OrderedDict])
            Dictionary where the widgets for each parameter are stored.

    Examples:
        LFParametersGUI objects can be created as follows:
            >>> from Configuration.LFParametersGUI import \
            ...     LFParametersGUI
            >>> LFParametersGUI()
            >>> LFParametersGUI(src='/home/user/my_parameters.json')

        The former will load the default PeakFilter parameters and will
        load and display the interface afterwards. The latter will load
        the default PeakFilter parameters, override them with the
        values found in the JSON file provided, and finally it will
        load and display the interface.

        Alternatively, a specific module can be introduced as argument:
            >>> from Configuration.LFParametersGUI import \
            ...     LFParametersGUI
            >>> LFParametersGUI(module='mssearch')
    """

    def __init__(self, precision=4, **kwargs):
        # type: (int, ...) -> LFParametersGUI
        """Constructor of the class LFParametersGUI.

        First, the module's parameters template file is loaded. Next,
        if a source JSON parameters file path is provided, the default
        values are overwritten by the corresponding new (valid) values.
        Finally, the graphical user interface is displayed.

        Keyword Arguments:
            precision -- number of decimal digits to use with floats
                         (e.g. a precision of 2 forces a difference of
                         0.01 between any two consecutive float
                         numbers) [default: 4]
        """
        # Minimum difference between two consecutive float numbers
        self._floatPointPrecision = precision
        self._floatStep = 10 ** -(precision)
        # Load the parameters dictionary using parent class' constructor
        LFParameters.__init__(self, **kwargs)
        # Default style
        self._style = {'description_width': '0px'}
        # Default width of input widgets
        self._inputWidth = '26%'
        # Generate an ordered dict to store each parameter's set of
        # widgets in the same order as in the parameters' dict
        self._widgets = OrderedDict()
        # Map each parameter type to its input-widget factory; "str"
        # acts as the fallback type
        factories = {'bool': self._create_bool_widget,
                     'int': self._create_int_widget,
                     'float': self._create_float_widget,
                     'selection': self._create_selection_widget,
                     'path': self._create_path_widget,
                     'int range': self._create_int_range_widget,
                     'float range': self._create_float_range_widget,
                     'multiselection': self._create_multiselection_widget,
                     'pairs': self._create_pairs_widget}
        # Create every widget of the GUI
        for key, data in self._parameters.items():
            disabled = not self._is_active(key)
            # Load the information of each parameter
            self._widgets[key] = [self._create_label(key, disabled),
                                  self._create_help_icon(key, disabled)]
            # Create the input widget or container of input widgets for
            # each parameter type
            factory = factories.get(data['type'], self._create_str_widget)
            self._widgets[key].append(factory(key, disabled))
        # Display the GUI
        hboxLayout = Layout(align_items='center')
        for key, widgetList in self._widgets.items():
            display(widgets.HBox(widgetList, layout=hboxLayout))
        # Finally, create the save interface to allow the user to save
        # the current parameters values in a JSON file
        display(widgets.HBox([], layout=Layout(height='15px')))
        display(widgets.HBox([], layout=Layout(height='0px',
                                               border='2px solid lightgray')))
        display(widgets.HBox([], layout=Layout(height='2px')))
        self._widgets['save'] = self._create_save_widget()
        hboxLayout = Layout(justify_content='space-between',
                            align_items='center')
        display(widgets.HBox(self._widgets['save'], layout=hboxLayout))

    def _create_label(self, key, disabled):
        # type: (str, bool) -> widgets.HTML
        """Return an HTML widget with the parameter's description.

        If 'disabled' is False, the text will be in black, otherwise it
        will be in gray.

        Keyword Arguments:
            key      -- name of the parameter
            disabled -- is the parameter/widget disabled?
        """
        text = self._parameters[key]['description']
        label = ("<p style=\"font-size:110%; line-height:19px; color:{0};\">{1}"
                 "</p>").format('Gray' if disabled else 'Black', text)
        return widgets.HTML(value=label, style=self._style,
                            layout=Layout(width='50%'))

    def _create_help_icon(self, key, disabled):
        # type: (str, bool) -> widgets.HTML
        """Return an HTML widget with the parameter's help as tooltip
        of a help icon.

        Keyword Arguments:
            key      -- name of the parameter
            disabled -- is the parameter/widget disabled?
        """
        if ('help' in self._parameters[key]):
            code = ("<link rel=\"stylesheet\" href=\"https://fonts.googleapis.c"
                    "om/icon?family=Material+Icons\"><i class=\"material-icons"
                    "\" style=\"color:{0}; font-size:18px; display:inline"
                    "-flex; vertical-align:middle;\" title=\"{1}\">help</i>"
                    "").format("SteelBlue", self._parameters[key]['help'])
        else:
            code = ''
        layout = Layout(width='2%',
                        visibility='hidden' if disabled else 'visible')
        return widgets.HTML(value=code, style=self._style, layout=layout)

    def _create_str_widget(self, key, disabled):
        # type: (str, bool) -> widgets.Text
        """Return a Text widget with the parameter's value.

        Keyword Arguments:
            key      -- name of the parameter
            disabled -- is the parameter/widget disabled?
        """
        if ('example' in self._parameters[key]):
            example = self._parameters[key]['example']
        else:
            example = ''
        inputWidget = widgets.Text(
                value=self[key], description=key, placeholder=example,
                style=self._style, layout=Layout(width=self._inputWidth),
                continuous_update=False, disabled=disabled)
        # Add handler for when the "value" trait changes
        inputWidget.observe(self._default_handler, names='value')
        return inputWidget

    def _create_bool_widget(self, key, disabled):
        # type: (str, bool) -> widgets.HBox
        """Return an HBox containing a ToggleButton widget to represent
        the parameter's value.

        Keyword Arguments:
            key      -- name of the parameter
            disabled -- is the parameter/widget disabled?
        """
        inputWidget = _TaggedToggleButton(
                value=self[key], description='Yes' if self[key] else 'No',
                tag=key, style=self._style, layout=Layout(width='50%'),
                button_style='primary', disabled=disabled)
        # Add handler for when the "value" trait changes
        inputWidget.observe(self._bool_handler, names='value')
        layout = Layout(width=self._inputWidth, justify_content='center')
        return widgets.HBox([inputWidget], layout=layout)

    def _create_int_widget(self, key, disabled):
        # type: (str, bool) -> widgets.BoundedIntText
        """Return a BoundedIntText widget with the parameter's value.

        Keyword Arguments:
            key      -- name of the parameter
            disabled -- is the parameter/widget disabled?
        """
        inputWidget = widgets.BoundedIntText(
                value=self[key], description=key, min=self._min(key),
                max=self._max(key), style=self._style,
                layout=Layout(width=self._inputWidth),
                continuous_update=False, disabled=disabled)
        # Save the widget's value in case its constructor automatically
        # replaces an empty one given as argument
        self._parameters[key]['value'] = inputWidget.value
        # Add handler for when the "value" trait changes
        inputWidget.observe(self._default_handler, names='value')
        return inputWidget

    def _create_float_widget(self, key, disabled):
        # type: (str, bool) -> widgets.BoundedFloatText
        """Return a BoundedFloatText widget with the parameter's value.

        Keyword Arguments:
            key      -- name of the parameter
            disabled -- is the parameter/widget disabled?
        """
        inputWidget = widgets.BoundedFloatText(
                value=self[key], description=key, min=self._min(key),
                max=self._max(key), step=self._floatStep, style=self._style,
                layout=Layout(width=self._inputWidth),
                continuous_update=False, disabled=disabled)
        # Save the widget's value in case its constructor automatically
        # replaces an empty one given as argument
        self._parameters[key]['value'] = inputWidget.value
        # Add handler for when the "value" trait changes
        inputWidget.observe(self._default_handler, names='value')
        return inputWidget

    def _create_selection_widget(self, key, disabled):
        # type: (str, bool) -> widgets.Dropdown
        """Return a Dropdown widget with the parameter's options and
        its current value selected.

        Keyword Arguments:
            key      -- name of the parameter
            disabled -- is the parameter/widget disabled?
        """
        inputWidget = widgets.Dropdown(
                options=self._parameters[key]['options'], value=self[key],
                description=key, style=self._style,
                layout=Layout(width=self._inputWidth), disabled=disabled)
        # Add handler for when the "value" trait changes
        inputWidget.observe(self._default_handler, names='value')
        return inputWidget

    def _create_path_widget(self, key, disabled):
        # type: (str, bool) -> widgets.HBox
        """Return an HBox containing a Text widget with the parameter's
        value.

        If the Text widget is enabled and the file does not exist, a
        warning icon will be displayed next to it to alert the user.

        Keyword Arguments:
            key      -- name of the parameter
            disabled -- is the parameter/widget disabled?
        """
        inputWidget = widgets.Text(
                value=self[key], description=key, style=self._style,
                layout=Layout(width='92%'), continuous_update=False,
                disabled=disabled)
        # Add handler for when the "value" trait changes
        inputWidget.observe(self._path_handler, names='value')
        # Create an HTML widget with a warning icon that will be
        # displayed if the Text widget is enabled and the file does not
        # exist
        code = ("<link rel=\"stylesheet\" href=\"https://fonts.googleapis.com/i"
                "con?family=Material+Icons\"><i class=\"material-icons\" style="
                "\"font-size:18px; color:Red; display:inline-flex; vertical-ali"
                "gn:middle;\" title=\"File not found!\">warning</i>")
        warn = not disabled and not os.path.isfile(self[key])
        layout = Layout(width='5%',
                        visibility='visible' if warn else 'hidden')
        warnWidget = widgets.HTML(value=code, style=self._style, layout=layout)
        layout = Layout(width='46%', justify_content='space-between')
        return widgets.HBox([inputWidget, warnWidget], layout=layout)

    def _create_int_range_widget(self, key, disabled):
        # type: (str, bool) -> widgets.HBox
        """Return an HBox containing two BoundedIntText widgets with
        the parameter's range values.

        The widgets are created to fulfill the "int range" type
        condition: lower_bound < upper_bound

        Keyword Arguments:
            key      -- name of the parameter
            disabled -- is the parameter/widget disabled?
        """
        lowerBound = widgets.BoundedIntText(
                value=self[key][0], description=key, min=self._min(key),
                max=self[key][1] - 1, style=self._style,
                layout=Layout(width='50%'), continuous_update=False,
                disabled=disabled)
        # Save the widget's value in case its constructor automatically
        # replaces an empty one given as argument
        self._parameters[key]['value'][0] = lowerBound.value
        # Add handler for when the "value" trait changes
        lowerBound.observe(self._range_handler, names='value')
        upperBound = widgets.BoundedIntText(
                value=self[key][1], description=key, min=self[key][0] + 1,
                max=self._max(key), style=self._style,
                layout=Layout(width='50%'), continuous_update=False,
                disabled=disabled)
        # Save the widget's value in case its constructor automatically
        # replaces an empty one given as argument
        self._parameters[key]['value'][1] = upperBound.value
        # Add handler for when the "value" trait changes
        upperBound.observe(self._range_handler, names='value')
        return widgets.HBox([lowerBound, upperBound],
                            layout=Layout(width=self._inputWidth))

    def _create_float_range_widget(self, key, disabled):
        # type: (str, bool) -> widgets.HBox
        """Return an HBox containing two BoundedFloatText widgets with
        the parameter's range values.

        The widgets are created to fulfill the "float range" type
        condition: lower_bound < upper_bound

        Keyword Arguments:
            key      -- name of the parameter
            disabled -- is the parameter/widget disabled?
        """
        lowerBound = widgets.BoundedFloatText(
                value=self[key][0], description=key, min=self._min(key),
                max=self[key][1] - self._floatStep, step=self._floatStep,
                style=self._style, layout=Layout(width='50%'),
                continuous_update=False, disabled=disabled)
        # Save the widget's value in case its constructor automatically
        # replaces an empty one given as argument
        self._parameters[key]['value'][0] = lowerBound.value
        # Add handler for when the "value" trait changes
        lowerBound.observe(self._range_handler, names='value')
        upperBound = widgets.BoundedFloatText(
                value=self[key][1], description=key,
                min=self[key][0] + self._floatStep, max=self._max(key),
                step=self._floatStep, style=self._style,
                layout=Layout(width='50%'), continuous_update=False,
                disabled=disabled)
        # Save the widget's value in case its constructor automatically
        # replaces an empty one given as argument
        self._parameters[key]['value'][1] = upperBound.value
        # Add handler for when the "value" trait changes
        upperBound.observe(self._range_handler, names='value')
        return widgets.HBox([lowerBound, upperBound],
                            layout=Layout(width=self._inputWidth))

    def _create_multiselection_widget(self, key, disabled):
        # type: (str, bool) -> widgets.Box
        """Return a Box containing as many Checkbox widgets as
        parameter's options, with those in its "value" field checked.

        Keyword Arguments:
            key      -- name of the parameter
            disabled -- is the parameter/widget disabled?
        """
        itemWidgets = []
        for item in self._parameters[key]['options']:
            layoutWidth = '23%' if (len(item) <= 10) else '48%'
            inputWidget = _TaggedCheckbox(
                    value=item in self[key], description=item, tag=key,
                    style=self._style, layout=Layout(width=layoutWidth),
                    disabled=disabled)
            # Add handler for when the "value" trait changes
            inputWidget.observe(self._multiselection_handler, names='value')
            itemWidgets.append(inputWidget)
        layout = Layout(width='46%', display='flex', flex_flow='row wrap',
                        justify_content='space-between')
        return widgets.Box(itemWidgets, layout=layout)

    def _create_pairs_widget(self, key, disabled):
        # type: (str, bool) -> widgets.HBox
        """Return an HBox containing the interface to add and remove
        pairs of available elements.

        The term "available elements" refers to those elements in the
        first column of the CSV file's path stored under the
        parameter's "file" key. Users will not be able to add existing
        pairs or pairs formed by the same element twice.

        Keyword Arguments:
            key      -- name of the parameter
            disabled -- is the parameter/widget disabled?
        """
        # Load the list of available elements from the first column of
        # the CSV file saved under the parameter's "file" key
        srcFilePath = self[self._parameters[key]['file']]
        options = pandas.read_csv(srcFilePath).iloc[:, 0].tolist()
        # Create two Select widgets with the list of available elements
        leftSelect = widgets.Select(
                options=options, rows=4, style=self._style,
                layout=Layout(width='20%'), disabled=disabled)
        rightSelect = widgets.Select(
                options=options, rows=4, style=self._style,
                layout=Layout(width='20%'), disabled=disabled)
        # Create the add and remove buttons with the handler to add and
        # remove pairs, respectively
        addButton = _TaggedButton(
                description='Pair >>', tooltip='Add new pair', tag=key,
                layout=Layout(width='95%'), disabled=disabled)
        # Add handler for when the button is clicked
        addButton.on_click(self._pairs_add_handler)
        delButton = _TaggedButton(
                description='<< Remove', tooltip='Remove selected pair',
                tag=key, layout=Layout(width='95%'), disabled=disabled)
        # Add handler for when the button is clicked
        delButton.on_click(self._pairs_del_handler)
        layout = Layout(width='21%', justify_content='space-around')
        # Hold the buttons in a VBox to get the desired layout
        buttonsBox = widgets.VBox([addButton, delButton], layout=layout)
        # Create a Select widget with the parameter's list of pairs
        pairs = [' , '.join(x) for x in self[key]]
        pairsSelect = widgets.Select(
                options=pairs, rows=4, style=self._style,
                layout=Layout(width='28%'), disabled=disabled)
        layout = Layout(width='46%', justify_content='space-around')
        return widgets.HBox([leftSelect, rightSelect, buttonsBox, pairsSelect],
                            layout=layout)

    def _create_save_widget(self):
        # type: () -> list
        """Return a list containing the interface to save the current
        parameters values as a JSON file in an introduced path.
        """
        text = ("<p style=\"font-size:110%; line-height:19px; color:Black;\">"
                "Where do you want to save the new set of parameters?</p>")
        label = widgets.HTML(value=text, style=self._style,
                             layout=Layout(width='38%'))
        # Create the path input widget (Text) with a default path and
        # file name
        defaultPath = normalise_path("parameters.json")
        inputWidget = widgets.Text(
                value=defaultPath, placeholder=defaultPath, style=self._style,
                layout=Layout(width='40%'), continuous_update=False)
        # Add handler for when the "value" trait changes
        inputWidget.observe(self._save_path_handler, names='value')
        # Create an HTML widget with a warning icon that will be
        # displayed if the directory path does not exist
        code = ("<link rel=\"stylesheet\" href=\"https://fonts.googleapis.com/i"
                "con?family=Material+Icons\"><i class=\"material-icons\" style="
                "\"font-size:18px; color:Red; display:inline-flex; vertical-ali"
                "gn:middle;\" title=\"Path not found!\">warning</i>")
        dirPath = os.path.split(inputWidget.value)[0]
        visibility = 'visible' if not os.path.isdir(dirPath) else 'hidden'
        layout = Layout(width='2%', visibility=visibility)
        warnWidget = widgets.HTML(value=code, style=self._style, layout=layout)
        # Create a save button that will be active only if every active
        # parameter is valid and the destination path exists
        saveButton = widgets.Button(
                description='Save', button_style='danger',
                tooltip='Save parameters in a JSON file',
                layout=Layout(width='12%', height='35px'),
                disabled=not self._valid_parameters())
        # Add handler for when the button is clicked
        saveButton.on_click(self._save_button_handler)
        return [label, inputWidget, warnWidget, saveButton]

    def _update(self):
        # type: () -> None
        """Refresh the status and visibility of every parameter's
        widgets to reflect the current parameters values.

        Each label is re-colored and each input widget is
        enabled/disabled depending on whether its parameter is active,
        numeric bounds are recalculated, path warnings are shown or
        hidden, and the save button is re-validated.
        """
        # Update the status and/or visibility of each parameter's widget
        for key in self._parameters.keys():
            interface = self._widgets[key]
            disabled = not self._is_active(key)
            if (disabled):
                interface[0].value = interface[0].value.replace('Black',
                                                                'Gray')
            else:
                interface[0].value = interface[0].value.replace('Gray',
                                                                'Black')
            interface[1].layout.visibility = 'hidden' if disabled \
                                             else 'visible'
            typeStr = self._parameters[key]['type']
            if (typeStr == 'bool'):
                interface[2].children[0].disabled = disabled
            elif (typeStr in ['int', 'float']):
                # Update minimum and maximum bounds too
                interface[2].min = self._min(key)
                interface[2].max = self._max(key)
                interface[2].disabled = disabled
            elif (typeStr == 'path'):
                interface[2].children[0].disabled = disabled
                # Display the warning widget if the parameter is enabled
                # and the file does not exist
                if (not disabled and not os.path.isfile(self[key])):
                    interface[2].children[1].layout.visibility = 'visible'
                else:
                    interface[2].children[1].layout.visibility = 'hidden'
            elif (typeStr in ['int range', 'float range']):
                # Update minimum and maximum bounds of the range too
                interface[2].children[0].min = self._min(key)
                interface[2].children[0].disabled = disabled
                interface[2].children[1].max = self._max(key)
                interface[2].children[1].disabled = disabled
            elif (typeStr == 'multiselection'):
                for child in interface[2].children:
                    child.disabled = disabled
            elif (typeStr == 'pairs'):
                interface[2].children[0].disabled = disabled
                interface[2].children[1].disabled = disabled
                for grandchild in interface[2].children[2].children:
                    grandchild.disabled = disabled
                interface[2].children[3].disabled = disabled
            else:
                interface[2].disabled = disabled
        # Ensure the save button should be available and ready to save
        # the new set of parameters
        self._widgets['save'][3].description = 'Save'
        self._widgets['save'][3].icon = ''
        self._widgets['save'][3].disabled = not self._valid_parameters()

    def _default_handler(self, change):
        # type: (dict) -> None
        """Handle the "value" trait change assigning the new value to
        the corresponding parameter.

        The update() method is launched at the end to ensure every
        widget is updated according to the change in this parameter.

        Keyword Arguments:
            change -- dict holding the information about the change
        """
        key = change['owner'].description
        self._parameters[key]['value'] = change['new']
        self._update()

    def _bool_handler(self, change):
        # type: (dict) -> None
        """Handle the "value" trait change assigning the new value to
        the corresponding "bool" type parameter.

        The update() method is launched at the end to ensure every
        widget is updated according to the change in this parameter.

        Keyword Arguments:
            change -- dict holding the information about the change
        """
        key = change['owner'].tag
        self._parameters[key]['value'] = change['new']
        # Change ToggleButton's description to "Yes" or "No" depending
        # on whether its new value is True or False, respectively
        change['owner'].description = 'Yes' if change['new'] else 'No'
        self._update()

    def _path_handler(self, change):
        # type: (dict) -> None
        """Handle the "value" trait change assigning the new value to
        the corresponding "path" type parameter.

        The update() method is launched at the end to ensure every
        widget is updated according to the change in this parameter.

        Keyword Arguments:
            change -- dict holding the information about the change
        """
        key = change['owner'].description
        self._parameters[key]['value'] = normalise_path(change['new'])
        # Replace the introduced path by its normalised version to
        # provide the user with more information in case there is
        # something wrong with the path
        change['owner'].value = self[key]
        # Get the "pairs" type parameter that has this parameter in its
        # "field" key to update the contents of its widgets
        for param, data in self._parameters.items():
            if ((data['type'] == 'pairs') and (data['file'] == key)):
                pairsWidget = self._widgets[param][2]
                if (os.path.isfile(self[key])):
                    # Update the information of available elements
                    options = pandas.read_csv(self[key]).iloc[:, 0].tolist()
                    pairsWidget.children[0].options = options
                    pairsWidget.children[1].options = options
                else:
                    # Since the file does not exist, there are no
                    # available elements
                    pairsWidget.children[0].options = []
                    pairsWidget.children[1].options = []
                # Since the file has changed, empty the list of pairs
                self._parameters[param]['value'] = []
                pairsWidget.children[3].options = []
                break
        self._update()

    def _range_handler(self, change):
        # type: (dict) -> None
        """Handle the "value" trait change assigning the new value to
        the corresponding "int/float range" type parameter.

        The update() method is launched at the end to ensure every
        widget is updated according to the change in this parameter.

        Keyword Arguments:
            change -- dict holding the information about the change
        """
        key = change['owner'].description
        # Both children have the same step
        step = self._widgets[key][2].children[0].step
        if (change['owner'].min == self._min(key)):
            # Trait changed in the widget corresponding to the lower
            # bound of the range
            self._parameters[key]['value'][0] = change['new']
            self._widgets[key][2].children[1].min = change['new'] + step
        else:
            # Trait changed in the widget corresponding to the upper
            # bound of the range
            self._parameters[key]['value'][1] = change['new']
            self._widgets[key][2].children[0].max = change['new'] - step
        self._update()

    def _multiselection_handler(self, change):
        # type: (dict) -> None
        """Handle the "value" trait change updating the list of values
        of the corresponding "multiselection" type parameter.

        The update() method is launched at the end to ensure every
        widget is updated according to the change in this parameter.

        Keyword Arguments:
            change -- dict holding the information about the change
        """
        key = change['owner'].tag
        if (change['new']):
            self._parameters[key]['value'].append(change['owner'].description)
        else:
            self._parameters[key]['value'].remove(change['owner'].description)
        self._update()

    def _pairs_add_handler(self, button):
        # type: (_TaggedButton) -> None
        """Handle when the button is clicked to add a pair to the
        corresponding "pairs" type parameter.

        The update() method is launched at the end to ensure every
        widget is updated according to the change in this parameter.

        Keyword Arguments:
            button -- clicked button widget instance
        """
        key = button.tag
        # Add selected elements in both Selection widgets as a new pair
        leftSel = self._widgets[key][2].children[0].value
        rightSel = self._widgets[key][2].children[1].value
        newPair = [leftSel, rightSel]
        # The pairs are considered sets, that is, the order of the
        # elements is ignored
        if ((leftSel != rightSel) and (newPair not in self[key])
            and (newPair[::-1] not in self[key])):
            self._parameters[key]['value'].append(newPair)
            # Since the "options" field is a tuple, build a new list
            # with the new pair
            self._widgets[key][2].children[3].options = \
                    [' , '.join(x) for x in self[key]]
        self._update()

    def _pairs_del_handler(self, button):
        # type: (_TaggedButton) -> None
        """Handle when the button is clicked to remove a pair of the
        corresponding "pairs" type parameter.

        The update() method is launched at the end to ensure every
        widget is updated according to the change in this parameter.

        Keyword Arguments:
            button -- clicked button widget instance
        """
        key = button.tag
        pairsWidget = self._widgets[key][2].children[3]
        # Get the selected pair from the pairs widget
        pairSel = pairsWidget.value
        if (pairSel):
            pair = pairSel.split(' , ')
            self._parameters[key]['value'].remove(pair)
            # Since the "options" field is a tuple, build a new list
            # without the deleted pair
            pairsWidget.options = [' , '.join(x) for x in self[key]]
            # Select the first pair to ensure coherence with the change
            if (pairsWidget.options):
                pairsWidget.value = pairsWidget.options[0]
        self._update()

    def _save_path_handler(self, change):
        # type: (dict) -> None
        """Handle the "value" trait change checking if the path where
        to save the parameters values exists.

        A warning sign will be displayed if the given directory path
        does not exist. The update() method is launched at the end to
        ensure every widget is updated according to the change in this
        parameter.

        Keyword Arguments:
            change -- dict holding the information about the change
        """
        newPath = normalise_path(change['new'])
        dirPath = os.path.split(newPath)[0]
        if (not os.path.isdir(dirPath)):
            self._widgets['save'][2].layout.visibility = 'visible'
        else:
            self._widgets['save'][2].layout.visibility = 'hidden'
        # Replace the introduced path by its normalised version to
        # provide the user with more information in case there is
        # something wrong
        change['owner'].value = newPath
        self._update()

    def _save_button_handler(self, button):
        # type: (widgets.Button) -> None
        """Handle when the button is clicked to save the parameters
        values in a JSON file.

        Keyword Arguments:
            button -- clicked button widget instance
        """
        self.write(self._widgets['save'][1].value)
        # Change the button's text to tell the user the JSON parameters
        # file has been correctly created
        button.description = 'Saved'
        button.icon = 'check'

    def _min(self, key):
        # type: (str) -> object
        """Return the largest value in the parameter's "min" list.

        Applies round() method to the output of LFParameter._min() to
        get a more comparable result regarding floating point
        arithmetic issues.

        Keyword Arguments:
            key -- name of the parameter
        """
        return round(LFParameters._min(self, key), self._floatPointPrecision)

    def _max(self, key):
        # type: (str) -> object
        """Return the smallest value in the parameter's "max" list.

        Applies round() method to the output of LFParameter._max() to
        get a more comparable result regarding floating point
        arithmetic issues.

        Keyword Arguments:
            key -- name of the parameter
        """
        return round(LFParameters._max(self, key), self._floatPointPrecision)

    def _valid_parameters(self):
        # type: () -> bool
        """Return True if every active parameter has a valid value,
        False otherwise.

        The list of valid parameters also includes "save" destination
        path, where the JSON parameters file will be saved.
        """
        enabledKeys = (x for x in self._parameters.keys()
                       if self._is_active(x))
        for key in enabledKeys:
            data = self._parameters[key]
            # Only "multiselection" type parameters can be empty ([])
            if ((data['type'] != 'multiselection')
                and (data['value'] in [None, '', []])):
                return False
            # "path" type parameters must be checked manually, whilst
            # the rest are already controlled by their widget
            if ((data['type'] == 'path')
                and not os.path.isfile(data['value'])):
                return False
        # This method is also called when the save interface is being
        # created, so the "save" key will not exist yet
        if ('save' in self._widgets):
            # Check if the directory path where to save the JSON
            # parameters file exists
            dirPath = os.path.split(self._widgets['save'][1].value)[0]
            if (not os.path.isdir(dirPath)):
                return False
        return True
Click here to find out how far it is to Cobram, Victoria, Australia. How far is it to Cobram, Victoria, Australia? Learn about travel to Cobram, Victoria, Australia, including hotels and local tours of Cobram. Is there anywhere to hire a car in Cobram? You may also be interested in places to eat in Cobram, Victoria, Australia.
from django.contrib import messages
from django.shortcuts import HttpResponseRedirect
from django.core.urlresolvers import reverse
from smartmin.views import SmartCRUDL, SmartCreateView, SmartReadView, SmartListView
from phoenix.apps.animals.models import Animal
from phoenix.apps.utils.upload.views import UploadView, UploadListView, UploadDeleteView
from .models import AnimalNote, AnimalDocument


class AnimalDocumentUploadView(UploadView):
    """Handle uploads of AnimalDocument files."""
    model = AnimalDocument
    delete_url = 'records.animaldocument_delete'

    def get_context_data(self, **kwargs):
        context = super(AnimalDocumentUploadView, self).get_context_data(**kwargs)
        # TODO(review): animal is not attached to the context yet
        #context['animal'] = self.request.animal
        return context


class AnimalDocumentListView(UploadListView):
    """List uploaded AnimalDocument files."""
    model = AnimalDocument
    delete_url = 'records.animaldocument_delete'

    def get_queryset(self):
        # TODO(review): should probably be filtered per animal and
        # exclude soft-deleted rows, e.g.
        # .filter(animal=self.kwargs['animal_id']).filter(deleted=False)
        return AnimalDocument.objects.all()


class AnimalDocumentDeleteView(UploadDeleteView):
    """Delete an AnimalDocument upload."""
    model = AnimalDocument


class AnimalNoteCRUDL(SmartCRUDL):
    """CRUD views for notes attached to a single animal."""
    model = AnimalNote

    class FormMixin(object):
        def __init__(self, **kwargs):
            # Imported here to prevent cyclic import errors
            from .forms import AnimalNoteForm
            self.form_class = AnimalNoteForm
            super(AnimalNoteCRUDL.FormMixin, self).__init__(**kwargs)

    class Create(FormMixin, SmartCreateView):
        def get(self, request, *args, **kwargs):
            """Redirect back to the referrer when no 'animal' id is given."""
            animal_id = request.GET.get('animal', None)
            if not animal_id:
                messages.warning(request, 'Animal Id is required')
                return HttpResponseRedirect(request.META.get('HTTP_REFERER', '/'))
            return super(AnimalNoteCRUDL.Create, self).get(request, *args, **kwargs)

        def pre_save(self, obj):
            """Attach the Animal referenced by the 'animal' GET parameter.

            BUG FIX: this previously caught ``AnimalNote.DoesNotExist``
            while calling ``Animal.objects.get``, so a missing animal
            raised an unhandled ``Animal.DoesNotExist`` (HTTP 500).
            """
            animal_id = self.request.GET.get('animal', None)
            try:
                animal = Animal.objects.get(id=animal_id)
            except Animal.DoesNotExist:
                # Error message now reflects the actual failure mode
                messages.error(self.request, 'Animal not found')
            else:
                obj.animal = animal
            return obj

        def get_success_url(self):
            return reverse('animals.animal_read',
                           args=[self.request.GET.get('animal', None)])

    class Read(SmartReadView):
        fields = ('id', 'date', 'file', 'details', 'created', 'modified')

        def get_file(self, obj):
            # Guard against notes without an attached file, consistent
            # with List.get_file (previously raised on an empty file)
            if obj.file:
                return '<a href=' + obj.file.url + '>' + obj.file.name + '</a>'
            return ''

    class List(SmartListView):
        fields = ('id', 'date', 'file', 'details')

        def get_file(self, obj):
            if obj.file:
                return '<a href=' + obj.file.url + '>' + obj.file.name + '</a>'
            return ''

        def get_queryset(self, **kwargs):
            queryset = super(AnimalNoteCRUDL.List, self).get_queryset(**kwargs)
            queryset = queryset.filter(animal=self.request.animal)
            return queryset

# NOTE(review): removed a large commented-out dead-code block
# (AnimalGroupNoteCRUDL) that duplicated AnimalNoteCRUDL almost line
# for line; recover it from version control if it is ever needed.
Accutane consists of “magic” pills used for acne treatment. This drug has unique characteristics, and it currently has no analogues. It contains Isotretinoin, a structural analogue of vitamin A. As it has a structure analogous to vitamin A, its therapeutic characteristics are similar. One of the main factors in acne formation is excessive activity of the oil glands. Accutane lowers the production of fatty secretion and prevents pore clogging. The use of Accutane makes the sebaceous secretion softer, so that it is not accumulated in the sebaceous channels and is easily removed. It helps to clean the skin of closed comedones (blackheads) and prevent the development of new acne. With cleaned pores and reduced production of sebum, the activity of bacteria and inflammatory processes in the upper layers of the epidermis is reduced. Pores are closed, and sebum is not kept there, so that the skin becomes healthier. Accutane is effective in the severest forms of acne, when neither antibiotics nor cosmetic products help to clean the skin. In the case of a light form of acne, this medicine is not used, because the risk of developing side effects is high during the use of Accutane. The indications for the use of Accutane are the following skin diseases: nodulocystic acne, acne conglobata, closed and open comedones, etc. The pills of Accutane are taken once per day at the same time. A decision about the treatment should be made by a doctor who will consider the medical picture of the disease and will prescribe a correct dose. It is considered that the optimal dose of Accutane may be prescribed according to the formula 0.5 mg per 1 kg of body weight. If a patient weighs 70 kg, the daily dose will be 35 mg of Accutane. But in some cases (severe forms of acne affecting a great part of the face and body skin), a higher dose of Accutane, 1 mg per 1 kg of body weight, will be used.
The first results of the treatment usually appear in 2-3 weeks, and a complete remission of acne happens within 12-20 weeks of the treatment. Accutane may be used for no more than 5 months in a row. Before beginning a new cycle of the treatment, it is necessary to take a break of 2-3 months. The use of Accutane is often accompanied by unpleasant side effects: dry skin, dry mucous membranes, sweating, headache, nausea, lack of energy, and pain in muscles and joints. The side effects may be reduced by means of a reduction of the daily dose. The symptoms of acne may become stronger in the first 1-2 weeks, and more rash may occur; this is completely normal.
#!/usr/bin/python # -*- coding: UTF-8 -*- import sys import json from django.http import JsonResponse from django.shortcuts import render from book.models import book from chapter.models import chapter def bookChapter(request): context = {} # get the book id of user input if it is not null if 'idBook' not in request.GET: context['status'] = "fail" context['message'] = "The idBook variable is not in request.GET." return JsonResponse(context) inputIdBook = request.GET['idBook'] # get the book name of user input if it is not null # if 'bookName' not in request.GET: # context['status'] = "fail" # context['message'] = "The bookName variable is not in request.GET." # return JsonResponse(context) # bookName = request.GET['bookName'] bookName = "" res, status, mes = book.getValue(inputIdBook, "name") if res: bookName = mes else: print "getchapter bookChapter error" + str(status) return render(request, 'chapter/bookChapter.html', context={'idBook': inputIdBook,'bookName': bookName}) def getChapter(request): context = {} reload(sys) sys.setdefaultencoding('utf8') # get the new book name of user input if it is not null if 'idBook' not in request.GET: context['status'] = "fail" context['message'] = "The idBook variable is not in request.GET." return JsonResponse(context) inputIdBook = request.GET['idBook'] res, statusNumber, mes = chapter.getAll(inputIdBook) if not res: context['status'] = "fail" context['message'] = "錯誤: " + mes return JsonResponse(context) context['status'] = "success" response_data = [] for m in mes: response_record = {} response_record['id'] = m.id response_record['name'] = m.name response_record['chapterOrder'] = m.chapterOrder response_record['book_name'] = book.getValue(m.idBook_id, "name")[2] response_data.append(response_record) context["message"] = response_data return JsonResponse(context)
One of our key fire safety suppliers is Briton, a renowned manufacturer of quality door closers. The Briton overhead door closer range provides door closing solutions to suit all projects, door applications and budgets. From the simplicity of a mechanical non-fire door closer for basic functionality, to a microprocessor-controlled low energy operator for special applications, e.g. for doors required to satisfy Part M of the Building Regulations, there are Briton products to meet your needs. The Briton 121CE compact overhead door closer is suitable for the most popular door sizes. It has all the characteristics of performance, durability, functionality and quality that specifiers and users have come to expect from a Briton door control. The Briton 1100 series trimplate closers are designed to provide a mid-range product with a choice of the most popular options, in a concise, cost-effective package. The 2000 series offers adjustable and fixed power sizes, providing a comprehensive package of solutions to suit the most popular door sizes. The Briton 2003V was designed to help you meet the requirements of BS8300:2001 for DDA use; it has variable power allowing easy on-site adjustment to meet both access and fire requirements. As well as making access easier for all users, it has all the benefits you have come to expect from the Briton 2003 – reliability, ease of installation and full certification. Further enhancements have been made for this upgrade, and the Briton 2003V features both adjustable back check and the award-winning Accufit installation system as standard. A closer will only perform correctly if it is fitted correctly, so ensuring accurate installation is essential to meeting the requirements of any legislation. A recent addition to the range is the Briton 2700 series, a precision-manufactured cam-action slide channel closer, in a compact, overhead, surface-fixed unit.
Providing exceptional ease of use by reducing the resistance encountered when opening the door, the Briton 2700 Series bridges the gap between the requirements for fire and smoke control and ease of operation required for accessibility. As with the 2003V the 2700 door closer comes with the Accufit installation system to ensure correct fitting. Linked to the building’s alarm system, electromagnetic door controls eliminate the illegal practice of wedging doors open. This dangerous practice prevents a fire door functioning as it should in stopping the spread of flames and smoke in the event of a fire. The Briton 996 Series is a range of fixed power closers with an integrated electromagnetic hold open mechanism. When connected to the building fire alarm or detection system each unit can be set to either ‘hold-open’ or ‘swing-free’ operation. In either case the power of the closer can be temporarily disabled to allow free passage. When de-activated, the electromagnet disengages and the door closer closes the door in the normal manner to maintain fire safety. The new Briton 1130B.TE has been designed to hold the door open during normal use whilst connected to the building fire alarm or smoke detection system having an electromagnet in the slide track. On sounding of the fire alarm, or in the event of a power failure, the electromagnet will be deactivated and the door will close in the normal controlled manner.
# RandTalkBot Bot matching you with a random person on Telegram.
# Copyright (C) 2016 quasiyoke
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

import datetime
import json
import logging

from peewee import DateTimeField, Model, Proxy, TextField

LOGGER = logging.getLogger('randtalkbot.stats')


def _(string):
    # Gettext-style marker: returns its argument unchanged.
    return string


DATABASE_PROXY = Proxy()
RATIO_MAX = 10


class Stats(Model):
    """Snapshot of bot statistics, stored as a JSON blob."""
    data_json = TextField()
    created = DateTimeField(default=datetime.datetime.utcnow, index=True)

    class Meta:
        database = DATABASE_PROXY

    def __init__(self, *args, **kwargs):
        super(Stats, self).__init__(*args, **kwargs)
        self._data_cache = None

    def get_data(self):
        """Return the decoded stats payload, deserializing it lazily."""
        if self._data_cache is None:
            self._data_cache = json.loads(self.data_json)
        return self._data_cache

    def set_data(self, data):
        """Store *data*, keeping the decoded copy cached."""
        self.data_json = json.dumps(data)
        self._data_cache = data

    def get_sex_ratio(self):
        """https://en.wikipedia.org/wiki/Human_sex_ratio

        Returns:
            float: Ratio of males over the females. Falls back to 1
                when no distribution data is present, and to
                RATIO_MAX (or its inverse) when one sex is absent.
        """
        try:
            distribution = self.get_data()['sex_distribution']
        except (KeyError, TypeError):
            return 1
        males = distribution.get('male', 0)
        females = distribution.get('female', 0)
        if females > 0:
            return males / females if males > 0 else 1 / RATIO_MAX
        if males > 0:
            return RATIO_MAX
        return 1
biblical quotes about passing judgement bible verses about helping others bible god lord savior bible verses bible quotes verses bible verse passing judgement. quotes about a judgemental society submitted by quotes about judgemental society. quotes on not judging a book by its cover quote quotes similar to judging a book by its cover. quotes on judging a book by its cover discover ideas about my past quotes quotes about judging book by its cover. bible quotes about particular judgement where are the two judgments in the bible bible quotes about judgement day. bible quotes about judgement day prayer bible quotes judgement day. quotes for not judging a book by its cover quote on mental health stigma not all wound are so visible walk gently in quotes about judging book by its cover. judging someone by their appearance quotes judgemental people quotes inspirational best non judgement images on in of judgemental judging appearance quotes. bible quotes on being judgemental arguing bible verses bible verses bible verses about relationships bible verses quotes bible quotes judgement day. judgment at nuremberg movie quotes judgement at nuremberg movie quotes.
# -*- coding: utf-8 -*-
'''
Created on 15 Feb 2013

@author: tedlaz

New-payroll-database wizard (PyQt4, Python 2): collects company data
through a series of wizard pages and creates a fresh .m13 payroll file.
'''
# SQL template for the single company row of table m12_co
sqlco = u"INSERT INTO m12_co VALUES (1,'{0}','{1}','{2}',{3},'{4}','{5}','{6}','{7}','{8}','{9}','{10}','{11}','{12}','{13}')"

from PyQt4 import QtCore, QtGui,Qt
import utils_db,widgets
import osyk
from utils_qt import fFindFromList
import datetime


class NewDbWizard(QtGui.QWizard):
    """Top-level wizard driving creation of a new payroll file."""

    def __init__(self, parent=None):
        super(NewDbWizard, self).__init__(parent)
        # Not using Qt.WA_DeleteOnClose because it causes problems ...
        #self.setAttribute(Qt.Qt.WA_DeleteOnClose)
        #self.addPage(IntroPage())
        self.addPage(coDataPage())
        self.addPage(coDataPage2())
        self.addPage(filePage())
        self.addPage(finalPage())
        self.setWizardStyle(QtGui.QWizard.ModernStyle)
        self.setOption(QtGui.QWizard.IndependentPages,True)
        #self.setPixmap(QtGui.QWizard.BannerPixmap,QtGui.QPixmap(':/banner'))
        #self.setPixmap(QtGui.QWizard.BackgroundPixmap, QtGui.QPixmap(':/background'))
        self.setWindowTitle(u"Οδηγός Δημιουργίας Νέου Αρχείου Μισθοδοσίας")

    def accept(self):
        """Create and populate the database file from the wizard fields.

        Called when the user confirms on the final page.
        """
        #print '%s %s %s' % (self.field('epon'),self.field('cotyp_id'),self.field('fname'))
        # Run the schema-creation SQL script shipped with the app
        fileSql = open(osyk.newDbFile)
        script = u''
        for lines in fileSql:
            script += u'%s' % lines.decode('utf-8')
        utils_db.executeScript(script, self.field('fname'))
        # Insert the company record built from the collected fields
        sqlCo = sqlco.format(self.field('epon'),self.field('onom'),self.field('patr'),self.field('cotyp_id'),
                             self.field('ame'),self.field('afm'),self.field('doy'),self.field('dra'),
                             self.field('pol'),self.field('odo'),self.field('num'),self.field('tk'),
                             self.field('ikac'),self.field('ikap'))
        print sqlCo
        utils_db.commitToDb(sqlCo, self.field('fname'))
        # Default company branch record ("Κεντρικό" = head office)
        sqlCoy = u"INSERT INTO m12_coy VALUES (1,1,'Κεντρικό','%s')" % self.field('kad')
        utils_db.commitToDb(sqlCoy, self.field('fname'))
        # Open the current fiscal year
        etos = datetime.datetime.now().year
        utils_db.commitToDb(u"INSERT INTO m12_xrisi (xrisi,xrisip) VALUES ('{0}','Χρήση {0}')".format(etos), self.field('fname'))
        # Pre-load specialty codes matching the chosen activity code (KAD)
        eidList = osyk.eid_cad_listFilteredDouble(self.field('kad'))
        #print eidList
        sqleid_ = u"INSERT INTO m12_eid (eidp,keid) VALUES ('{0}','{1}');\n"
        sqleid = u''
        for el in eidList:
            sqleid += sqleid_.format(el[1],el[0])
        utils_db.executeScript(sqleid,self.field('fname'))
        super(NewDbWizard, self).accept()


class IntroPage(QtGui.QWizardPage):
    """Introductory instructions page (currently not added to the wizard)."""

    def __init__(self, parent=None):
        super(IntroPage, self).__init__(parent)
        self.setTitle(u"Οδηγίες")
        #self.setPixmap(QtGui.QWizard.WatermarkPixmap, QtGui.QPixmap(':/watermark1'))
        label = QtGui.QLabel(u"Αυτός ο οδηγός θα δημιουργήσει νέο Αρχείο Μισθοδοσίας.\n\n "
                             u"Εσείς θα πρέπει απλά να εισάγετε τις απαραίτητες παραμέτρους "
                             u"καθώς και το όνομα του αρχείου και το σημείο αποθήκευσης.\n\n"
                             u"Μπορείτε σε κάθε βήμα να αναθεωρήσετε και να επιστρέψετε.\n\n"
                             u"Πατήστε δημιουργία στην τελευταία οθόνη για να ολοκληρώσετε.")
        label.setWordWrap(True)
        layout = QtGui.QVBoxLayout()
        layout.addWidget(label)
        self.setLayout(layout)


class coDataPage(QtGui.QWizardPage):
    """First data page: company type, names and activity code (KAD)."""

    def __init__(self, parent=None):
        super(coDataPage, self).__init__(parent)
        #parent.button(QtGui.QWizard.BackButton).setVisible(False)
        #self.buttonText(QtGui.QWizard.NextButton)
        self.setButtonText(QtGui.QWizard.BackButton,u'< Πίσω')
        self.setButtonText(QtGui.QWizard.NextButton,u'Επόμενο >')
        self.setButtonText(QtGui.QWizard.CancelButton,u'Ακύρωση')
        self.setTitle(u"Πληροφορίες εταιρίας")
        self.setSubTitle(u"Συμπληρώστε τα βασικά στοιχεία της εταιρίας")
        #self.setPixmap(QtGui.QWizard.LogoPixmap, QtGui.QPixmap(':/logo1'))
        cotypLabel = QtGui.QLabel(u"Τύπος επιχείρησης:")
        cotyp = widgets.DbComboBox([[1,u'Νομικό Πρόσωπο'],[2,u'Φυσικό Πρόσωπο']])
        cotypLabel.setBuddy(cotyp)
        eponNameLabel = QtGui.QLabel(u"Επωνυμία:")
        eponNameLineEdit = QtGui.QLineEdit()
        eponNameLabel.setBuddy(eponNameLineEdit)
        onomLabel = QtGui.QLabel(u"Όνομα (Για φυσικά πρόσωπα):")
        onomLineEdit = QtGui.QLineEdit()
        onomLineEdit.setDisabled(True)
        onomLabel.setBuddy(onomLineEdit)
        patrLabel = QtGui.QLabel(u"Πατρώνυμο (Για φυσικά πρόσωπα):")
        patrLineEdit = QtGui.QLineEdit()
        patrLineEdit.setDisabled(True)
        patrLabel.setBuddy(patrLineEdit)
        # Hidden line edit that only serves as storage for registerField
        cotypValue = QtGui.QLineEdit()
        cotypValue.setText('1')
        def onCotypActivated():
            # Personal-name fields are enabled only for natural persons
            # (combo index 1 -> cotyp id '2')
            if cotyp.currentIndex() ==1:
                onomLineEdit.setDisabled(False)
                patrLineEdit.setDisabled(False)
                cotypValue.setText('2')
            else:
                onomLineEdit.setText('')
                patrLineEdit.setText('')
                onomLineEdit.setDisabled(True)
                patrLineEdit.setDisabled(True)
                cotypValue.setText('1')
        cotyp.activated.connect(onCotypActivated)
        kadLabel = QtGui.QLabel(u"Κωδικός αρ.Δραστηριότητας:")
        kadLineEdit = QtGui.QLineEdit()
        kadLabel.setBuddy(kadLineEdit)
        kadLineEdit.setReadOnly(True)
        kadFindButton = QtGui.QPushButton(u'Εύρεση ΚΑΔ')
        kadLayout = QtGui.QHBoxLayout()
        kadLayout.addWidget(kadLineEdit)
        kadLayout.addWidget(kadFindButton)
        kadpLabel = QtGui.QLabel(u"Περιγραφή αρ.Δραστηριότητας:")
        kadpTextEdit = QtGui.QTextEdit()
        kadpLabel.setBuddy(kadpTextEdit)
        kadpTextEdit.setReadOnly(True)
        draLabel = QtGui.QLabel(u"Συντομογραφία Δραστηριότητας:")
        draLineEdit = QtGui.QLineEdit()
        draLabel.setBuddy(draLineEdit)
        def openFindDlg():
            # Let the user pick an activity code (KAD) from a lookup list
            kadList = osyk.cad_list()
            head = [u'ΚΑΔ',u'Περιγραφή']
            cw = [35,300]
            form = fFindFromList(kadList,head,cw)
            if form.exec_() == QtGui.QDialog.Accepted:
                kadLineEdit.setText(form.array[0])
                kadpTextEdit.setText(form.array[1])
        kadFindButton.clicked.connect(openFindDlg)
        self.registerField('cotyp_id',cotypValue)
        self.registerField('epon*', eponNameLineEdit)
        self.registerField('onom', onomLineEdit)
        self.registerField('patr', patrLineEdit)
        self.registerField('kad*', kadLineEdit)
        self.registerField('dra*', draLineEdit)
        #self.registerField('kadt*', kadpTextEdit)
        layout = QtGui.QGridLayout()
        layout.addWidget(cotypLabel, 0, 0)
        layout.addWidget(cotyp, 0, 1)
        layout.addWidget(eponNameLabel, 1, 0)
        layout.addWidget(eponNameLineEdit, 1, 1)
        layout.addWidget(onomLabel, 2, 0)
        layout.addWidget(onomLineEdit, 2, 1)
        layout.addWidget(patrLabel, 3, 0)
        layout.addWidget(patrLineEdit, 3, 1)
        layout.addWidget(kadLabel, 4, 0)
        layout.addLayout(kadLayout, 4, 1)
        layout.addWidget(kadpLabel,5, 0)
        layout.addWidget(kadpTextEdit, 5, 1,2,1)
        layout.addWidget(draLabel,7, 0)
        layout.addWidget(draLineEdit,7, 1)
        self.setLayout(layout)


class coDataPage2(QtGui.QWizardPage):
    """Second data page: tax (AFM/DOY), address and IKA details."""

    def __init__(self, parent=None):
        super(coDataPage2, self).__init__(parent)
        self.setButtonText(QtGui.QWizard.BackButton,u'< Πίσω')
        self.setButtonText(QtGui.QWizard.NextButton,u'Επόμενο >')
        self.setButtonText(QtGui.QWizard.CancelButton,u'Ακύρωση')
        self.setTitle(u"Πληροφορίες εταιρίας")
        self.setSubTitle(u"Συμπληρώστε τα υπόλοιπα στοιχεία της εταιρίας")
        afmLabel = QtGui.QLabel(u"ΑΦΜ:")
        afmLineEdit = QtGui.QLineEdit()
        afmLabel.setBuddy(afmLineEdit)
        doyLabel = QtGui.QLabel(u"ΔΟΥ:")
        doyLineEdit = QtGui.QLineEdit()
        doyLabel.setBuddy(doyLineEdit)
        doyLineEdit.setReadOnly(True)
        doyFindButton = QtGui.QPushButton(u'...')
        doyFindButton.setMaximumSize(QtCore.QSize(20, 50))
        doyLayout = QtGui.QHBoxLayout()
        doyLayout.addWidget(doyLineEdit)
        doyLayout.addWidget(doyFindButton)
        def openFindDlg():
            # Let the user pick a tax office (DOY) from a lookup list
            head = [u'Κωδ',u'ΔΟΥ']
            cw = [35,300]
            form = fFindFromList(osyk.doy_list(),head,cw)
            if form.exec_() == QtGui.QDialog.Accepted:
                doyLineEdit.setText(form.array[1])
        doyFindButton.clicked.connect(openFindDlg)
        poliLabel = QtGui.QLabel(u"Πόλη:")
        poliLineEdit = QtGui.QLineEdit()
        poliLabel.setBuddy(poliLineEdit)
        tkLabel = QtGui.QLabel(u"Ταχ.Κωδικός:")
        tkLineEdit = QtGui.QLineEdit()
        tkLabel.setBuddy(tkLineEdit)
        odosLabel = QtGui.QLabel(u"Οδός:")
        odosLineEdit = QtGui.QLineEdit()
        odosLabel.setBuddy(odosLineEdit)
        numLabel = QtGui.QLabel(u"Αριθμός:")
        numLineEdit = QtGui.QLineEdit()
        numLabel.setBuddy(numLineEdit)
        ameLabel = QtGui.QLabel(u"Αρ.Μητρ.ΙΚΑ:")
        ameLineEdit = QtGui.QLineEdit()
        ameLabel.setBuddy(ameLineEdit)
        ikacLabel = QtGui.QLabel(u"Κωδ.ΙΚΑ:")
        ikacLineEdit = QtGui.QLineEdit()
        ikacLabel.setBuddy(ikacLineEdit)
        ikacLineEdit.setReadOnly(True)
        ikaLabel = QtGui.QLabel(u"Υπ/μα.ΙΚΑ:")
        ikaLineEdit = QtGui.QLineEdit()
        ikaLabel.setBuddy(ikaLineEdit)
        ikaLineEdit.setReadOnly(True)
        ikaFindButton = QtGui.QPushButton(u'...')
        ikaFindButton.setMaximumSize(QtCore.QSize(20, 50))
        ikaLayout = QtGui.QHBoxLayout()
        ikaLayout.addWidget(ikaLineEdit)
        ikaLayout.addWidget(ikaFindButton)
        def openFindDlgIKA():
            # Let the user pick an IKA branch from a lookup list
            head = [u'Κωδ',u'Υποκατάστημα ΙΚΑ']
            cw = [35,300]
            form = fFindFromList(osyk.ika_list(),head,cw)
            if form.exec_() == QtGui.QDialog.Accepted:
                ikacLineEdit.setText(form.array[0])
                ikaLineEdit.setText(form.array[1])
        ikaFindButton.clicked.connect(openFindDlgIKA)
        self.registerField('afm*',afmLineEdit)
        self.registerField('doy*',doyLineEdit)
        self.registerField('pol*',poliLineEdit)
        self.registerField('odo',odosLineEdit)
        self.registerField('num',numLineEdit)
        self.registerField('tk',tkLineEdit)
        self.registerField('ikac*',ikacLineEdit)
        self.registerField('ikap*',ikaLineEdit)
        self.registerField('ame*',ameLineEdit)
        layout = QtGui.QGridLayout()
        layout.addWidget(afmLabel, 0, 0)
        layout.addWidget(afmLineEdit, 0, 1)
        layout.addWidget(doyLabel, 0, 2)
        layout.addLayout(doyLayout, 0, 3)
        layout.addWidget(poliLabel, 1, 0)
        layout.addWidget(poliLineEdit, 1, 1)
        layout.addWidget(tkLabel, 1, 2)
        layout.addWidget(tkLineEdit, 1, 3)
        layout.addWidget(odosLabel, 2, 0)
        layout.addWidget(odosLineEdit, 2, 1)
        layout.addWidget(numLabel, 2, 2)
        layout.addWidget(numLineEdit, 2, 3)
        layout.addWidget(ameLabel, 3, 0)
        layout.addWidget(ameLineEdit, 3, 1)
        layout.addWidget(ikacLabel, 4, 0)
        layout.addWidget(ikacLineEdit, 4, 1)
        layout.addWidget(ikaLabel, 4, 2)
        layout.addLayout(ikaLayout, 4, 3)
        self.setLayout(layout)


class filePage(QtGui.QWizardPage):
    """Page where the user picks the output file name and location."""

    def __init__(self, parent=None):
        super(filePage, self).__init__(parent)
        self.setButtonText(QtGui.QWizard.BackButton,u'< Πίσω')
        self.setButtonText(QtGui.QWizard.NextButton,u'Επόμενο >')
        self.setButtonText(QtGui.QWizard.CancelButton,u'Ακύρωση')
        self.setTitle(u"Όνομα αρχείου")
        self.setSubTitle(u"Δώστε όνομα και περιοχή αποθήκευσης")
        #self.setPixmap(QtGui.QWizard.LogoPixmap, QtGui.QPixmap(':/logo1'))
        fileNameLabel = QtGui.QLabel(u"Όνομα αρχείου:")
        self.fileNameLineEdit = QtGui.QLineEdit()
        self.fileNameLineEdit.setReadOnly(True)
        fileNameLabel.setBuddy(self.fileNameLineEdit)
        butFile = QtGui.QPushButton(u'...')
        butFile.clicked.connect(self.fSave)
        fileLayout = QtGui.QHBoxLayout()
        fileLayout.addWidget(self.fileNameLineEdit)
        fileLayout.addWidget(butFile)
        # NOTE(review): the widgets below are created but never added to
        # the layout nor registered as fields; they look like leftovers
        patrLabel = QtGui.QLabel(u"Πατρώνυμο (Για φυσικά πρόσωπα):")
        patrLineEdit = QtGui.QLineEdit()
        patrLabel.setBuddy(patrLineEdit)
        cotypLabel = QtGui.QLabel(u"Τύπος επιχείρησης:")
        cotyp = QtGui.QComboBox()
        cotypLabel.setBuddy(cotyp)
        cotyp.addItems([u'1.Νομικό Πρόσωπο',u'2.Φυσικό Πρόσωπο'])
        self.registerField('fname*', self.fileNameLineEdit)
        layout = QtGui.QGridLayout()
        layout.addWidget(fileNameLabel, 0, 0)
        layout.addLayout(fileLayout, 0, 1)
        self.setLayout(layout)

    def fSave(self):
        """Open a save-file dialog and store the chosen .m13 path."""
        fileName = QtGui.QFileDialog.getSaveFileName(self,
                "QFileDialog.getSaveFileName()",
                self.field('fname'),
                "payroll m13 (*.m13)",
                QtGui.QFileDialog.Options())
        if fileName:
            self.fileNameLineEdit.setText(fileName)


class finalPage(QtGui.QWizardPage):
    """Summary page shown just before the database is created."""

    def __init__(self, parent=None):
        super(finalPage, self).__init__(parent)
        self.setButtonText(QtGui.QWizard.BackButton,u'< Πίσω')
        self.setButtonText(QtGui.QWizard.FinishButton,u'Ολοκλήρωση')
        self.setButtonText(QtGui.QWizard.CancelButton,u'Ακύρωση')
        self.setTitle(u"Δημιουργία αρχείου ")
        #self.setPixmap(QtGui.QWizard.WatermarkPixmap, QtGui.QPixmap(':/watermark2'))
        self.label = QtGui.QLabel()
        self.label.setWordWrap(True)
        layout = QtGui.QVBoxLayout()
        layout.addWidget(self.label)
        self.setLayout(layout)

    def initializePage(self):
        """Build the confirmation message from the collected fields."""
        finishText = self.wizard().buttonText(QtGui.QWizard.FinishButton)
        # NOTE(review): str.replace() returns a new string and the result
        # is discarded here, so the '&' mnemonic is not actually stripped
        finishText.replace('&', '')
        txt = u'Προσοχή , θα δημιουργηθεί αρχείο μισθοδοσίας με τις παρακάτω παραμέτρους :\n\n'
        txt += u'Στοιχεία Επιχείρησης : %s \n\n' % self.field('epon')
        txt += u'Όνομα Αρχείου : %s \n\n' % self.field('fname')
        txt += u"\nΠατήστε %s για να ολοκληρωθεί η διαδικασία." % finishText
        txt += u"\n\nΜε την ολοκλήρωση της διαδικασίας το νέο αρχείο είναι έτοιμο για εισαγωγή δεδομένων!!!"
        self.label.setText(txt)


if __name__ == '__main__':
    import sys
    app = QtGui.QApplication(sys.argv)
    wizard = NewDbWizard()
    wizard.show()
    sys.exit(app.exec_())
Planning meals when pregnant can take a bit more thought, as your body requires extra energy and nutrients. With a little simple meal planning, getting the nutrition you need every day is easy. Full of veggies, fruits, whole grains, and lean proteins, this strategy provides the nutrition you need in a tasty and exciting manner. The recipes meet Healthy Pregnancy nourishment parameters, meaning they deliver healthful nutrients moms-to-be want more of, and they do not include any off-limits foods such as alcohol or unpasteurized cheese. This meal strategy is based on a 2,900-calorie diet, but you may need more or fewer calories depending on your particular nutrition needs. – Toast the bread and top with mashed avocado. Season with just a pinch of pepper, and if desired, a dab of hot sauce. – Split the English muffin in half. – Toast waffles, then top with yogurt and drizzle with honey. – Serve the casserole with a side of the spinach, dressed with 2 Tbsp. olive oil and 1 Tbsp. balsamic vinegar. Please note: this meal strategy is controlled for calories, protein, folic acid, fiber, iron, calcium, and sodium.
def _create_pins():
    """Yield every 4-digit PIN exactly once, ordered outward from the middle.

    The sequence alternates below/above the midpoint:
    4999, 5000, 4998, 5001, 4997, 5002, ... 0, 9999.

    :rtype: Iterable[int]
    """
    middle = 5000
    for i in range(0, 2 * middle):
        if i % 2 == 0:
            yield middle - i // 2 - 1
        else:
            yield middle + i // 2


#: All 10 000 PINs in scan order (see _create_pins for the ordering).
PINS = tuple(_create_pins())

#: Reverse lookup table: pin -> position in PINS.  PINS.index() is O(n)
#: per call, which is wasteful on a 10 000-element tuple; this dict makes
#: get_pin_index O(1).
_PIN_INDEX = {pin: index for index, pin in enumerate(PINS)}

assert len(PINS) == 10000, "Len = %d" % len(PINS)
assert min(PINS) == 0
assert max(PINS) == 9999


def get_pin_index(pin):
    """Return the position of *pin* within PINS, or 0 when *pin* is None.

    :raises ValueError: if *pin* is not a valid PIN (mirrors the
        behaviour of the original ``tuple.index`` lookup).
    """
    if pin is None:
        return 0
    try:
        return _PIN_INDEX[pin]
    except KeyError:
        raise ValueError('%r is not in PINS' % (pin,))


def pin_generator(last_pin=None):
    """Yield the PINs that follow *last_pin* in scan order.

    With ``last_pin=None`` the full sequence is yielded from the start.

    :type last_pin: int or None
    :rtype: Iterable[int]
    """
    start_pos = get_pin_index(last_pin) + 1 if last_pin is not None else 0
    for i in range(start_pos, len(PINS)):
        yield PINS[i]


def test_selector():
    """Smoke-test the lookup and resume semantics."""
    print(get_pin_index(6000))
    l1 = list(pin_generator(last_pin=9997))
    assert len(frozenset(l1)) == 4
    l2 = list(pin_generator(last_pin=4999))
    assert len(frozenset(l2)) == 9999
    l3 = list(pin_generator(last_pin=5000))
    assert len(frozenset(l3)) == 9998


if __name__ == '__main__':
    test_selector()
Worrying about your chimney is stressful, but we’re here to help solve your needs with quality customer care! Scheduling an appointment is the first step in addressing your concerns, whether you have a leaky chimney, need masonry repair, or even want a general chimney inspection. The first step in scheduling an appointment is filling out basic information through our online form. This provides us a reference point, when we call you to get into specifics about your needs. It is a straightforward process from there, once we get in touch with you we can move closer to solving any chimney problems you have. With that said, an online form can only provide so much information. In order to provide the best quality customer service and experience in the region, we need your help. These are five appointment tips that you can use to ensure that we provide you a maximum return on investment. By taking initiative as a fireplace or chimney owner, you’re taking a shortcut to a faster and more efficient service. If you are scheduling an annual inspection or sweeping, keep in mind that the best time for maintenance is in the spring & summer. During the colder months, we are often booked out far in advance and working to be there for our customers during emergencies. Scheduling routine maintenance in the “off-season” allows us to offer prompter, more flexible service. Speaking of maintenance, it is a wise decision to let us know immediately if you suspect that there may be damage to your chimney or feel there might be a problem with it. Sometimes animals or pests take up residence in it if you don’t have a chimney cap to protect it. Or maybe you have a leaky chimney that drips down into your living room, causing water damage. Whatever the case, let us know when you suspect. After all, you deserve to have peace of mind. Save yourself headache and financial heartbreak by contacting us ASAP. 
To attain your peace of mind, it helps our professional team and technicians when you provide specific details about your problem. There is nothing wrong with being unsure about the situation – after all, we’re the professionals – but it goes a long way when you describe characteristics of your concern. For example, you can say: “I’ve noticed that there are drafts coming in from my fireplace; I think that the hot air may be escaping” or “I suspect that there are raccoons or birds in my chimney”. These descriptions of your concerns are specific and concise; writing down your fears ahead of time helps us to troubleshoot and prepare a solution to your chimney problem. Savvy chimney and fireplace owners will likely have a few dates set in advance for chimney inspection and have specific concerns at hand. To fully implement a successful chimney inspection strategy though, chimney and fireplace owners are encouraged to prepare their home accordingly. If we are to service your fireplace, please discontinue use at least 24 hours before our arrival and remove the ash from the ashbox. We also ask that you move any precious items or breakables from the 5-6 foot area surrounding the appliance being serviced. This will help guarantee that our technicians can get right to work safely and quickly. You are our key partner in addressing your chimney or fireplace needs. We attribute our 30-year track record of success to collaboration with homeowners like you. Are you ready to take the next step in solving your chimney needs? Schedule an appointment today!
### # # WEIO Web Of Things Platform # Copyright (C) 2013 Nodesign.net, Uros PETREVSKI, Drasko DRASKOVIC # All rights reserved # # ## ## ######## #### ####### # ## ## ## ## ## ## ## # ## ## ## ## ## ## ## # ## ## ## ###### ## ## ## # ## ## ## ## ## ## ## # ## ## ## ## ## ## ## # ### ### ######## #### ####### # # Web Of Things Platform # # This file is part of WEIO and is published under BSD license. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # 1. Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # 2. Redistributions in binary form must reproduce the above copyright # notice, this list of conditions and the following disclaimer in the # documentation and/or other materials provided with the distribution. # 3. All advertising materials mentioning features or use of this software # must display the following acknowledgement: # This product includes software developed by the WeIO project. # 4. Neither the name of the WeIO nor the # names of its contributors may be used to endorse or promote products # derived from this software without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY WEIO PROJECT AUTHORS AND CONTRIBUTORS ''AS IS'' AND ANY # EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED # WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE # DISCLAIMED. 
# IN NO EVENT SHALL WEIO PROJECT AUTHORS AND CONTRIBUTORS BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors :
# Uros PETREVSKI <uros@nodesign.net>
# Drasko DRASKOVIC <drasko.draskovic@gmail.com>
#
###

from weioLib.weioIO import *
from weioUserApi import serverPush
from weioLib import weioRunnerGlobals
import platform, sys

# WeIO API bindings from websocket to lower levels.
# Each handler receives "data" as a list of arguments extracted from the
# websocket message.  Handlers that produce a result return a dictionary;
# the others return None.  When no board is attached
# (weioRunnerGlobals.WEIO_SERIAL_LINKED is not True) every handler degrades
# to a PC simulation: it prints the call and, where a result is expected,
# fakes one so the browser side keeps working.

def callPinMode(data) :
    # data = [pin, mode]
    if (weioRunnerGlobals.WEIO_SERIAL_LINKED is True):
        pinMode(data[0],data[1])
    else :
        print "pinMode ON PC", data
    return None

def callPortMode(data) :
    # data = [port, mode]
    if (weioRunnerGlobals.WEIO_SERIAL_LINKED is True):
        portMode(data[0],data[1])
    else :
        # NOTE(review): message says "pinMode" but this is portMode -- confirm
        print "pinMode ON PC", data
    return None

def callDigitalWrite(data) :
    # data = [pin, value]
    if (weioRunnerGlobals.WEIO_SERIAL_LINKED is True):
        digitalWrite(data[0], data[1])
    else :
        print "digitalWrite ON PC", data
    return None

def callDigitalRead(data) :
    # data = [pin] -> {"data": value, "pin": pin}
    bck = {}
    if (weioRunnerGlobals.WEIO_SERIAL_LINKED is True):
        value = digitalRead(data[0])
        bck["data"] = value
        bck["pin"] = data[0]
    else :
        print "digitalRead ON PC", data
        bck["data"] = 1 # faked value
        bck["pin"] = data[0] # pin
    return bck

def callPulseIn(data) :
    # data = [pin, level, timeout] -> echo of the arguments plus the
    # measured pulse length in "data"
    bck = {}
    if (weioRunnerGlobals.WEIO_SERIAL_LINKED is True):
        value = pulseIn(data[0], data[1], data[2])
        bck["data"] = value
        bck["pin"] = data[0]
        bck["level"] = data[1]
        # NOTE(review): looks like a copy-paste bug -- timeout echoes
        # data[1] (level) here but data[2] in the simulated branch below.
        bck["timeout"] = data[1]
    else :
        print "pulseIn ON PC", data
        bck["data"] = 1 # faked value
        bck["pin"] = data[0] # pin
        bck["level"] = data[1] # level
        bck["timeout"] = data[2] # timeout
    return bck

def callPortWrite(data) :
    # data = [port, value]
    if (weioRunnerGlobals.WEIO_SERIAL_LINKED is True):
        portWrite(data[0], data[1])
    else :
        print "portWrite ON PC", data
    return None

def callPortRead(data) :
    # data = [port] -> {"data": value, "port": port}
    bck = {}
    if (weioRunnerGlobals.WEIO_SERIAL_LINKED is True):
        value = portRead(data[0])
        bck["data"] = value
        bck["port"] = data[0]
    else :
        print "digitalRead ON PC", data
        bck["data"] = 1 # faked value
        bck["port"] = data[0] # pin
    return bck

def callDHTRead(data) :
    # data = [pin]; result (if any) is delivered by the lower layer
    if (weioRunnerGlobals.WEIO_SERIAL_LINKED is True):
        dhtRead(data[0])
    else :
        print "dhtRead ON PC", data
    return None

def callAnalogRead(data) :
    # data = [pin] -> {"data": 10-bit reading, "pin": pin}
    bck = {}
    if (weioRunnerGlobals.WEIO_SERIAL_LINKED is True):
        #print "From browser ", data
        value = analogRead(data[0]) # this is pin number
        bck["data"] = value
        bck["pin"] = data[0]
    else :
        print "analogRead ON PC", data
        bck["data"] = 1023 # faked value
        bck["pin"] = data[0]
    return bck

def callSetPwmPeriod(data) :
    # data = [pwm channel, period]
    if (weioRunnerGlobals.WEIO_SERIAL_LINKED is True):
        setPwmPeriod(data[0],data[1])
    else:
        print "setPwmPeriod ON PC", data
    return None

# def callSetPwmLimit(data) :
#     if (weioRunnerGlobals.WEIO_SERIAL_LINKED is True):
#         setPwmLimit(data[0])
#     else:
#         print "setPwmLimit ON PC", data
#     return None

def callPwmWrite(data) :
    # data = [pin, duty]
    if (weioRunnerGlobals.WEIO_SERIAL_LINKED is True):
        pwmWrite(data[0], data[1])
    else :
        print "pwmWrite ON PC", data
    return None

def callProportion(data) :
    # data = [value, istart, istop, ostart, ostop]; linear re-mapping
    bck = {}
    if (weioRunnerGlobals.WEIO_SERIAL_LINKED is True):
        #print "From browser ", data
        value = proportion(data[0],data[1],data[2],data[3],data[4])
        bck["data"] = value
    else :
        print "proportion ON PC", data
        bck["data"] = data
    return bck

def callAttachInterrupt(data) :
    # data = [pin, mode, jsCallbackString]; the JS callback name is kept in
    # the interrupt object so genericInterrupt can push results back.
    if (weioRunnerGlobals.WEIO_SERIAL_LINKED is True):
        iObj = {"pin" : data[0], "jsCallbackString" : data[2]}
        attachInterrupt(data[0], data[1], genericInterrupt, iObj)
    else:
        print "attachInterrupt ON PC", data
    return None

def callDetachInterrupt(data) :
    # data = [pin]
    if (weioRunnerGlobals.WEIO_SERIAL_LINKED is True):
        detachInterrupt(data[0])
    else:
        print "detachInterrupt ON PC", data
    return None

def genericInterrupt(event, obj):
    # Bridge a hardware interrupt back to the browser: push the pin and
    # event type to the JS callback recorded at attach time.
    bck = {}
    bck["data"] = obj["pin"]
    bck["eventType"] = getInterruptType(event["type"])
    serverPush(obj["jsCallbackString"], bck)

def callDelay(data) :
    # data = [milliseconds]
    if (weioRunnerGlobals.WEIO_SERIAL_LINKED is True):
        delay(data[0])
    else :
        print "delay ON PC", data
    return None

def callTone(data) :
    # data = [pin, frequency] or [pin, frequency, duration]
    if (weioRunnerGlobals.WEIO_SERIAL_LINKED is True):
        print "TONE VALS", len(data)
        if (len(data)==2):
            tone(data[0], data[1])
        elif (len(data)==3):
            tone(data[0], data[1], data[2])
    else :
        print "tone ON PC", data
    return None

def callNotone(data) :
    # data = [pin]
    if (weioRunnerGlobals.WEIO_SERIAL_LINKED is True):
        noTone(data[0])
    else :
        print "notone ON PC", data
    return None

def callConstrain(data) :
    # data = [value, low, high]
    # NOTE(review): broken when serial-linked -- "bck" is never initialized
    # and "value" is never assigned (the constrain() result is discarded),
    # so this raises NameError.  Compare with callProportion above for the
    # intended shape.
    if (weioRunnerGlobals.WEIO_SERIAL_LINKED is True):
        constrain(data[0], data[1], data[2],)
        bck["data"] = value
    else :
        print "contrain ON PC", data
        bck["data"] = 1 # faked value
        bck["pin"] = data[0] # pin
    return bck

def callMillis(data) :
    # -> {"data": milliseconds since board start}
    bck = {}
    if (weioRunnerGlobals.WEIO_SERIAL_LINKED is True):
        value = millis()
        bck["data"] = value
    else :
        print "millis ON PC", data
        bck["data"] = 0 # faked value
    return bck

def callGetTemperature(data):
    # -> {"data": board temperature}
    bck = {}
    if (weioRunnerGlobals.WEIO_SERIAL_LINKED is True):
        value = getTemperature()
        bck["data"] = value
    else :
        print "getTemperature ON PC", data
        bck["data"] = 0 # faked value
    return bck

def callUserMesage(data):
    print "USER TALKS", data
    #weioRunnerGlobals.userMain

def pinsInfo(data) :
    # -> {"data": pin declarations published by the runner}
    bck = {}
    bck["data"] = weioRunnerGlobals.DECLARED_PINS
    #print("GET PIN INFO ASKED!", bck["data"])
    return bck

def callListSerials(data):
    # -> {"data": available serial port names}
    bck = {}
    bck["data"] = listSerials()
    return bck

# UART SECTION
# A single shared serial port for the whole session; initSerial is a no-op
# once the port is open.
clientSerial = None

def callInitSerial(data):
    # data = [port, baudrate]
    global clientSerial
    if (clientSerial is None) :
        clientSerial = initSerial(data[0], data[1])

def callSerialWrite(data):
    global clientSerial
    if not(clientSerial is None) :
        clientSerial.write(data)
    else :
        sys.stderr.write("Serial port is not initialized. Use initSerial function first")

def callSerialRead(data):
    # -> {"data": bytes read} (empty dict when the port was never opened)
    global clientSerial
    bck = {}
    if not(clientSerial is None) :
        bck["data"] = clientSerial.read()
    else :
        sys.stderr.write("Serial port is not initialized. Use initSerial function first")
    return bck

# SPI SECTION
# Same single-instance pattern as the UART section above.
SPI = None

def callInitSPI(data):
    # data = [bus/chip-select]
    global SPI
    if (SPI is None) :
        SPI = initSPI(data[0])

def callWriteSPI(data):
    global SPI
    if not(SPI is None) :
        SPI.write(data[0])
    else :
        sys.stderr.write("SPI port is not initialized. Use initSerial function first")

def callReadSPI(data):
    # data = [byte count] -> {"data": bytes read}
    global SPI
    bck = {}
    if not(SPI is None) :
        bck["data"] = SPI.read(data[0])
    else :
        sys.stderr.write("SPI port is not initialized. Use initSerial function first")
    return bck

###
# WeIO native spells
###
# Dispatch table: websocket message name -> handler above.
weioSpells = {
    "digitalWrite"      :callDigitalWrite,
    "digitalRead"       :callDigitalRead,
    "pulseIn"           :callPulseIn,
    "portWrite"         :callPortWrite,
    "portRead"          :callPortRead,
    "dhtRead"           :callDHTRead,
    "analogRead"        :callAnalogRead,
    "pinMode"           :callPinMode,
    "portMode"          :callPortMode,
    "setPwmPeriod"      :callSetPwmPeriod,
    "pwmWrite"          :callPwmWrite,
    "proportion"        :callProportion,
    "attachInterrupt"   :callAttachInterrupt,
    "detachInterrupt"   :callDetachInterrupt,
    "tone"              :callTone,
    "noTone"            :callNotone,
    "constrain"         :callConstrain,
    "millis"            :callMillis,
    "getTemperature"    :callGetTemperature,
    "delay"             :callDelay,
    "pinsInfo"          :pinsInfo,
    "listSerials"       :callListSerials,
    "initSerial"        :callInitSerial,
    "serialWrite"       :callSerialWrite,
    "initSPI"           :callInitSPI,
    "readSPI"           :callReadSPI,
    "writeSPI"          :callWriteSPI
    # "message":callUserMesage
}

###
# User added spells (handlers)
###
weioUserSpells = {}

def addUserEvent(event, handler):
    # Register a user-defined websocket event handler.
    global weioUserSpells
    #print "Adding event ", event
    #print "and handler ", handler
    weioUserSpells[event] = handler

def removeUserEvents():
    # Drop all user-defined handlers (native spells are untouched).
    global weioUserSpells
    weioUserSpells.clear()
Do you know the feeling of finally getting your hands on a book that you’ve wanted for a long time? About six months ago I wandered into my local, enormous, largely impersonal book vendor (who for free-marketing sake will remain nameless) and asked if they carried the graphic novel The Pride of Baghdad. Of course they didn’t, but they probably could order it in. I didn’t want to go through the trouble of handing out all of my personal information one more time, so I said no thanks. Today, after sauntering in to spend a gift card, I noticed it on a wall of other graphic novels. I snatched it up and can’t wait to read it.
#!/usr/bin/env python # -*- coding: UTF-8 -*- from django.template import Library import re DEBUG = False register = Library() @register.filter def highlight_format(value): p_sub = re.compile('__codestart__ (\w+)') value = p_sub.sub(r'<pre name="code" class="\g<1>">', value) p_sub = re.compile(r'__codeend__', re.VERBOSE) value = p_sub.sub(r'</pre>', value) if DEBUG: print value print '+' * 80 p_highlight = re.compile(r'(<pre name="code" class="\w+">)(?P<codeblock>.*)(</pre>)', re.S) f_list = p_highlight.findall(value) if f_list: s_list = p_highlight.split(value) if DEBUG: for i in s_list: print i print '=' * 80 for code_block in p_highlight.finditer(value): code = code_block.group('codeblock') index = s_list.index(code) code = code.replace('&lt;', '<') code = code.replace('&gt;', '>') code = code.replace('&amp;', '&') code = code.replace('<p>', '') code = code.replace('</p>', '') s_list[index] = code value = ''.join(s_list) return value
You may list more or less than ten, but please share your favorite manga with us. If you would like to update your list, then just quote your previous one for a quicker and easier comparison. I'll list my top five to start off. I could only manage to fit five titles into my list, and even right now I'm not that confident in the ordering of these titles. The more I read, the more I'll be able to fill in here, so look out for titles like Kiichi!! showing up soon. Alright....I too like Lydia have far too many to list. I'll list the first favorites that come to mind. The first 3 are my true #'s 1-3 but after that no particular order is there. I don't know if I've read enough to have a top 10 list. I only read six manga! So I will list them. In order of preference. Well since ya wanna say you read so much manga I expect you to assist Aya in making the Manga section flourish. So get cracking missy. Psycho 101 wrote: Well since ya wanna say you read so much manga I expect you to assist Aya in making the Manga section flourish. So get cracking missy. Long ago there was much talk amongst us of making the actual ANN manga section grow like mad, but that was also when I ended up "quitting the internet". But, I'll do what I can to make it boom on both forums! I just need to finish my application to Japan first, so give me a couple weeks, and then it's ON! Not a Jellyfish wrote: But, I'll do what I can to make it boom on both forums! I just need to finish my application to Japan first, so give me a couple weeks, and then it's ON! Oh it's ON eh? Well BRING IT! Why do I have to be the only one who reads so much manga on here?? Well, I used to read a lot more till I got to college. Then I starting watching more anime. You've reminded me of how far behind I probably am with some of the releases I've been keeping up with. Let's see... What do I have still lying around my apartment: Gunm - Last Order, Ah! 
Megami-sama, NHK ni Yokoso!, Akira, Nausicaa of the Valley of the Wind, Gunsmith Cats (which will likely be unloaded at Book Off in the near future), Ghost in the Shell (English and Japanese), Dominion and a pile of flotsam. The titles I've listed in italics are series I've purchased the Japanese tankoubon of. Glad I could remind you of manga. Hehehe. When I started college it was actually the anime that went and the manga that increased. Opposite for me. Uh... that was way more than 10. Oh well. I have a lot of manga.
# Caesar's Cipher is a very famous encryption technique used in cryptography. It is a type of substitution # cipher in which each letter in the plaintext is replaced by a letter some fixed number of positions down # the alphabet. For example, with a shift of 3, D would be replaced by G, E would become H, X would become A # and so on. # # Encryption of a letter X by a shift K can be described mathematically as # EK(X)=(X+K) % 26. # # Given a plaintext and it's corresponding ciphertext, output the minimum non-negative value of shift that was # used to encrypt the plaintext or else output −1 if it is not possible to obtain the given ciphertext from # the given plaintext using Caesar's Cipher technique. # # Input: # # The first line of the input contains Q, denoting the number of queries. # # The next Q lines contain two strings S and T consisting of only upper-case letters. # # Output: # # For each test-case, output a single non-negative integer denoting the minimum value of shift that was used # to encrypt the plaintext or else print −1 if the answer doesn't exist. 
# Constraints:
#   1 <= Q <= 5
#   1 <= |S| <= 10^5
#   1 <= |T| <= 10^5
#   |S| = |T|
#
# SAMPLE INPUT
# 2
# ABC
# DEF
# AAA
# PQR
#
# SAMPLE OUTPUT
# 3
# -1


def min_shift(plain, cipher):
    """Return the minimum non-negative Caesar shift mapping *plain* onto
    *cipher*, or -1 if no single shift works.

    Both strings are expected to be upper-case A-Z of equal length.
    Runs in a single O(n) pass: the shift implied by the first character
    pair must hold for every pair.
    """
    if len(plain) != len(cipher):
        return -1
    if not plain:
        # Empty plaintext maps to empty ciphertext with shift 0.
        return 0
    shift = (ord(cipher[0]) - ord(plain[0])) % 26
    for p, c in zip(plain, cipher):
        if (ord(c) - ord(p)) % 26 != shift:
            return -1
    return shift


def main():
    """Read Q query pairs from stdin and print the shift for each.

    The original script ran two独立 solutions back-to-back, each consuming
    the whole input stream, so the second pass always failed; the input is
    now read exactly once.
    """
    for _ in range(int(input().strip())):
        plain = input().strip()
        cipher = input().strip()
        print(min_shift(plain, cipher))


if __name__ == '__main__':
    main()
Caratini will undergo surgery on his fractured left hamate bone Monday, Bruce Levine of 670TheScore.com reports. ROTOWIRE RECOMMENDS: Caratini suffered the injury in his final at-bat Thursday against the Pirates. He's expected to be out 4-to-6 weeks. He'll head to the injured list, with Taylor Davis called up from Triple-A Iowa to back up Willson Contreras. Big night against the Brewers. Have him on your list tomorrow.
# (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.

# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type

import collections
import os

from ansible.errors import AnsibleError, AnsibleParserError, AnsibleAssertionError
from ansible.module_utils.six import iteritems, binary_type, text_type
from ansible.playbook.attribute import FieldAttribute
from ansible.playbook.base import Base
from ansible.playbook.become import Become
from ansible.playbook.conditional import Conditional
from ansible.playbook.helpers import load_list_of_blocks
from ansible.playbook.role.metadata import RoleMetadata
from ansible.playbook.taggable import Taggable
from ansible.plugins.loader import get_all_plugin_loaders
from ansible.utils.vars import combine_vars

__all__ = ['Role', 'hash_params']


# TODO: this should be a utility function, but can't be a member of
# the role due to the fact that it would require the use of self
# in a static method. This is also used in the base class for
# strategies (ansible/plugins/strategy/__init__.py)
def hash_params(params):
    """
    Construct a data structure of parameters that is hashable.

    This requires changing any mutable data structures into immutable ones.
    We chose a frozenset because role parameters have to be unique.

    .. warning:: this does not handle unhashable scalars. Two things
        mitigate that limitation:

        1) There shouldn't be any unhashable scalars specified in the yaml
        2) Our only choice would be to return an error anyway.
    """
    # NOTE(review): collections.Container/Mapping/Set/Sequence were moved to
    # collections.abc in Python 3.3 and removed from the collections module
    # in Python 3.10 -- these isinstance checks break on 3.10+.
    # Any container is unhashable if it contains unhashable items (for
    # instance, tuple() is a Hashable subclass but if it contains a dict, it
    # cannot be hashed)
    if isinstance(params, collections.Container) and not isinstance(params, (text_type, binary_type)):
        if isinstance(params, collections.Mapping):
            try:
                # Optimistically hope the contents are all hashable
                new_params = frozenset(params.items())
            except TypeError:
                new_params = set()
                for k, v in params.items():
                    # Hash each entry individually
                    new_params.update((k, hash_params(v)))
                new_params = frozenset(new_params)
        elif isinstance(params, (collections.Set, collections.Sequence)):
            try:
                # Optimistically hope the contents are all hashable
                new_params = frozenset(params)
            except TypeError:
                new_params = set()
                for v in params:
                    # Hash each entry individually
                    new_params.update(hash_params(v))
                new_params = frozenset(new_params)
        else:
            # This is just a guess.
            new_params = frozenset(params)
        return new_params

    # Note: We do not handle unhashable scalars but our only choice would be
    # to raise an error there anyway.
    return frozenset((params,))


class Role(Base, Become, Conditional, Taggable):
    """A playbook role: its metadata, vars, defaults, task and handler
    blocks, plus the parent/dependency links needed to compile it into a
    task list."""

    _delegate_to = FieldAttribute(isa='string')
    _delegate_facts = FieldAttribute(isa='bool', default=False)

    def __init__(self, play=None, from_files=None):
        # Populated later by _load_role_data(); from_files optionally maps
        # 'tasks'/'vars'/'defaults' to alternate entry-point file names.
        self._role_name = None
        self._role_path = None
        self._role_params = dict()
        self._loader = None

        self._metadata = None
        self._play = play
        self._parents = []
        self._dependencies = []
        self._task_blocks = []
        self._handler_blocks = []
        self._default_vars = dict()
        self._role_vars = dict()
        self._had_task_run = dict()
        self._completed = dict()

        if from_files is None:
            from_files = {}
        self._from_files = from_files

        super(Role, self).__init__()

    def __repr__(self):
        return self.get_name()

    def get_name(self):
        return self._role_name

    @staticmethod
    def load(role_include, play, parent_role=None, from_files=None):
        """Return the Role for *role_include*, reusing a cached instance
        from play.ROLE_CACHE when one was already built with identical
        parameters.

        :raises AnsibleError: when role dependencies recurse.
        """
        if from_files is None:
            from_files = {}
        try:
            # The ROLE_CACHE is a dictionary of role names, with each entry
            # containing another dictionary corresponding to a set of parameters
            # specified for a role as the key and the Role() object itself.
            # We use frozenset to make the dictionary hashable.
            params = role_include.get_role_params()
            if role_include.when is not None:
                params['when'] = role_include.when
            if role_include.tags is not None:
                params['tags'] = role_include.tags
            if from_files is not None:
                params['from_files'] = from_files
            if role_include.vars:
                params['vars'] = role_include.vars
            hashed_params = hash_params(params)
            if role_include.role in play.ROLE_CACHE:
                for (entry, role_obj) in iteritems(play.ROLE_CACHE[role_include.role]):
                    if hashed_params == entry:
                        if parent_role:
                            role_obj.add_parent(parent_role)
                        return role_obj

            r = Role(play=play, from_files=from_files)
            r._load_role_data(role_include, parent_role=parent_role)

            if role_include.role not in play.ROLE_CACHE:
                play.ROLE_CACHE[role_include.role] = dict()

            play.ROLE_CACHE[role_include.role][hashed_params] = r
            return r

        except RuntimeError:
            # RuntimeError here means Python hit its recursion limit while
            # loading dependencies -> a dependency cycle.
            raise AnsibleError("A recursion loop was detected with the roles specified. Make sure child roles do not have dependencies on parent roles", obj=role_include._ds)

    def _load_role_data(self, role_include, parent_role=None):
        """Populate this Role from its on-disk layout (meta/, tasks/,
        handlers/, vars/, defaults/) and hook it into *parent_role*."""
        self._role_name = role_include.role
        self._role_path = role_include.get_role_path()
        self._role_params = role_include.get_role_params()
        self._variable_manager = role_include.get_variable_manager()
        self._loader = role_include.get_loader()

        if parent_role:
            self.add_parent(parent_role)

        # copy over all field attributes, except for when and tags, which
        # are special cases and need to preserve pre-existing values
        for (attr_name, _) in iteritems(self._valid_attrs):
            if attr_name not in ('when', 'tags'):
                setattr(self, attr_name, getattr(role_include, attr_name))

        current_when = getattr(self, 'when')[:]
        current_when.extend(role_include.when)
        setattr(self, 'when', current_when)

        current_tags = getattr(self, 'tags')[:]
        current_tags.extend(role_include.tags)
        setattr(self, 'tags', current_tags)

        # dynamically load any plugins from the role directory
        for name, obj in get_all_plugin_loaders():
            if obj.subdir:
                plugin_path = os.path.join(self._role_path, obj.subdir)
                if os.path.isdir(plugin_path):
                    obj.add_directory(plugin_path)

        # load the role's other files, if they exist
        metadata = self._load_role_yaml('meta')
        if metadata:
            self._metadata = RoleMetadata.load(metadata, owner=self, variable_manager=self._variable_manager, loader=self._loader)
            self._dependencies = self._load_dependencies()
        else:
            self._metadata = RoleMetadata()

        task_data = self._load_role_yaml('tasks', main=self._from_files.get('tasks'))
        if task_data:
            try:
                self._task_blocks = load_list_of_blocks(task_data, play=self._play, role=self, loader=self._loader, variable_manager=self._variable_manager)
            except AssertionError as e:
                raise AnsibleParserError("The tasks/main.yml file for role '%s' must contain a list of tasks" % self._role_name,
                                         obj=task_data, orig_exc=e)

        handler_data = self._load_role_yaml('handlers')
        if handler_data:
            try:
                self._handler_blocks = load_list_of_blocks(handler_data, play=self._play, role=self, use_handlers=True, loader=self._loader,
                                                           variable_manager=self._variable_manager)
            except AssertionError as e:
                raise AnsibleParserError("The handlers/main.yml file for role '%s' must contain a list of tasks" % self._role_name,
                                         obj=handler_data, orig_exc=e)

        # vars and default vars are regular dictionaries
        self._role_vars = self._load_role_yaml('vars', main=self._from_files.get('vars'), allow_dir=True)
        if self._role_vars is None:
            self._role_vars = dict()
        elif not isinstance(self._role_vars, dict):
            raise AnsibleParserError("The vars/main.yml file for role '%s' must contain a dictionary of variables" % self._role_name)

        self._default_vars = self._load_role_yaml('defaults', main=self._from_files.get('defaults'), allow_dir=True)
        if self._default_vars is None:
            self._default_vars = dict()
        elif not isinstance(self._default_vars, dict):
            raise AnsibleParserError("The defaults/main.yml file for role '%s' must contain a dictionary of variables" % self._role_name)

    def _load_role_yaml(self, subdir, main=None, allow_dir=False):
        """Load <role>/<subdir>/(main|*main*).{yml,yaml,json}; with
        allow_dir=True, merge every matching file via combine_vars.

        Returns the parsed data, or None when the subdir/file is absent.
        :raises AnsibleParserError: when an explicitly requested *main*
            file does not exist.
        """
        file_path = os.path.join(self._role_path, subdir)
        if self._loader.path_exists(file_path) and self._loader.is_directory(file_path):
            # Valid extensions and ordering for roles is hard-coded to maintain
            # role portability
            extensions = ['.yml', '.yaml', '.json']
            # If no <main> is specified by the user, look for files with
            # extensions before bare name. Otherwise, look for bare name first.
            if main is None:
                _main = 'main'
                extensions.append('')
            else:
                _main = main
                extensions.insert(0, '')
            found_files = self._loader.find_vars_files(file_path, _main, extensions, allow_dir)
            if found_files:
                data = {}
                for found in found_files:
                    new_data = self._loader.load_from_file(found)
                    if new_data and allow_dir:
                        data = combine_vars(data, new_data)
                    else:
                        data = new_data
                return data
            elif main is not None:
                raise AnsibleParserError("Could not find specified file in role: %s/%s" % (subdir, main))
        return None

    def _load_dependencies(self):
        '''
        Recursively loads role dependencies from the metadata list of
        dependencies, if it exists
        '''

        deps = []
        if self._metadata:
            for role_include in self._metadata.dependencies:
                r = Role.load(role_include, play=self._play, parent_role=self)
                deps.append(r)

        return deps

    # other functions

    def add_parent(self, parent_role):
        ''' adds a role to the list of this roles parents '''
        if not isinstance(parent_role, Role):
            raise AnsibleAssertionError()

        if parent_role not in self._parents:
            self._parents.append(parent_role)

    def get_parents(self):
        return self._parents

    def get_default_vars(self, dep_chain=None):
        # Precedence: dependency defaults < parent-chain defaults < own
        # defaults (later combine_vars calls win).
        dep_chain = [] if dep_chain is None else dep_chain

        default_vars = dict()
        for dep in self.get_all_dependencies():
            default_vars = combine_vars(default_vars, dep.get_default_vars())
        if dep_chain:
            for parent in dep_chain:
                default_vars = combine_vars(default_vars, parent._default_vars)
        default_vars = combine_vars(default_vars, self._default_vars)
        return default_vars

    def get_inherited_vars(self, dep_chain=None):
        # Vars inherited from the parent chain only (not this role's own).
        dep_chain = [] if dep_chain is None else dep_chain

        inherited_vars = dict()

        if dep_chain:
            for parent in dep_chain:
                inherited_vars = combine_vars(inherited_vars, parent._role_vars)
        return inherited_vars

    def get_role_params(self, dep_chain=None):
        dep_chain = [] if dep_chain is None else dep_chain

        params = {}
        if dep_chain:
            for parent in dep_chain:
                params = combine_vars(params, parent._role_params)
        params = combine_vars(params, self._role_params)
        return params

    def get_vars(self, dep_chain=None, include_params=True):
        """Return the fully merged variables for this role: inherited vars,
        dependency vars, play vars, own vars and (optionally) role params,
        in increasing precedence."""
        dep_chain = [] if dep_chain is None else dep_chain

        all_vars = self.get_inherited_vars(dep_chain)

        for dep in self.get_all_dependencies():
            all_vars = combine_vars(all_vars, dep.get_vars(include_params=include_params))

        all_vars = combine_vars(all_vars, self.vars)
        all_vars = combine_vars(all_vars, self._role_vars)
        if include_params:
            all_vars = combine_vars(all_vars, self.get_role_params(dep_chain=dep_chain))

        return all_vars

    def get_direct_dependencies(self):
        return self._dependencies[:]

    def get_all_dependencies(self):
        '''
        Returns a list of all deps, built recursively from all child dependencies,
        in the proper order in which they should be executed or evaluated.
        '''

        child_deps = []

        for dep in self.get_direct_dependencies():
            for child_dep in dep.get_all_dependencies():
                child_deps.append(child_dep)
            child_deps.append(dep)

        return child_deps

    def get_task_blocks(self):
        return self._task_blocks[:]

    def get_handler_blocks(self, play, dep_chain=None):
        # Handlers from dependencies first, then this role's own; each copy
        # remembers the dependency chain that led to it.
        block_list = []

        # update the dependency chain here
        if dep_chain is None:
            dep_chain = []
        new_dep_chain = dep_chain + [self]

        for dep in self.get_direct_dependencies():
            dep_blocks = dep.get_handler_blocks(play=play, dep_chain=new_dep_chain)
            block_list.extend(dep_blocks)

        for task_block in self._handler_blocks:
            new_task_block = task_block.copy()
            new_task_block._dep_chain = new_dep_chain
            new_task_block._play = play
            block_list.append(new_task_block)

        return block_list

    def has_run(self, host):
        '''
        Returns true if this role has been iterated over completely and
        at least one task was run
        '''

        return host.name in self._completed and not self._metadata.allow_duplicates

    def compile(self, play, dep_chain=None):
        '''
        Returns the task list for this role, which is created by first
        recursively compiling the tasks for all direct dependencies, and
        then adding on the tasks for this role.

        The role compile() also remembers and saves the dependency chain
        with each task, so tasks know by which route they were found, and
        can correctly take their parent's tags/conditionals into account.
        '''

        block_list = []

        # update the dependency chain here
        if dep_chain is None:
            dep_chain = []
        new_dep_chain = dep_chain + [self]

        deps = self.get_direct_dependencies()
        for dep in deps:
            dep_blocks = dep.compile(play=play, dep_chain=new_dep_chain)
            block_list.extend(dep_blocks)

        for idx, task_block in enumerate(self._task_blocks):
            new_task_block = task_block.copy()
            new_task_block._dep_chain = new_dep_chain
            new_task_block._play = play
            if idx == len(self._task_blocks) - 1:
                # mark the end-of-role block so the executor knows the role
                # is complete
                new_task_block._eor = True
            block_list.append(new_task_block)

        return block_list

    def serialize(self, include_deps=True):
        """Flatten this Role (and optionally its dependency/parent trees)
        into plain dicts for transfer between processes."""
        res = super(Role, self).serialize()

        res['_role_name'] = self._role_name
        res['_role_path'] = self._role_path
        res['_role_vars'] = self._role_vars
        res['_role_params'] = self._role_params
        res['_default_vars'] = self._default_vars
        res['_had_task_run'] = self._had_task_run.copy()
        res['_completed'] = self._completed.copy()

        if self._metadata:
            res['_metadata'] = self._metadata.serialize()

        if include_deps:
            deps = []
            for role in self.get_direct_dependencies():
                deps.append(role.serialize())
            res['_dependencies'] = deps

        parents = []
        for parent in self._parents:
            parents.append(parent.serialize(include_deps=False))
        res['_parents'] = parents

        return res

    def deserialize(self, data, include_deps=True):
        """Inverse of serialize(): rebuild this Role from plain dicts."""
        self._role_name = data.get('_role_name', '')
        self._role_path = data.get('_role_path', '')
        self._role_vars = data.get('_role_vars', dict())
        self._role_params = data.get('_role_params', dict())
        self._default_vars = data.get('_default_vars', dict())
        self._had_task_run = data.get('_had_task_run', dict())
        self._completed = data.get('_completed', dict())

        if include_deps:
            deps = []
            for dep in data.get('_dependencies', []):
                r = Role()
                r.deserialize(dep)
                deps.append(r)
            setattr(self, '_dependencies', deps)

        parent_data = data.get('_parents', [])
        parents = []
        for parent in parent_data:
            r = Role()
            r.deserialize(parent, include_deps=False)
            parents.append(r)
        setattr(self, '_parents', parents)

        metadata_data = data.get('_metadata')
        if metadata_data:
            m = RoleMetadata()
            m.deserialize(metadata_data)
            self._metadata = m

        super(Role, self).deserialize(data)

    def set_loader(self, loader):
        # Propagate the (unserializable) loader down the parent and
        # dependency trees after deserialization.
        self._loader = loader
        for parent in self._parents:
            parent.set_loader(loader)
        for dep in self.get_direct_dependencies():
            dep.set_loader(loader)
Published 04/21/2019 08:48:00 am at 04/21/2019 08:48:00 am in Walmart Tv Wall Mount 55 Inch. walmart tv wall mount 55 inch vizio 55 inch tv wall mount inch creative wall mount wall mount wall stand ed stand vizio 55 inch tv wall mount walmart tv wall mounts howtoshoppinginfo walmart tv wall mounts wall mount in store wall mount stores walmart tv wall mount. walmart tv stands with mounts universal stands luxury swivel glass walmart tv stands with mounts stands stands inch interior console ideas stand askew white inch mount walmart tv stands with mounts , lcd led plasma flat tilt tv wall mount bracket inch angle lcd led plasma flat tilt tv wall mount bracket inch angle adjustable, vizio tv wall mount walmart wall mounts wall mounting the wall mount vizio tv wall mount walmart wall stand floating stand stand on wall floating stand wall mount vizio tv wall mount walmart , wall mount for tv wall mount stand for wall mounted stunning wall mount for tv wall mount stand for wall mounted stunning contemporary shelf mount best regarding stands inspirations wall mount wall mount inch tv , best full motion wall mount tv sanus walmart canada for most flat full size of full motion tv wall mount inch reviews sanus for princess auto kids , tv shelf mount wall mounted shelf interesting with drawer home tv , tv stand with mount walmart stands with mount stands mount stands tv stand with mount walmart corner mount corner stand with mount corner media cabinet with tv stand with mount walmart , walmart tv mount inch wall mounts for at walmart tv mount inch walmart tv mount inch inch wall mount articulating full motion tilt wall , inch tv stands with mount view a larger image of the transdeco inch tv stands with mount altra tv stand walmart awesome inch tv wall mount , inch adjustable movable folding universal tv stand inch adjustable movable folding universal tv stand pedestal base, tv wall mount at walmart custom mount a project blog mount corner tv wall mount at walmart stand mount 
wall shelves design wall mount stand with shelves soundbar tv wall mount at walmart .
#!/usr/bin/env python3

# Copyright (C) 2020 IBM Corp.
# This program is Licensed under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License. See accompanying LICENSE file.

"""Compare two data files value-by-value within a floating point tolerance."""

import argparse
import ast
import math
import sys


def diff_float(na, nb, threshold):
    """Raise ValueError if any paired elements of na/nb differ by more
    than the absolute tolerance `threshold`."""
    for left, right in zip(na, nb):
        if math.isclose(left, right, abs_tol=threshold):
            continue
        raise ValueError(f"Difference {left - right} between {left} and {right} "
                         f"exceeds threshold {threshold}.")


def makeSameSize(a, b, max_length):
    """Zero-pad the shorter of two lists (in place) so both have the same
    length; raise ValueError if either exceeds `max_length`."""
    lenA, lenB = len(a), len(b)
    if max_length < lenA or max_length < lenB:
        raise ValueError(f"Size of slots for {a}({lenA}) {b}({lenB}) "
                         f"> {max_length}.")
    target = max(lenA, lenB)
    a.extend([0] * (target - lenA))
    b.extend([0] * (target - lenB))
    return (a, b)


def parseCorrectly(la, lb, decrypt):
    """Yield parsed (a, b) line pairs.

    With decrypt=True each line is a Python literal; otherwise each line is
    a comma-separated float list, wrapped as a single-slot structure.
    Raises TypeError when a pair parses to different types.
    """
    error_msg = "Type mismatch. {0}({1}) and {2}({3}) type do not match."

    def checked(a, b):
        # Both sides must decode to the same container type.
        if type(a) is not type(b):
            raise TypeError(error_msg.format(a, type(a), b, type(b)))
        return a, b

    for raw_a, raw_b in zip(la, lb):
        if decrypt:
            yield checked(ast.literal_eval(raw_a), ast.literal_eval(raw_b))
        else:
            parsed_a = [[float(tok) for tok in raw_a.split(",")]]
            parsed_b = [[float(tok) for tok in raw_b.split(",")]]
            yield checked(parsed_a, parsed_b)


def main():
    """CLI entry point: diff two files, exiting with a message on mismatch."""
    parser = argparse.ArgumentParser()
    parser.add_argument("firstfile", help="first data file", type=str)
    parser.add_argument("secondfile", help="second data file", type=str)
    parser.add_argument("--decrypt",
                        help="diff decrypt format (instead of decode)",
                        action='store_true')
    parser.add_argument("--threshold",
                        help="error threshold [default=0.001]",
                        type=float,
                        default=0.001)
    args = parser.parse_args()

    with open(args.firstfile, 'r') as f1, open(args.secondfile, 'r') as f2:
        l1, l2 = list(f1), list(f2)

    if len(l1) != len(l2):
        sys.exit(f"Different number of lines. "
                 f"First contains {len(l1)} second contains {len(l2)}.")
    # The first line is a header and must match exactly.
    if l1[0] != l2[0]:
        sys.exit(f"File headers differ. {l1[0]} {l2[0]}.")

    try:
        for a, b in parseCorrectly(l1[1:], l2[1:], args.decrypt):
            for sa, sb in zip(a, b):
                sa, sb = makeSameSize(sa, sb, 2)
                diff_float(sa, sb, args.threshold)
    except (TypeError, ValueError) as e:
        sys.exit(str(e))


if __name__ == "__main__":
    main()
The Department of Drama in the College of Humanities at the University of Exeter are looking for a new Professor. Drama at Exeter is internationally-renowned for its practice, research and teaching, and will celebrate its 50th anniversary in 2018. It has had notable success in achieving external research income, including several large AHRC awards, Creative Fellow and Academic Fellowship awards, and workshop and practice-based Research Council projects. We foster a strong culture of peer support in relation to staff research, hosting three Research Centres within the Department: the Centre for Contemporary Performance Practices; the Centre for Performance Histories and Cultures; and the Centre for Performance, Science and Community. All eligible Drama staff were submitted to REF2014, in which 95% of our research was graded at internationally recognised levels. Exeter Drama has performed consistently highly in the NSS, and is currently in the top 3 in The Times and Sunday Times Good University Guide 2017. We are one of the largest and best equipped Drama departments in the UK with over twenty permanent staff, eleven practice studios, and over a dozen other seminar, workshop and multimedia spaces. We offer a supportive and stimulating environment, and are well connected internationally, through our split-site programme with NIAS, Bangalore and staff and student exchanges with Austin, Texas. There is a strong tradition of Practice-as-Research. We collaborate with organisations such as the Globe Theatre, the National Theatre, the Drottningholm Theatre, regional and national theatre companies, and public sector bodies such as hospitals, prisons and schools. Researchers also have strong links with the wider creative economy sector, and graduate companies include Arts Council England National Portfolio Organisations, such as Forced Entertainment, Punchdrunk and Theatre Alibi. 
The Department of Drama in the College of Humanities at the University of Exeter are looking for a leading international figure with the ability to attract world-class academics and post-graduate research students. Applicants will be innovative researchers with a strong track record of research funding and international quality publications.The Professor in Drama, Theatre or Performance Studies will contribute to extending the research profile of Drama at Exeter, and to its dynamic teaching environment. We are particularly interested in enhancing leadership in areas related or complementary to international performance practice; or applied, community and socially-engaged performance. The University of Exeter is a Russell Group University in the top one percent of institutions globally. In the last few years we have invested strategically to deliver more than GBP 350 million worth of new facilities across our campuses with plans for significant investment in the future. This full-time post is available from 1 September 2017 on a permanent basis. Applicants are encouraged to contact the Head of Discipline, Professor Stephen Hodge (tel: 01392 724524, email: S.Hodge@exeter.ac.uk) to discuss the post further. You may also wish to consult our web site at http://humanities.exeter.ac.uk for further details of the College. The closing date for completed applications is Sunday 15 January 2017. Shortlisting is expected to take place on Tuesday 24 January 2017 with interviews likely to take place in mid-February 2017.
#!/usr/bin/env python3 # -*- coding: UTF-8 -*- ############################################################################ # # keyboard_list_generator.py # ############################################################################ # # Author: Videonauth <videonauth@googlemail.com> # Date: 09.07.2016 # Purpose: # Generate a word-list for the keyboard sequence check. # Written for: Python 3.5.1 # ############################################################################ de_lowercase = "qwertzuiopü+asdfghjkllöä#yxcvbnm,.-" de_uppercase = "°!§$%&/()=?WERTZUIOPÜ*ASDFGHJKLÖÄ'YXCVBNM;:_" en_lowercase = "-=qwertyuiop[]asdfghjkl;'zxcvbnm,./" en_uppercase = "~!@#$%^&*()_QWERTYUIOP{}ASDFGHJKL:ZXCVBNM<>?" # next line might error out if destination file does not exist with open('../lists/keyboard.wl', 'r+') as file: for a in range(3, len(de_lowercase) + 1): for b in range(len(de_lowercase)): if len(de_lowercase[b: b + a]) == a: file.write(de_lowercase[b: b + a] + '\n') for a in range(3, len(de_uppercase) + 1): for b in range(len(de_uppercase)): if len(de_uppercase[b: b + a]) == a: file.write(de_uppercase[b: b + a] + '\n') for a in range(3, len(en_lowercase) + 1): for b in range(len(en_lowercase)): if len(en_lowercase[b: b + a]) == a: file.write(en_lowercase[b: b + a] + '\n') for a in range(3, len(en_uppercase) + 1): for b in range(len(en_uppercase)): if len(en_uppercase[b: b + a]) == a: file.write(en_uppercase[b: b + a] + '\n') de_lowercasere = de_lowercase[:: -1] de_uppercasere = de_uppercase[:: -1] en_lowercasere = en_lowercase[:: -1] en_uppercasere = en_uppercase[:: -1] for a in range(3, len(de_lowercasere) + 1): for b in range(len(de_lowercasere)): if len(de_lowercasere[b: b + a]) == a: file.write(de_lowercasere[b: b + a] + '\n') for a in range(3, len(de_uppercasere) + 1): for b in range(len(de_uppercasere)): if len(de_uppercasere[b: b + a]) == a: file.write(de_uppercasere[b: b + a] + '\n') for a in range(3, len(en_lowercasere) + 1): for b in 
range(len(en_lowercasere)): if len(en_lowercasere[b: b + a]) == a: file.write(en_lowercasere[b: b + a] + '\n') for a in range(3, len(en_uppercasere) + 1): for b in range(len(en_uppercasere)): if len(en_uppercasere[b: b + a]) == a: file.write(en_uppercasere[b: b + a] + '\n') file.close()
A new study says sleep trackers might be making you sleep poorly. Here's what that really means. A New Study Says Sleep Trackers Might Be Making You Sleep Poorly. Here’s What That Really Means. Long gone are the days of having to calculate how many hours of sleep you got last night. Between fitness trackers, downloadable apps, and the health kits that come on our smartphones, pretty much everyone in the modern world has access to sleep data—how long they slept, and how well—right at their fingertips. But could all that data actually make your sleep worse? That’s the implication from a new study published in the Journal of Clinical Sleep Medicine. Researchers at Rush University highlighted three case studies from their sleep lab, including a 39-year-old man who bought a sleep tracker after having trouble getting a good night’s sleep. As soon as he began checking the tracker on his goal of logging at least eight hours of sleep every night, though, he actually started to feel worse than before he had the facts at his fingertips. Similarly, a 27-year-old woman complained of feeling “unrefreshed” after waking up from what her device said was a poor night’s sleep, but, once in a laboratory setting separated from her tracker, she actually slept soundly and in a state of deep sleep. Baron readily admits trackers are certainly not all bad. And for healthy sleepers, trackers can indeed help raise awareness about sleep, says Jessica Payne, Ph.D., associate professor of psychology who runs the Sleep, Stress, and Memory Lab at the University of Notre Dame, who wasn’t involved in the study. “Just like if you learn that, at the end of the day, you’ve only walked 5000 steps, finding that you’ve only slept five or six hours can motivate you to do better the next day,” Payne explains. It’s when people start obsessing over the data that trackers become counterproductive.
And even though this study was small, highlighting only three case studies, both experts say this anxiety from data is a pattern they see in their clinical practice and research. The problem is this: When armed with the most detailed data on your sleep, you start stressing about not getting enough shuteye, which intensifies what Payne calls the “sleep, stress snowball”—the more stressed you are, the less or more poorly you sleep. In turn, the more sleep deprived you are, the more stress your brain and body are under. “That’s a powerful negative association that can be difficult to unlearn,” Payne adds. What’s more, your sleep is controlled by homeostatic sleep pressure, Baron explains, which basically says the longer you’re awake, the sleepier you are. Contrarily, the longer someone spends in bed attempting to snooze (which often happens with insomniacs), the more people have difficulty falling asleep, the more frequently they wake up, and the harder of a time they have getting back to dreamland. If you love your tracker, we’re certainly not saying toss it aside. After all, we hear countless stories about guys who didn’t realize they were in that health- and hormone-compromising zone of under six hours a night until they started checking a tracker. But take sleep data with a grain of salt, because, in reality, it’s not steadfastly reliable. “I’ve tried most trackers and wear one myself,” Baron says. “I find the data to be motivating for my sleep and steps—they just aren’t very accurate about measuring sleep.” She’s seen trackers err by up to 45 minutes when compared to lab measurements. It’s really the focus on a specific number that’s most often at blame for sleep trackers screwing with people’s sleep, Baron says. “Don’t fixate on making it perfect each day—instead, look for the overall trend toward a consistent sleep schedule,” she says.
from matplotlib.colors import LightSource # Adapted from https://github.com/jobar8/graphics def alpha_blend(rgb, intensity, alpha=0.7): return alpha * rgb + (1 - alpha) * intensity def get_hs(data, cmap, norm=None, zf=10, azdeg=315, altdeg=45, dx=1, dy=1, fraction=1.5, blend_mode='alpha', alpha=0.7, **kwargs_norm): ls = LightSource(azdeg, altdeg) if blend_mode == 'alpha': # transparency blending rgb = ls.shade(data, cmap=cmap, norm=norm, blend_mode=alpha_blend, vert_exag=zf, dx=dx, dy=dy, fraction=fraction, alpha=alpha, **kwargs_norm) else: rgb = ls.shade(data, cmap=cmap, norm=norm, blend_mode=blend_mode, vert_exag=zf, dx=dx, dy=dy, fraction=fraction, **kwargs_norm) return rgb
With such a vast architectural, historical and cultural heritage, India has much to offer travellers, however the process of obtaining an Indian tourist visa has, until now, been considered a formidable task. Martin Randall Travel, along with many others in the travel industry, has welcomed the news of the extension of the e-visa to British citizens, which will make obtaining an Indian visa simpler, quicker and less expensive. Visitors from countries all over the globe including Australia, Canada, the USA and, since 15th August 2015, the UK can now apply for the e-visa which costs £39 plus a small administration fee, and apply, pay and upload documents online. The visa can be applied for 30 days in advance of entry and is valid for a stay of up to 30 days, but cannot be extended. James Palmer, Operations Executive for our India tours, says “This is a very encouraging development for British nationals travelling to India. It is no longer necessary to surrender one's passport before departure or go through the tedious process of queuing at a visa centre”.
# -*- coding: utf-8 -*- ''' ____ ___ ____________ ___ ___ ____ _________________ / __ \/ _ | / __/ _/ __/ / _ \/ _ \/ __ \__ / / __/ ___/_ __/ / /_/ / __ |_\ \_/ /_\ \ / ___/ , _/ /_/ / // / _// /__ / / \____/_/ |_/___/___/___/ /_/ /_/|_|\____/\___/___/\___/ /_/ Operational Aid Source for Infra-Structure Created on 2020. 3. 18.. @author: Hye-Churn Jang, CMBU Specialist in Korea, VMware [jangh@vmware.com] ''' #=============================================================================== # Prepare PostgreSQL Server #=============================================================================== # docker run --name postgres -p 5432:5432 -e POSTGRES_PASSWORD=password -e POSTGRES_USER=pygics -e POSTGRES_DB=pygicsdb -d postgres from pygics import load, logInfo load('modules.postgres') # Login Database SDK.PygicsDB.system('localhost:5432', 'pygics', 'password') # "User" Table at "PygicsDB" Database User = SDK.PygicsDB.User logInfo('Create Users') with SDK.PygicsDB: # Open Transaction for Create Records User('Tony', 'Tony Stark', 'IronMan') User('Peter', 'Peter Parker', 'SpiderMan') User('Peter', 'Peter Pan', 'Elf') logInfo('Get All Users\n{}'.format(User.list())) # query form based SQLAlchemy logInfo('Find All Peters\n{}'.format(User.list(User.name == 'Peter', order='id'))) with SDK.PygicsDB: # Open Transaction tony = User.list(User.name == 'Tony')[0] tony.nickname = 'Avengers Leader' # Update Data tony.update() logInfo('Check Tony Changed\n{}'.format(User.list(User.name == 'Tony'))) logInfo('Delete All Users') with SDK.PygicsDB: # Open Transaction for Delete for user in User.list(): user.delete() logInfo('Check Users Empty\n{}'.format(User.list()))
Feel at home at our free ringtones website, where you can acquire the most popular ringtones for your phone. Today we have a ringtone of “Patrick ryan” by Symphony - Patrick Ryan as well as 2 other ringtones by Symphony - Patrick Ryan. You can listen to all 2 Symphony - Patrick Ryan – “Patrick ryan” ringtone variations available here and download it for free to your PC. You can also examine other albums produced by Symphony - Patrick Ryan for all time. “Patrick ryan” is referred to, and Audiko offers a huge amount of songs absolutely for free. And finally, we have collected lyrics for this track and you can view it here as well. So feel free to browse, listen, download ringtones at Audiko.net. Make your phone sound cool! Download Symphony - Patrick Ryan by Patrick ryan at Audiko fast and FREE!
'''
Support routines for SCG optimisation.

Gradients and related statistics are cached as per-input .npy files inside a
working folder; every routine below either aggregates over those files or
rewrites them.  The code depends on the parallel implementation framework,
but may change for other optimisers.
'''

import glob
import time
import numpy
from os.path import splitext
from local_MapReduce import load, save

# Wall-clock accounting: one list of elapsed times per routine.
time_acc = {
    'embeddings_set_grads' : [],
    'embeddings_get_grads_mu' : [],
    'embeddings_get_grads_kappa' : [],
    'embeddings_get_grads_theta' : [],
    'embeddings_get_grads_current_grad' : [],
    'embeddings_get_grads_gamma' : [],
    'embeddings_get_grads_max_d' : [],
    'embeddings_set_grads_reset_d' : [],
    'embeddings_set_grads_update_d' : [],
    'embeddings_set_grads_update_X' : [],
    'embeddings_set_grads_update_grad_old' : [],
    'embeddings_set_grads_update_grad_new' : [],
}


def _files(folder, kind):
    '''Sorted list of the cached .npy files of the given kind in folder.'''
    return sorted(glob.glob(folder + '/*.' + kind + '.npy'))


def _base(file_name):
    '''Strip the two trailing extensions (e.g. ".grad_new.npy") from a path.'''
    return splitext(splitext(file_name)[0])[0]


def _clock(key, start):
    '''Record the elapsed wall-clock time since start under key.'''
    time_acc[key] += [time.time() - start]


'''
Initialisation for local statistics
'''
def embeddings_set_grads(folder):
    '''
    Initialise the per-input optimisation state from the latest gradients:
    grad_new and grad_old both start as grad_latest, and the search
    direction starts as the negative gradient.  Runs locally; could be moved
    onto remote nodes, but the work involved is cheap.
    '''
    start = time.time()
    for file_name in _files(folder, 'grad_latest'):
        grads = load(file_name)
        base = _base(file_name)
        # Save grad new as the latest grad evaluated
        save(base + '.grad_new.npy', grads)
        # Init the old grad to be grad new
        save(base + '.grad_old.npy', grads)
        # Save the direction as the negative grad
        save(base + '.grad_d.npy', -1 * grads)
    _clock('embeddings_set_grads', start)


'''
Getters for local statistics
'''
def embeddings_get_grads_mu(folder):
    '''Sum over the inputs of the inner product of the direction and grad_new.'''
    start = time.time()
    mu = 0
    for new_file, d_file in zip(_files(folder, 'grad_new'), _files(folder, 'grad_d')):
        mu += (load(new_file) * load(d_file)).sum()
    _clock('embeddings_get_grads_mu', start)
    return mu


def embeddings_get_grads_kappa(folder):
    '''Sum over the inputs of the inner product of the direction with itself.'''
    start = time.time()
    kappa = 0
    for d_file in _files(folder, 'grad_d'):
        direction = load(d_file)
        kappa += (direction * direction).sum()
    _clock('embeddings_get_grads_kappa', start)
    return kappa


def embeddings_get_grads_theta(folder):
    '''Sum over the inputs of the inner product of the direction and
    (grad_latest - grad_new).'''
    start = time.time()
    theta = 0
    triples = zip(_files(folder, 'grad_latest'),
                  _files(folder, 'grad_d'),
                  _files(folder, 'grad_new'))
    for latest_file, d_file, new_file in triples:
        direction = load(d_file)
        theta += (direction * (load(latest_file) - load(new_file))).sum()
    _clock('embeddings_get_grads_theta', start)
    return theta


def embeddings_get_grads_current_grad(folder):
    '''Sum over the inputs of the inner product of grad_new with itself.'''
    start = time.time()
    current_grad = 0
    for new_file in _files(folder, 'grad_new'):
        grad_new = load(new_file)
        current_grad += (grad_new * grad_new).sum()
    _clock('embeddings_get_grads_current_grad', start)
    return current_grad


def embeddings_get_grads_gamma(folder):
    '''Sum over the inputs of the inner product of grad_old and grad_new.'''
    start = time.time()
    gamma = 0
    for new_file, old_file in zip(_files(folder, 'grad_new'), _files(folder, 'grad_old')):
        gamma += (load(new_file) * load(old_file)).sum()
    _clock('embeddings_get_grads_gamma', start)
    return gamma


def embeddings_get_grads_max_d(folder, alpha):
    '''Largest absolute element of alpha * direction over all input files.'''
    start = time.time()
    max_d = 0
    for d_file in _files(folder, 'grad_d'):
        max_d = max(max_d, numpy.max(numpy.abs(alpha * load(d_file))))
    _clock('embeddings_get_grads_max_d', start)
    return max_d


'''
Setters for local statistics
'''
def embeddings_set_grads_reset_d(folder):
    '''Reset the direction to be the negative of grad_new.'''
    start = time.time()
    for file_name in _files(folder, 'grad_new'):
        save(_base(file_name) + '.grad_d.npy', -1 * load(file_name))
    _clock('embeddings_set_grads_reset_d', start)


def embeddings_set_grads_update_d(folder, gamma):
    '''For each input, set direction <- gamma * direction - grad_new.'''
    start = time.time()
    for new_file, d_file in zip(_files(folder, 'grad_new'), _files(folder, 'grad_d')):
        save(d_file, gamma * load(d_file) - load(new_file))
    _clock('embeddings_set_grads_update_d', start)


def embeddings_set_grads_update_X(folder, alpha):
    '''Move the embeddings and variances one step along the direction:
    X <- X + alpha * direction.'''
    start = time.time()
    triples = zip(_files(folder, 'grad_d'),
                  _files(folder, 'embedding'),
                  _files(folder, 'variance'))
    for d_file, mu_file, S_file in triples:
        direction = load(d_file)
        # direction[0] moves the embedding means, direction[1] the variances.
        save(mu_file, load(mu_file) + alpha * direction[0])
        save(S_file, load(S_file) + alpha * direction[1])
    _clock('embeddings_set_grads_update_X', start)


def embeddings_set_grads_update_grad_old(folder):
    '''Set grad_old to be grad_new.'''
    start = time.time()
    for file_name in _files(folder, 'grad_new'):
        save(_base(file_name) + '.grad_old.npy', load(file_name))
    _clock('embeddings_set_grads_update_grad_old', start)


def embeddings_set_grads_update_grad_new(folder):
    '''Set grad_new to be grad_latest (a temp grad that keeps changing every
    evaluation).'''
    start = time.time()
    for file_name in _files(folder, 'grad_latest'):
        save(_base(file_name) + '.grad_new.npy', load(file_name))
    _clock('embeddings_set_grads_update_grad_new', start)
For years, Sumiko has provided the market with wonderful sounding cartridges. With moving magnet designs, as well as low and high output moving coils, Sumiko has a solution for just about anyone's needs. At the top of their line lies the flagship Palo Santos Presentation. To say that it's a departure for them is an understatement. Sure, the body looks similar to the Celebration II, but that's where the similarities end. The Palo Santos has by far the smoothest sound of any Sumiko cartridge I've heard. The details in the music are present, but never overbearing. Bass is round, and highs are gentle. The midrange blooms like a rose, and vocals are immediate. Until now, Sumiko cartridges have been known for being bright and lively, if a little tipped up. The Palo Santos is the exact opposite, offering one of the more romantic sounds on the market today. This cartridge is truly for music lovers.
#!usr/bin/python # -*- coding: utf-8 -*- __plugins__ = ('BwLuminosity', 'BwGreen', 'BwOrange', 'BwRed', 'BwYellow', 'BwInfrared') __version__ = '2011-03-20' __author__ = 'Karol Będkowski' __copyright__ = "Copyright (c) Karol Będkowski, 2011" import ImageOps from photomagick.common import colors from photomagick.common.base_filter import BaseFilter from photomagick.common.const import CATEGORY_BASE class BwLuminosity(BaseFilter): STEPS = 3 NAME = _("BW Luminosity") CATEGORY = CATEGORY_BASE def process(self, image): yield 'Start...', image image = colors.convert_to_luminosity(image) yield 'Contrast...', image image = ImageOps.autocontrast(image) yield 'Done', image class _BwFilter(BaseFilter): STEPS = 3 NAME = 'BW Filter' CATEGORY = CATEGORY_BASE _COLOR = (1, 1, 1) def process(self, image): yield 'Start...', image image = colors.color_mixer_monochrome(image, *self._COLOR) yield 'Contrast...', image image = ImageOps.autocontrast(image) yield 'Done', image class BwGreen(_BwFilter): NAME = _('BW Green Filter') _COLOR = 0.04, 0.27, 0.08 class BwOrange(_BwFilter): NAME = _('BW Orange Filter') _COLOR = (0.31, 0.09, 0) class BwRed(_BwFilter): NAME = _('BW Red Filter') _COLOR = (0.35, 0.04, 0) class BwYellow(_BwFilter): NAME = _('BW Yellow Filter') _COLOR = (0.24, 0.11, 0.05) class BwInfrared(_BwFilter): NAME = _('BW Infrared') _COLOR = (0.15, 1.15, -0.30)
Consultant – The European Consultancy Hub. Control over your destiny and increased earning potential. Isn’t that why you decided to be a consultant? You have the freedom to travel the globe and seek the most fulfilling and financially rewarding role. You may just be starting out or have honed a skill-set over the years to pitch for high-profile boardroom roles. Your network is everything to you, to ensure that you remain ahead of the curve.
# Copyright (C) 2016 Reed Anderson.
# From: https://github.com/ReedAnders/deepmap
# License: MIT BY https://opensource.org/licenses/MIT

import pickle, os, binascii

from collections import deque
import numpy as np
from math import exp
from random import random


class NodeMap:
    """A graph of input, latent and output nodes embedded at random 3-D
    coordinates; nodes within Euclidean distance 0.3 are neighbours."""

    def __init__(self, input_node_population=12, output_node_population=1,
                 latent_node_population=400):
        self.coordinate_map = []
        self.input_nodes = [InputNode() for node in range(input_node_population)]
        self.output_nodes = [OutputNode() for node in range(output_node_population)]
        self.latent_nodes = [LatentNode() for node in range(latent_node_population)]
        self.all_nodes = self.input_nodes + self.output_nodes + self.latent_nodes

    def construct_map(self):
        """Record every node's coordinates, wire up neighbourhoods, and seed
        each node's fan-in values."""
        for node in self.all_nodes:
            self.coordinate_map.append((node.name, node.coordinates))
        for node in self.all_nodes:
            node.find_neighbors(self.coordinate_map)
        self.update_input_values()

    def calculate_dimensions(self):
        """Return the length of the flat parameter vector: two topology
        parameters per node plus one weight per true neighbour."""
        n_params = 0
        for node in self.all_nodes:
            n_params += 2
            n_params += len(node.true_neighbor_index)
        return n_params

    def error(self, correct_labels, predicted_labels):
        """Return the summed squared error between the two label vectors.

        Bug fix: the original appended the identical (loop-invariant) sum
        once per training pattern and then divided by the pattern count,
        which always reduced back to the single sum; it is computed once
        here.
        """
        return float(sum([(y - o) ** 2
                          for y, o in zip(correct_labels, predicted_labels)]))

    def train(self, training_patterns, param):
        """Evaluate every training pattern and return (error, fitness) for
        the last one.  Each pattern is inputs followed by labels."""
        error = None
        fitness = None
        n_labels = len(self.output_nodes)
        for pattern in training_patterns:
            inputs = pattern[:-n_labels]
            c_labels = pattern[-n_labels:]
            p_labels = self.evaluate_topology(inputs, param)
            error = self.error(c_labels, p_labels)
            fitness = 1 - error
            # print-as-function so the file runs under both Python 2 and 3
            # (the original used a Python 2 print statement).
            print('ERROR: %r' % (error,))
        return error, fitness

    def evaluate_topology(self, data, param):
        """Load `data` into the input nodes, propagate through the weights,
        and return the output node values.

        NOTE(review): evaluate_weights() already sweeps every latent and
        output node, yet it is invoked once per node below (as in the
        original); combined with update_input_values() appending to each
        node's input_values on every sweep, the fan-in lists grow with each
        call.  Preserved as-is to keep the original numeric behaviour --
        confirm before relying on it.
        """
        for index, node in enumerate(self.input_nodes):
            node.value = float(data[index])

        # Split the flat parameter vector: weights first, then two topology
        # parameters per latent/output node (the topology half is unused).
        p_len = len(param)
        t_len = len(self.latent_nodes + self.output_nodes) * 2
        w_len = p_len - t_len
        w_para = param[:w_len]

        for node in self.latent_nodes + self.output_nodes:
            self.evaluate_weights(w_para)

        # Return predicted labels
        return [node.value for node in self.output_nodes]

    def evaluate_weights(self, param):
        """Pop one weight per neighbour for each latent/output node, apply
        the sigmoid activation, then refresh every node's fan-in values."""
        w_para = deque(param)
        for node in self.latent_nodes + self.output_nodes:
            neighbors = len(node.true_neighbor_index)
            node_weight_params = [w_para.popleft() for _i in range(neighbors)]
            node.eval_sigmoid(node_weight_params)
        self.update_input_values()

    def update_input_values(self):
        """Append each neighbour's current value to the node's fan-in list.

        NOTE(review): values are appended without clearing, so repeated
        calls grow input_values -- kept to match the original behaviour.
        """
        for node in self.output_nodes + self.latent_nodes:
            for index in node.true_neighbor_index:
                node.input_values.append(self.all_nodes[index].value)


class Node:
    """A node at a random coordinate in the unit cube; neighbourhood is
    decided by Euclidean distance (< 0.3)."""

    def __init__(self, dimensions=3):
        self.name = binascii.b2a_hex(os.urandom(8))
        self.coordinates = np.array([random() for i in range(dimensions)])
        self.neighbors = []
        self.true_neighbor_index = []
        self.optimal_neighbor_set = set()
        self.value = 0.0

    def find_neighbors(self, coordinate_map):
        """Mark every map entry within distance 0.3 as a true neighbour.

        Note: a node's own entry (distance 0) also qualifies.
        """
        for index, node in enumerate(coordinate_map):
            if np.linalg.norm(self.coordinates - node[1]) < 0.3:
                self.true_neighbor_index.append(index)
                self.neighbors.append((node, True))
            else:
                self.neighbors.append((node, False))

    # Two parameters between -1, 1
    def eval_neighbors(self, lower_bound, upper_bound):
        """Keep only true neighbours whose distance falls strictly inside
        (lower_bound, upper_bound)."""
        for index in self.true_neighbor_index:
            dist = np.linalg.norm(self.coordinates - self.neighbors[index][0][1])
            if lower_bound < dist < upper_bound:
                self.optimal_neighbor_set.add(index)
class InputNode(Node): def __init__(self): Node.__init__(self) class LatentNode(Node): def __init__(self): Node.__init__(self) self.value = random() self.input_values = [] # Multiple parameters for n weights -1, 1 def eval_sigmoid(self, weights): x = sum([w*v for w,v in zip(weights, self.input_values)]) self.value = 1 / (1 + exp(-x)) class OutputNode(LatentNode): def __init__(self): LatentNode.__init__(self)
We have worked with many different business owners in the Camp Meeker area, and 99% have been satisfied with our service for 10 or even 20 years. We are always on time in delivering your construction dumpster in Camp Meeker, CA since we know that you follow a strict schedule. You can be sure that your dumpster will arrive at the desired location before you start your project. Call us now at 888-609-4426 to request a free quote. We do not only provide you with a quality dumpster for rent in Camp Meeker, CA, but we also offer the most competitive rates. You don’t need to look any further; we have the best and most cost-effective dumpsters in Camp Meeker. Most other dumpster rental companies offer very attractive prices, but this can be a trick – in the final bill you may see additional taxes and fees. We don’t do business that way – you pay exactly the amount you see in the initial quote. If you get in touch with us, you will get fast and reliable service as well as on-time pickup of your construction dumpster in Camp Meeker, CA. You can call us and our friendly staff will gladly assist you in choosing the most suitable dumpster for your project. You can choose several dumpsters of different sizes for different waste types; e.g. 10 square feet volume for metal scrap and 40 square feet volume for lightweight garbage. We have serviced the Camp Meeker area for 20 years, and we know exactly what every contractor, home owner and property manager needs. We also provide easy payment methods which include check, cash and major credit cards. We work closely with every client and we always base our price quote on the amount and type of material you will be putting in the containers. Our construction dumpsters in Camp Meeker, CA can be delivered to your location, residential driveway or anything in between (street, parking lots, etc.). We want to help you grow your business!
#!/usr/bin/env python

"""
Copyright 2016 Aaron Stephens <aaronjst93@gmail.com>

Licensed under the Apache License, Version 2.0 (the "License"); you may not
use this file except in compliance with the License. You may obtain a copy of
the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
License for the specific language governing permissions and limitations under
the License.
"""

# CPU Types, CPU Subtypes, Filetypes, Load Commands, Flags as defined in the
# following official Apple, inc. header files:
#
# /usr/include/mach/machine.h
# /usr/include/mach-o/loader.h

# Code-signing certificate slot indices (negative values count back from the
# leaf certificate).
cert_slots = {
    -1: 'root',
    0: 'leaf'
}

# Hash algorithm identifiers used in the code-signing CodeDirectory.
hashes = {
    0: 'No Hash',
    1: 'SHA-1',
    2: 'SHA-256'
}

# Segment command flag bits (SG_* constants in loader.h).
segment_flags = {
    1: 'HIGHVM',
    2: 'FVMLIB',
    4: 'NORELOC',
    8: 'PROTECTED_VERSION_1'
}

# Symbol table entry types (N_TYPE field of an nlist entry).
n_types = {
    0x0: 'UNDF',
    0x2: 'ABS',
    0xe: 'SECT',
    0xc: 'PBUD',
    0xa: 'INDR'
}

# Mach-O magic numbers -> (is_64_bit, is_little_endian).
machos = {
    4277009102: (False, False),  # 32 bit, big endian
    4277009103: (True, False),   # 64 bit, big endian
    3472551422: (False, True),   # 32 bit, little endian
    3489328638: (True, True)     # 64 bit, little endian
}

# Code-signing requirement types.
requirements = {
    1: 'HostRequirementType',
    2: 'GuestRequirementType',
    3: 'DesignatedRequirementType',
    4: 'LibraryRequirementType',
    5: 'PluginRequirementType',
}

# Code-signing SuperBlob slot indices.
# NOTE(review): name kept as "indeces" (sic) for backward compatibility.
indeces = {
    0: 'CodeDirectorySlot',
    1: 'InfoSlot',
    2: 'RequirementsSlot',
    3: 'ResourceDirSlot',
    4: 'ApplicationSlot',
    5: 'EntitlementSlot',
    0x10000: 'SignatureSlot'
}

# Match operators used inside requirement expressions.
matches = {
    0: 'matchExists',
    1: 'matchEqual',
    2: 'matchContains',
    3: 'matchBeginsWith',
    4: 'matchEndsWith',
    5: 'matchLessThan',
    6: 'matchGreaterThan',
    7: 'matchLessEqual',
    8: 'matchGreaterEqual'
}

# VM protection bit triplets rendered Unix-style (read/write/execute).
protections = {
    0b000: '---',
    0b001: 'r--',
    0b010: '-w-',
    0b011: 'rw-',
    0b100: '--x',
    0b101: 'r-x',
    0b110: '-wx',
    0b111: 'rwx'
}

# Code-signing blob magic numbers.
signatures = {
    'REQUIREMENT': 0xfade0c00,
    'REQUIREMENTS': 0xfade0c01,
    'CODEDIRECTORY': 0xfade0c02,
    'ENTITLEMENT': 0xfade7171,
    'BLOBWRAPPER': 0xfade0b01,
    'EMBEDDED_SIGNATURE': 0xfade0cc0,
    'DETACHED_SIGNATURE': 0xfade0cc1,
    'CODE_SIGN_DRS': 0xfade0c05
}

# Section attribute flag bits (S_ATTR_* in loader.h).
section_attrs = {
    0x80000000: 'PURE_INSTRUCTIONS',
    0x40000000: 'NO_TOC',
    0x20000000: 'STRIP_STATIC_SYMS',
    0x10000000: 'NO_DEAD_STRIP',
    0x08000000: 'LIVE_SUPPORT',
    0x04000000: 'SELF_MODIFYING_CODE',
    0x02000000: 'DEBUG',
    0x00000400: 'SOME_INSTRUCTIONS',
    0x00000200: 'EXT_RELOC',
    0x00000100: 'LOC_RELOC'
}

# Mach-O file types (MH_* in loader.h).
filetypes = {
    1: 'OBJECT',
    2: 'EXECUTE',
    3: 'FVMLIB',
    4: 'CORE',
    5: 'PRELOAD',
    6: 'DYLIB',
    7: 'DYLINKER',
    8: 'BUNDLE',
    9: 'DYLIB_STUB',
    10: 'DSYM',
    11: 'KEXT_BUNDLE'
}

# Section types (low byte of a section's flags field).
section_types = {
    0x0: 'REGULAR',
    0x1: 'ZEROFILL',
    0x2: 'CSTRING_LITERALS',
    0x3: '4BYTE_LITERALS',
    0x4: '8BYTE_LITERALS',
    0x5: 'LITERAL_POINTERS',
    0x6: 'NON_LAZY_SYMBOL_POINTERS',
    0x7: 'LAZY_SYMBOL_POINTERS',
    0x8: 'SYMBOL_STUBS',
    0x9: 'MOD_INIT_FUNC_POINTERS',
    0xa: 'MOD_TERM_FUNC_POINTERS',
    0xb: 'COALESCED',
    0xc: 'GB_ZEROFILL',
    0xd: 'INTERPOSING',
    0xe: '16BYTE_LITERALS',
    0xf: 'DTRACE_DOF',
    0x10: 'LAZY_DYLIB_SYMBOL_POINTERS',
    0x11: 'THREAD_LOCAL_REGULAR',
    0x12: 'THREAD_LOCAL_ZEROFILL',
    0x13: 'THREAD_LOCAL_VARIABLES',
    0x14: 'THREAD_LOCAL_VARIABLE_POINTERS',
    0x15: 'THREAD_LOCAL_INIT_FUNCTION_POINTERS'
}

# Requirement expression opcodes.
operators = {
    0: 'False',
    1: 'True',
    2: 'Ident',
    3: 'AppleAnchor',
    4: 'AnchorHash',
    5: 'InfoKeyValue',
    6: 'And',
    7: 'Or',
    8: 'CDHash',
    9: 'Not',
    10: 'InfoKeyField',
    11: 'CertField',
    12: 'TrustedCert',
    13: 'TrustedCerts',
    14: 'CertGeneric',
    15: 'AppleGenericAnchor',
    16: 'EntitlementField',
    17: 'CertPolicy',
    18: 'NamedAnchor',
    19: 'NamedCode',
    20: 'Platform'
}

# x86 thread state flavors (LC_THREAD / LC_UNIXTHREAD payloads).
thread_states = {
    1: 'x86_THREAD_STATE32',
    2: 'x86_FLOAT_STATE32',
    3: 'x86_EXCEPTION_STATE32',
    4: 'x86_THREAD_STATE64',
    5: 'x86_FLOAT_STATE64',
    6: 'x86_EXCEPTION_STATE64',
    7: 'x86_THREAD_STATE',
    8: 'x86_FLOAT_STATE',
    9: 'x86_EXCEPTION_STATE',
    10: 'x86_DEBUG_STATE32',
    11: 'x86_DEBUG_STATE64',
    12: 'x86_DEBUG_STATE',
    13: 'THREAD_STATE_NONE',
    14: 'x86_SAVED_STATE_1 (INTERNAL ONLY)',
    15: 'x86_SAVED_STATE_2 (INTERNAL ONLY)',
    16: 'x86_AVX_STATE32',
    17: 'x86_AVX_STATE64',
    18: 'x86_AVX_STATE'
}

# Mach-O header flag bits (MH_* flags in loader.h).
flags = {
    1: 'NOUNDEFS',
    2: 'INCRLINK',
    4: 'DYLDLINK',
    8: 'BINDATLOAD',
    16: 'PREBOUND',
    32: 'SPLIT_SEGS',
    64: 'LAZY_INIT',
    128: 'TWOLEVEL',
    256: 'FORCE_FLAT',
    512: 'NOMULTIDEFS',
    1024: 'NOFIXPREBINDING',
    2048: 'PREBINDABLE',
    4096: 'ALLMODSBOUND',
    8192: 'SUBSECTIONS_VIA_SYMBOLS',
    16384: 'CANONICAL',
    32768: 'WEAK_DEFINES',
    65536: 'BINDS_TO_WEAK',
    131072: 'ALLOW_STACK_EXECUTION',
    262144: 'ROOT_SAFE',
    524288: 'SETUID_SAFE',
    1048576: 'NOREEXPORTED_DYLIBS',
    2097152: 'PIE',
    4194304: 'DEAD_STRIPPABLE_DYLIB',
    8388608: 'HAS_TLV_DESCRIPTORS',
    16777216: 'NO_HEAP_EXECUTION',
    33554432: 'APP_EXTENSION_SAFE'
}

# STAB debugging symbol types (N_* values from stab.h).
stabs = {
    0x20: 'GSYM',
    0x22: 'FNAME',
    0x24: 'FUN',
    0x26: 'STSYM',
    0x28: 'LCSYM',
    0x2a: 'MAIN',
    0x2e: 'BNSYM',
    0x30: 'PC',
    0x32: 'AST',
    0x3a: 'MAC_UNDEF',
    0x3c: 'OPT',
    0x40: 'RSYM',
    0x44: 'SLINE',
    0x46: 'DSLINE',
    0x48: 'BSLINE',
    0x4e: 'ENSYM',
    0x60: 'SSYM',
    0x64: 'SO',
    0x66: 'OSO',
    0x80: 'LSYM',
    0x82: 'BINCL',
    0x84: 'SOL',
    0x86: 'PARAMS',
    0x88: 'VERSION',
    0x8a: 'OLEVEL',
    0xa0: 'PSYM',
    0xa2: 'EINCL',
    0xa4: 'ENTRY',
    0xc0: 'LBRAC',
    0xc2: 'EXCL',
    0xe0: 'RBRAC',
    0xe2: 'BCOMM',
    0xe4: 'ECOMM',
    0xe8: 'ECOML',
    0xfe: 'LENG'
}

# Load command types (LC_* in loader.h). Values >= 2147483648 carry the
# LC_REQ_DYLD bit (0x80000000).
loadcommands = {
    1: 'SEGMENT',
    2: 'SYMTAB',
    3: 'SYMSEG',
    4: 'THREAD',
    5: 'UNIXTHREAD',
    6: 'LOADFVMLIB',
    7: 'IDFVMLIB',
    8: 'IDENT',
    9: 'FVMFILE',
    10: 'PREPAGE',
    11: 'DYSYMTAB',
    12: 'LOAD_DYLIB',
    13: 'ID_DYLIB',
    14: 'LOAD_DYLINKER',
    15: 'ID_DYLINKER',
    16: 'PREBOUND_DYLIB',
    17: 'ROUTINES',
    18: 'SUB_FRAMEWORK',
    19: 'SUB_UMBRELLA',
    20: 'SUB_CLIENT',
    21: 'SUB_LIBRARY',
    22: 'TWOLEVEL_HINTS',
    23: 'PREBIND_CKSUM',
    25: 'SEGMENT_64',
    26: 'ROUTINES_64',
    27: 'UUID',
    29: 'CODE_SIGNATURE',
    30: 'SEGMENT_SPLIT_INFO',
    32: 'LAZY_LOAD_DYLIB',
    33: 'ENCRYPTION_INFO',
    34: 'DYLD_INFO',
    36: 'VERSION_MIN_MACOSX',
    37: 'VERSION_MIN_IPHONEOS',
    38: 'FUNCTION_STARTS',
    39: 'DYLD_ENVIRONMENT',
    41: 'DATA_IN_CODE',
    42: 'SOURCE_VERSION',
    43: 'DYLIB_CODE_SIGN_DRS',
    44: 'ENCRYPTION_INFO_64',
    45: 'LINKER_OPTION',
    46: 'LINKER_OPTIMIZATION_HINT',
    47: 'VERSION_MIN_TVOS',
    48: 'VERSION_MIN_WATCHOS',
    49: 'NOTE',
    50: 'BUILD_VERSION',
    2147483672: 'LOAD_WEAK_DYLIB',
    2147483676: 'RPATH',
    2147483679: 'REEXPORT_DYLIB',
    2147483682: 'DYLD_INFO_ONLY',
    2147483683: 'LOAD_UPWARD_DYLIB',
    2147483688: 'MAIN',
}

# CPU Types & Subtypes as defined in
# http://opensource.apple.com/source/cctools/cctools-822/include/mach/machine.h
# Outer key: cputype; inner key: cpusubtype (-2 names the family itself).
# Inner keys offset by 2147483648 (0x80000000) carry the LIB64 subtype bit.
cputypes = {
    -1: {
        -2: 'ANY',
        -1: 'MULTIPLE',
        0: 'LITTLE_ENDIAN',
        1: 'BIG_ENDIAN'
    },
    1: {
        -2: 'VAX',
        -1: 'MULTIPLE',
        0: 'VAX_ALL',
        1: 'VAX780',
        2: 'VAX785',
        3: 'VAX750',
        4: 'VAX730',
        5: 'UVAXI',
        6: 'UVAXII',
        7: 'VAX8200',
        8: 'VAX8500',
        9: 'VAX8600',
        10: 'VAX8650',
        11: 'VAX8800',
        12: 'UVAXIII'
    },
    6: {
        -2: 'MC680x0',
        -1: 'MULTIPLE',
        1: 'MC680x0_ALL or MC68030',
        2: 'MC68040',
        3: 'MC68030_ONLY'
    },
    7: {
        -2: 'X86 (I386)',
        -1: 'MULITPLE',
        0: 'INTEL_MODEL_ALL',
        3: 'X86_ALL, X86_64_ALL, I386_ALL, or 386',
        4: 'X86_ARCH1 or 486',
        5: '586 or PENT',
        8: 'X86_64_H or PENTIUM_3',
        9: 'PENTIUM_M',
        10: 'PENTIUM_4',
        11: 'ITANIUM',
        12: 'XEON',
        15: 'INTEL_FAMILY_MAX',
        22: 'PENTPRO',
        24: 'PENTIUM_3_M',
        26: 'PENTIUM_4_M',
        27: 'ITANIUM_2',
        28: 'XEON_MP',
        40: 'PENTIUM_3_XEON',
        54: 'PENTII_M3',
        86: 'PENTII_M5',
        103: 'CELERON',
        119: 'CELERON_MOBILE',
        132: '486SX'
    },
    10: {
        -2: 'MC98000',
        -1: 'MULTIPLE',
        0: 'MC98000_ALL',
        1: 'MC98601'
    },
    11: {
        -2: 'HPPA',
        -1: 'MULITPLE',
        0: 'HPPA_ALL or HPPA_7100',
        1: 'HPPA_7100LC'
    },
    12: {
        -2: 'ARM',
        -1: 'MULTIPLE',
        0: 'ARM_ALL',
        1: 'ARM_A500_ARCH',
        2: 'ARM_A500',
        3: 'ARM_A440',
        4: 'ARM_M4',
        5: 'ARM_V4T',
        6: 'ARM_V6',
        7: 'ARM_V5TEJ',
        8: 'ARM_XSCALE',
        9: 'ARM_V7',
        10: 'ARM_V7F',
        11: 'ARM_V7S',
        12: 'ARM_V7K',
        13: 'ARM_V8',
        14: 'ARM_V6M',
        15: 'ARM_V7M',
        16: 'ARM_V7EM'
    },
    13: {
        -2: 'MC88000',
        -1: 'MULTIPLE',
        0: 'MC88000_ALL',
        1: 'MMAX_JPC or MC88100',
        2: 'MC88110'
    },
    14: {
        -2: 'SPARC',
        -1: 'MULTIPLE',
        0: 'SPARC_ALL or SUN4_ALL',
        1: 'SUN4_260',
        2: 'SUN4_110'
    },
    15: {
        -2: 'I860 (big-endian)',
        -1: 'MULTIPLE',
        0: 'I860_ALL',
        1: 'I860_860'
    },
    18: {
        -2: 'POWERPC',
        -1: 'MULTIPLE',
        0: 'POWERPC_ALL',
        1: 'POWERPC_601',
        2: 'POWERPC_602',
        3: 'POWERPC_603',
        4: 'POWERPC_603e',
        5: 'POWERPC_603ev',
        6: 'POWERPC_604',
        7: 'POWERPC_604e',
        8: 'POWERPC_620',
        9: 'POWERPC_750',
        10: 'POWERPC_7400',
        11: 'POWERPC_7450',
        100: 'POWERPC_970'
    },
    16777223: {
        -2: 'X86_64',
        -1: 'MULTIPLE',
        0: 'INTEL_MODEL_ALL',
        3: 'X86_ALL, X86_64_ALL, I386_ALL, or 386',
        4: 'X86_ARCH1 or 486',
        5: '586 or PENT',
        8: 'X86_64_H or PENTIUM_3',
        9: 'PENTIUM_M',
        10: 'PENTIUM_4',
        11: 'ITANIUM',
        12: 'XEON',
        15: 'INTEL_FAMILY_MAX',
        22: 'PENTPRO',
        24: 'PENTIUM_3_M',
        26: 'PENTIUM_4_M',
        27: 'ITANIUM_2',
        28: 'XEON_MP',
        40: 'PENTIUM_3_XEON',
        54: 'PENTII_M3',
        86: 'PENTII_M5',
        103: 'CELERON',
        119: 'CELERON_MOBILE',
        132: '486SX',
        2147483648 + 0: 'INTEL_MODEL_ALL',
        2147483648 + 3: 'X86_ALL, X86_64_ALL, I386_ALL, or 386',
        2147483648 + 4: 'X86_ARCH1 or 486',
        2147483648 + 5: '586 or PENT',
        2147483648 + 8: 'X86_64_H or PENTIUM_3',
        2147483648 + 9: 'PENTIUM_M',
        2147483648 + 10: 'PENTIUM_4',
        2147483648 + 11: 'ITANIUM',
        2147483648 + 12: 'XEON',
        2147483648 + 15: 'INTEL_FAMILY_MAX',
        2147483648 + 22: 'PENTPRO',
        2147483648 + 24: 'PENTIUM_3_M',
        2147483648 + 26: 'PENTIUM_4_M',
        2147483648 + 27: 'ITANIUM_2',
        2147483648 + 28: 'XEON_MP',
        2147483648 + 40: 'PENTIUM_3_XEON',
        2147483648 + 54: 'PENTII_M3',
        2147483648 + 86: 'PENTII_M5',
        2147483648 + 103: 'CELERON',
        2147483648 + 119: 'CELERON_MOBILE',
        2147483648 + 132: '486SX'
    },
    16777228: {
        -2: 'ARM64',
        -1: 'MULTIPLE',
        0: 'ARM64_ALL',
        1: 'ARM64_V8',
        2147483648 + 0: 'ARM64_ALL',
        2147483648 + 1: 'ARM64_V8'
    },
    16777234: {
        -2: 'POWERPC64',
        -1: 'MULTIPLE',
        0: 'POWERPC_ALL',
        1: 'POWERPC_601',
        2: 'POWERPC_602',
        3: 'POWERPC_603',
        4: 'POWERPC_603e',
        5: 'POWERPC_603ev',
        6: 'POWERPC_604',
        7: 'POWERPC_604e',
        8: 'POWERPC_620',
        9: 'POWERPC_750',
        10: 'POWERPC_7400',
        11: 'POWERPC_7450',
        100: 'POWERPC_970',
        2147483648 + 0: 'POWERPC_ALL (LIB64)',
        2147483648 + 1: 'POWERPC_601 (LIB64)',
        2147483648 + 2: 'POWERPC_602 (LIB64)',
        2147483648 + 3: 'POWERPC_603 (LIB64)',
        2147483648 + 4: 'POWERPC_603e (LIB64)',
        2147483648 + 5: 'POWERPC_603ev (LIB64)',
        2147483648 + 6: 'POWERPC_604 (LIB64)',
        2147483648 + 7: 'POWERPC_604e (LIB64)',
        2147483648 + 8: 'POWERPC_620 (LIB64)',
        2147483648 + 9: 'POWERPC_750 (LIB64)',
        2147483648 + 10: 'POWERPC_7400 (LIB64)',
        2147483648 + 11: 'POWERPC_7450 (LIB64)',
        2147483648 + 100: 'POWERPC_970 (LIB64)'
    }
}
Hell, I know that I have an abundance of sunglasses too but I am addicted to the new and shiny things of life. Shop with Zenni Optical Coupon, Save with Anycodes. Anycodes.com aims to make your shopping more enjoyable by collecting all active and working coupons and deals for you. That will accelerate the shift to online prescription glasses and eliminate. Learn about the Board of Directors, Executive Committees and CEO compensation in this industry. Zenni Optical Promo Code 50% Off 2019 Free Shipping Code. 50% off Zenni Optical is an online store that sells glasses for men, women and kids. I have a friend who was not satisfied, though, and is sure the prescription is wrong on her Zenni glasses.I was able to buy 3 pair of glasses (one with tinted lenses for sunglasses) for the same cost as a single pair locally.Express all facets of your personality with our low-priced glasses and sunglasses in every style, shape, and color imaginable.Zenni Optical Eyeglasses reviews: Worn out from attempting to buy Zenni glasses for the first time. After having a not so great experience buying glasses from my eye doctor, I heard about Zenni and decided to give it a try the next time I needed glasses. A wide variety of zenni optical options are available to you, such as stainless, alloy.Zenni Optical has all of the popular eyeglass frames for women at affordable prices.Milled has emails from ZenniOptical, including new arrivals, sales, discounts, and coupon codes.Before doing this cancellation procedure, you should check your profile first.The store is the place for customers that are willing to improve their eyesight in a stylish way at very affordable price. Zenni Womens Cat-Eye Prescription Glasses Gray Frame TR 2013412. A wide variety of zenni eyeglasses options are available to you, such as acetate, titanium, and stainless. At Zenni, we believe everyone should have access to high-quality, affordable eyewear. 
About 2 weeks ago I bought 4 pairs of eyeglasses from ZenniOptical.com and got them about a week. Additionally, you will need to know your PD (pupillary distance). Save on last-minute gifts with holiday sales and deals from Zenni Optical.Zenni Optical is an affordable place to purchase your prescription eyeglasses. Come to Zenni Optical and you will find prescription eyeglasses, sunglasses and eyewear of different styles, colors, shapes and meterial. Survey respondents who bought from Zenni spent a median of just.If you need prescription glasses, it will be necessary to have your eye prescription.By looking at Zenni Optical reviews and Zenni Optical complaints from shoppers around the country, our Zenni glasses review will help you determine whether Zenni prescription glasses are the best idea for your eye health—and your wallet. At Zenni Optical, we believe everyone should have access to high-quality, stylish eyewear.I paid for defective eyeglasses and they will not compensate. Accessories can make or break an outfit, and the most important accessory is the pair of shoes. We are going to keep the Zenni glasses as a back-up, and we may try ordering from them again in the future.Our trendy, not spendy eyewear allows you to express all sides of your personality without breaking the bank. Zenni Optical review with 3 Comments: I ordered twice from these folks.This prescription eyewear retailer lets customers order glasses online at the best value with Zenni Optical coupon codes and everyday glasses sales.About 96% of these are eyeglasses frames, 1% are eyeglasses lenses, and 1% are prisms.After I had my eyeglasses for about 40 days I noticed a crack in the lens. The process might be easier when we have a better idea of what styles of glasses work best for his face.
#!/usr/bin/env python
# -*- coding: utf-8 -*-

"""
.. See the NOTICE file distributed with this work for additional information
   regarding copyright ownership.

   Licensed under the Apache License, Version 2.0 (the "License");
   you may not use this file except in compliance with the License.
   You may obtain a copy of the License at

      http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License.
"""

from __future__ import print_function

import os.path
import argparse
import sys
import tarfile
import multiprocessing
import json
import shutil

# Python 2/3 compatibility for URL fetching.
try:
    from urllib2 import urlopen
except ImportError:
    from urllib.request import urlopen

from random import random
from string import ascii_letters as letters

from basic_modules.workflow import Workflow
from basic_modules.metadata import Metadata
from utils import logger
from utils import remap

from tool.common import CommandLineParser
from tool.common import format_utils
from tool.tb_model import tbModelTool

# Avoid picking up the COMPSs Python bindings when running standalone.
if '/opt/COMPSs/Bindings/python' in sys.path:
    sys.path.pop(sys.path.index('/opt/COMPSs/Bindings/python'))

# ------------------------------------------------------------------------------


class tadbit_model(Workflow):  # pylint: disable=invalid-name,too-few-public-methods
    """
    Wrapper for the VRE form TADbit model.
    It has two main sections:
        - looks for optimal parameters for modeling a region
        - models a region for a given optimal parameters
    .
    """
    configuration = {}

    def __init__(self, configuration=None):
        """
        Initialise the tool with its configuration.

        Builds a private temporary working directory, flattens the
        VRE-style namespaced configuration keys ("section:param" -> "param")
        and decides whether this run only optimizes parameters.

        Parameters
        ----------
        configuration : dict
            a dictionary containing parameters that define how the operation
            should be carried out, which are specific to each Tool.
        """
        # External wrapper binaries are located via a JSON file that sits
        # next to this module; its bin_path is prepended to PATH.
        tool_extra_config = json.load(open(os.path.dirname(
            os.path.abspath(__file__))+'/tadbit_wrappers_config.json'))
        os.environ["PATH"] += os.pathsep + format_utils.convert_from_unicode(
            tool_extra_config["bin_path"])

        if configuration is None:
            configuration = {}

        self.configuration.update(format_utils.convert_from_unicode(configuration))

        # Number of cores available
        num_cores = multiprocessing.cpu_count()
        self.configuration["ncpus"] = num_cores

        # Random 5-letter suffix for the private temp dir.
        tmp_name = ''.join([letters[int(random()*52)]for _ in range(5)])
        if 'execution' in self.configuration:
            self.configuration['project'] = self.configuration['execution']
        # NOTE(review): raises KeyError if neither 'execution' nor 'project'
        # is present in the configuration — presumably guaranteed by the VRE.
        self.configuration['workdir'] = self.configuration['project']+'/_tmp_tadbit_'+tmp_name
        if not os.path.exists(self.configuration['workdir']):
            os.makedirs(self.configuration['workdir'])

        # Only optimize (no model generation) when the generation section
        # is absent from the submitted form.
        self.configuration["optimize_only"] = "generation:num_mod_comp" not in self.configuration

        # When actually generating models, the optimization ranges are
        # dropped in favour of the generation parameters.
        if "optimization:max_dist" in self.configuration and \
                not self.configuration["optimize_only"]:
            del self.configuration["optimization:max_dist"]
            del self.configuration["optimization:upper_bound"]
            del self.configuration["optimization:lower_bound"]
            del self.configuration["optimization:cutoff"]

        # Flatten "section:param" keys to bare "param" keys (originals kept).
        self.configuration.update(
            {(key.split(':'))[-1]: val for key, val in self.configuration.items()}
        )

        # 'all' means whole genome: blank the region selectors.
        if self.configuration["gen_pos_chrom_name"] == 'all':
            self.configuration["gen_pos_chrom_name"] = ""
            self.configuration["gen_pos_begin"] = ""
            self.configuration["gen_pos_end"] = ""
        if "gen_pos_begin" not in self.configuration:
            self.configuration["gen_pos_begin"] = ""
        if "gen_pos_end" not in self.configuration:
            self.configuration["gen_pos_end"] = ""

    def run(self, input_files, metadata, output_files):
        """
        Run the modeling tool on a normalized Hi-C contact matrix and package
        the results (statistics tarball and, unless optimize_only, a TADkit
        model ensemble).

        Parameters
        ----------
        files_ids : list
            List of file locations
        metadata : list
            Required meta data
        output_files : list
            List of output file locations

        Returns
        -------
        outputfiles : list
            List of locations for the output bam files
        """
        logger.info(
            "PROCESS MODEL - FILES PASSED TO TOOLS: {0}".format(
                str(input_files["hic_contacts_matrix_norm"]))
        )

        m_results_meta = {}
        m_results_files = {}

        # Refuse non-normalized matrices: modeling requires normalized input.
        if "norm" in metadata['hic_contacts_matrix_norm'].meta_data:
            if metadata['hic_contacts_matrix_norm'].meta_data["norm"] != 'norm':
                clean_temps(self.configuration['workdir'])
                logger.fatal("Only normalized matrices can be used to build 3D models.\nExiting")
                raise ValueError('Missing normalized input matrix.')

        # Forward only the keys the modeling tool understands.
        input_metadata = remap(self.configuration,
                               "optimize_only", "gen_pos_chrom_name", "resolution",
                               "gen_pos_begin", "gen_pos_end", "max_dist",
                               "upper_bound", "lower_bound", "cutoff",
                               "workdir", "project", "ncpus")

        in_files = [format_utils.convert_from_unicode(input_files['hic_contacts_matrix_norm'])]
        input_metadata["species"] = "Unknown"
        input_metadata["assembly"] = "Unknown"
        if "assembly" in metadata['hic_contacts_matrix_norm'].meta_data:
            input_metadata["assembly"] = metadata['hic_contacts_matrix_norm'].meta_data["assembly"]
        # Resolve the species name from the ENA taxonomy service when a
        # taxon id is available (network access required).
        if metadata['hic_contacts_matrix_norm'].taxon_id:
            dt_json = json.load(urlopen(
                "http://www.ebi.ac.uk/ena/data/taxonomy/v1/taxon/tax-id/" +
                str(metadata['hic_contacts_matrix_norm'].taxon_id)))
            input_metadata["species"] = dt_json['scientificName']

        input_metadata["num_mod_comp"] = self.configuration["num_mod_comp"]
        input_metadata["num_mod_keep"] = self.configuration["num_mod_keep"]

        tm_handler = tbModelTool()
        tm_files, _ = tm_handler.run(in_files, input_metadata, [])

        # Always produce the statistics tarball from the tool's first output.
        m_results_files["modeling_stats"] = self.configuration['project']+"/model_stats.tar.gz"

        tar = tarfile.open(m_results_files["modeling_stats"], "w:gz")
        tar.add(tm_files[0], arcname='modeling_files_and_stats')
        tar.close()

        # The TADkit ensemble (second tool output) only exists when models
        # were actually generated.
        if not self.configuration["optimize_only"]:
            m_results_files["tadkit_models"] = self.configuration['project'] + "/" + \
                os.path.basename(tm_files[1])
            os.rename(tm_files[1], m_results_files["tadkit_models"])

            m_results_meta["tadkit_models"] = Metadata(
                data_type="chromatin_3dmodel_ensemble",
                file_type="JSON",
                file_path=m_results_files["tadkit_models"],
                sources=in_files,
                meta_data={
                    "description": "Ensemble of chromatin 3D structures",
                    "visible": True,
                    "assembly": input_metadata["assembly"]
                },
                taxon_id=metadata['hic_contacts_matrix_norm'].taxon_id)

        # List of files to get saved
        logger.info("TADBIT RESULTS: " + ','.join(
            [str(m_results_files[k]) for k in m_results_files]))

        m_results_meta["modeling_stats"] = Metadata(
            data_type="tool_statistics",
            file_type="TAR",
            file_path=m_results_files["modeling_stats"],
            sources=in_files,
            meta_data={
                "description": "TADbit modeling statistics and result files",
                "visible": True
            })

        clean_temps(self.configuration['workdir'])

        return m_results_files, m_results_meta

# ------------------------------------------------------------------------------


def main(args):
    """
    Main function: launch the tadbit_model workflow through the JSON app
    using the config and metadata files given on the command line.
    """
    from apps.jsonapp import JSONApp
    app = JSONApp()

    result = app.launch(tadbit_model,
                        args.config,
                        args.in_metadata,
                        args.out_metadata)

    return result


def clean_temps(working_path):
    """Cleans the workspace from temporal folder and scratch files.

    Best-effort: individual removal failures are deliberately ignored.
    """
    for the_file in os.listdir(working_path):
        file_path = os.path.join(working_path, the_file)
        try:
            if os.path.isfile(file_path):
                os.unlink(file_path)
            elif os.path.isdir(file_path):
                shutil.rmtree(file_path)
        except OSError:
            pass
    try:
        os.rmdir(working_path)
    except OSError:
        pass
    logger.info('[CLEANING] Finished')


def make_absolute_path(files, root):
    """Make paths absolute.

    Mutates and returns *files*, joining each value onto *root*.
    """
    for role, path in files.items():
        files[role] = os.path.join(root, path)
    return files

# ------------------------------------------------------------------------------

if __name__ == "__main__":
    # Flag used by the framework to detect command-line execution.
    sys._run_from_cmdl = True  # pylint: disable=protected-access

    # Set up the command line parameters
    PARSER = argparse.ArgumentParser(description="TADbit map")

    # Config file
    PARSER.add_argument("--config",
                        help="Configuration JSON file",
                        type=CommandLineParser.valid_file,
                        metavar="config",
                        required=True)
    # Metadata
    PARSER.add_argument("--in_metadata",
                        help="Project metadata",
                        metavar="in_metadata",
                        required=True)
    # Output metadata
    PARSER.add_argument("--out_metadata",
                        help="Output metadata",
                        metavar="output_metadata",
                        required=True)
    # Log file
    PARSER.add_argument("--log_file",
                        help="Log file",
                        metavar="log_file",
                        required=True)

    IN_ARGS = PARSER.parse_args()

    RESULTS = main(IN_ARGS)
Posted on August 30, 2018 August 30, 2018 by CBS Kenya Ltd. Customer Visit Management System (CVM) is an innovative solution that handles the whole visit of your customers, even before they step into your branch, offering them a unique experience that far exceeds their expectations, in order to turn them into your loyal customers. The system grants you full control over your branches, bringing great value to both you and your customers. The CVM solution consists of a set of integrated sub-systems that work together to help you manage your customers' visits at a lower cost and in less time, and turns each customer's visit into an opportunity to increase revenues. CBS Kenya has implemented the system in NIC Bank's branches across the country.
""" Encoding/decoding of communion messages message must be a dict containing: "mode": "request" or "response" "id": 32-bit identifier, should increase "content": None, bool, bytes, str, int, float, or tuple of str/int/float/bool remaining keys: anything JSON-serializable encoded message is binary, and consists of: header SEAMLESS tip: 0 for request, 1 for response identifier: 32-bit nrem: 32-bit, the length of the remaining keys buffer (after content) content: is_str byte + remainder. For is_str: 0: No remainder, message is None 1: bool. remainder is 0 or 1 2: bytes. remainder is raw content 3: str. remainder is UTF-8 encoded content 4: int/float/tuple. remainder is JSON-encoded content. rem: remaining keys buffer (JSON format) """ import numpy as np import json def communion_encode(msg): assert msg["mode"] in ("request", "response") m = 'SEAMLESS'.encode() tip = b'\x00' if msg["mode"] == "request" else b'\x01' m += tip m += np.uint32(msg["id"]).tobytes() remainder = msg.copy() remainder.pop("mode") remainder.pop("id") remainder.pop("content") if len(remainder.keys()): rem = json.dumps(remainder).encode() nrem = np.uint32(len(rem)).tobytes() m += nrem m += rem else: m += b'\x00\x00\x00\x00' content = msg["content"] if content is None: m += b'\x00' else: assert isinstance(content, (str, int, float, bytes, bool, tuple)), content if isinstance(content, bool): is_str = b'\x01' elif isinstance(content, (int, float, tuple)): is_str = b'\x04' else: is_str = b'\x03' if isinstance(content, str) else b'\x02' m += is_str if isinstance(content, str): content = content.encode() elif isinstance(content, bool): content = b'\x01' if content else b'\x00' elif isinstance(content, (int, float, tuple)): if isinstance(content, tuple): for item in content: assert item is None or isinstance(item, (str, int, float, bool)), type(item) content = json.dumps(content).encode() m += content assert communion_decode(m) == msg, (communion_decode(m), msg) return m def communion_decode(m): assert 
isinstance(m, bytes) message = {} head = 'SEAMLESS'.encode() assert m[:len(head)] == head m = m[len(head):] tip = m[:1] m = m[1:] assert tip == b'\x01' or tip == b'\x00', tip message["mode"] = "request" if tip == b'\x00' else "response" l1, l2 = m[:4], m[4:8] m = m[8:] message["id"] = np.frombuffer(l1,np.uint32)[0] nrem = np.frombuffer(l2,np.uint32)[0] if nrem: rem = m[:nrem] rem = rem.decode() rem = json.loads(rem) message.update(rem) m = m[nrem:] is_str = m[:1] if is_str == b'\x00': content = None elif is_str == b'\x01': content = True if m[1:] == b'\x01' else False elif is_str == b'\x04': content = json.loads(m[1:]) assert isinstance(content, (int, float, list)) if isinstance(content, list): for item in content: assert item is None or isinstance(item, (str, int, float, bool)), type(item) content = tuple(content) else: assert is_str == b'\x03' or is_str == b'\x02' content = m[1:] if is_str == b'\x03': content = content.decode() message["content"] = content return message
The e-commerce giant’s love affair with brick-and-mortar continues with a new store concept, Amazon 4-Star. The store, which opened its doors Thursday in Soho, is similar to Amazon Books, but with a range of products besides books stocked on its shelves–all of which must have a rating of at least 4 stars by Amazon customers, Forbes reported. Kitchen appliances, home gadgets, devices and electronics, toys and games–the store is stocked with popular items most people are likely to buy.
# coding=utf-8
__author__ = 'walthermaciel'

# NOTE(review): Python 2 module (print statements throughout); it will not
# run under Python 3 without porting. Trailing commas on print statements
# suppress the newline so the status ("OK"/"Failed") lands on the same line.

from geopy.geocoders import DataBC
from geopy.exc import GeopyError
from time import sleep
import sys
from ssl import SSLError
from create_feature_vector import create_vector
import os
import pandas as pd
from sklearn.externals import joblib
from sklearn.ensemble import RandomForestClassifier
from sklearn.ensemble import RandomForestRegressor

# Classifier output index -> crime category label (trailing spaces pad the
# labels so the console columns line up).
crime_id = {0: 'BNE Residential ',
            1: 'Theft from Vehicle',
            2: 'Other Thefts ',
            3: 'Mischief ',
            4: 'Theft of Vehicle ',
            5: 'BNE Commercial '}


def gather_time():
    """Prompt stdin for a year and a month; re-prompts until month is 1..12.

    Returns (year, month) as ints. The year is not validated.
    """
    print 'Year:\t',
    year = sys.stdin.readline().strip()

    month_ok = False
    while not month_ok:
        print 'Month:\t',
        month = sys.stdin.readline().strip()
        if 12 >= int(month) > 0:
            month_ok = True
        else:
            print 'Nice try, champ...'

    return int(year), int(month)


def gather_address():
    """Prompt stdin for a street number and name; return a full Vancouver
    address string suitable for geocoding."""
    print 'Street Number:\t',
    st_num = sys.stdin.readline().strip()
    print 'Street Name:\t',
    st_name = sys.stdin.readline().strip()
    address = st_num + ' ' + st_name + ', Vancouver, BC, Canada'
    return address


def gather_lat_long(address):
    """Geocode *address* with exponential backoff.

    Relies on the module-level ``geolocator`` created in the __main__ block.
    Doubles the delay after each GeopyError/SSLError and exits the process
    once the delay exceeds 10 seconds.
    Returns (latitude, longitude) as floats.
    """
    print 'Researching lat long for ' + address + '...'

    got_it = False
    delay = 1
    while not got_it:
        if delay > 10:
            print 'could not find address, exiting...'
            exit()
        try:
            sleep(delay)
            location = geolocator.geocode(address)
            got_it = True
        except (GeopyError, SSLError) as e:
            delay *= 2
            got_it = False
            print '!!! Are you sure you got the right address? Trying again...'

    print 'Got it!'

    latitude = "{:.8f}".format(location.latitude)
    longitude = "{:.8f}".format(location.longitude)
    print 'LatLong:\t( ' + latitude + ', ' + longitude + ' )'
    return location.latitude, location.longitude


def run_demo():
    """Interactive loop body: ask for a date and address, build the feature
    vector, and print crime-probability and expected-count predictions."""
    os.system('clear')
    # Figlet-style "van crime" banner. NOTE(review): the exact column
    # alignment of the original ASCII art could not be recovered from this
    # copy of the source; the character runs below are in the original order.
    print '''
888     888                   .d8888b.          d8b
888     888                  d88P  Y88b         Y8P
888     888                  888    888
Y88b   d88P 8888b.  88888b.  888        888d888 888 88888b.d88b.   .d88b.
 Y88b d88P     "88b 888 "88b 888        888P"   888 888 "888 "88b d8P  Y8b
  Y88o88P  .d888888 888  888 888    888 888     888 888  888  888 88888888
   Y888P   888  888 888  888 Y88b  d88P 888     888 888  888  888 Y8b.
    Y8P    "Y888888 888  888  "Y8888P"  888     888 888  888  888  "Y8888

------------------ https://github.com/wmaciel/van-crime -----------------
'''

    year, month = gather_time()
    address = gather_address()
    latitude, longitude = gather_lat_long(address)

    print 'Generating feature vector...',
    f_vec = create_vector(int(year), int(month), latitude, longitude)
    # NOTE(review): create_vector signals failure by returning an int, but
    # execution continues and would fail below at f_vec.as_matrix().
    if isinstance(f_vec, int):
        print 'Failed'
    else:
        print 'OK'

    print 'Loading classification model...',
    clf = joblib.load('../models/random_forest_model.p')
    print 'OK'

    print 'Loading regression model...',
    reg = joblib.load('../models/RandomForestRegressor.p')
    print 'OK'

    print '\n\n----- Results -----'
    print 'Probability of crime type, given that a crime happened:'
    # NOTE(review): DataFrame.as_matrix() was removed in pandas 1.0;
    # this code presumably targets an older pandas.
    prob_list = clf.predict_proba(f_vec.as_matrix())[0]
    for i, p in enumerate(prob_list):
        print crime_id[i] + '\t' + "{:.2f}".format(p * 100) + '%'
    print '--------------------------\n'

    print 'Expected number of crimes to happen:'
    expected = reg.predict(f_vec.as_matrix())[0]
    print expected
    print '--------------------------\n'

    print 'Expected number of crimes to happen by type:'
    for i, p in enumerate(prob_list):
        print crime_id[i] + '\t' + "{:.2f}".format(p * expected)


if __name__ == '__main__':
    # Module-level geocoder; gather_lat_long() depends on this global.
    geolocator = DataBC()
    while True:
        run_demo()
        print '\npress enter to reset'
        sys.stdin.readline()
Jan Schwartz is co-founder of Education and Training Solutions, a web-based e-learning company that produces online courses for those in the health and wellness fields. She has worked in education since 1988. I went to massage school to learn more about anatomy. I was a pretty active person and wanted to know more about how my body worked, but I had had enough of college and wanted a more fun way to learn. As it turns out, I also loved learning about massage and how it helps the body heal in certain ways. My background was in education and training so I did gravitate towards education after I was two years out of school. I was able to combine the two careers and I became an instructor and then an education director. I was fortunate in that the owner of the school I attended was big on volunteering. She steered me that direction. I was a committee chair for my local AMTA chapter, and then I became a Commissioner for the Commission on Massage Therapy Accreditation (COMTA), then the Chair of COMTA. I did some volunteer work for NCBTMB and then became a Trustee for the Massage Therapy Foundation. For two years I was the massage therapy ambassador to the Academy of Integrative Health and Medicine (AIHM) and currently I am on the board of the Academic Collaborative for Integrative Health (ACIH) and also a member of the Board of Trustees for the University of Western States in Oregon. Volunteering has been a big part of my career in massage therapy. I stay engaged through my volunteer work, which is a great learning environment as well. I also go to relevant conferences and am now most interested in inter-professional education. That is, education that brings healthcare practitioners of different fields closer together in how they work with patients/clients. When I was Chair of COMTA we received our initial approval from the U.S. Dept. of Education. That was a long time coming. 
By the time our renewal came around we had developed the first set of competencies for the field of massage therapy. As a group we did some excellent work, that still exists today. I am proud to have been a part of those processes. More recently, I am proud to have been a co-leader of the team that developed competencies for optimal inter-professional practice through the ACIH. The best part for me now is meeting like minded people in the fields of health and wellness; working on how to make our healthcare system more equitable, more affordable and more patient/client centered. Once you get your practice going, whether you practice solo, in a group or at a spa, and feel comfortable with yourself in your new career, I would strongly advise therapists to get involved in local organizations that promote health and wellness. It could be a massage organization, or any other organization that you think would benefit from the expertise you bring. Don’t think too narrowly—it’s a great way to network! We have to carry the message that massage has many benefits to help people optimize their health.
#-*- coding: utf-8 -*-
from .usuario import Usuario
from .nivel_acesso import *
from .senha_criptografada import *
from domain.excecoes import *
from domain.email import EmailUsuarioCadastrado, EmailUsuarioAlterado, EmailUsuarioRemovido


class ServicoCRUDUsuario():
    """CRUD service for Users, independent of the storage implementation.

    :param repositorio: RepositorioUsuario object (storage backend)
    :param servico_email: service used to send notification e-mails"""

    def __init__(self, repositorio, servico_email):
        self.repositorio = repositorio
        self.servico_email = servico_email

    def criar(self, dados):
        """Create a User. Implements UC12 (Add User).

        :param dados: DTOUsuario object with the data to be inserted.
        :raises ExcecaoNivelAcessoInvalido: if dados.nivelAcesso is not 0, 1 or 2.
        :raises ExcecaoUsuarioJaExistente: if the e-mail is already registered.
        :returns: the persisted Usuario."""
        # Map the integer access-level code to its domain object.
        escolha = {
            0: UsuarioComum(),
            1: SistemaManutencao(),
            2: Administrador(),
        }
        try:
            nivelAcesso = escolha[dados.nivelAcesso]
        except KeyError:
            raise ExcecaoNivelAcessoInvalido

        senhaCriptografada = SenhaCriptografada(dados.senha)
        usuario = Usuario(dados.nome, dados.email,
                          senhaCriptografada, nivelAcesso)

        # Duplicate-e-mail check happens before inserting; a truthy return
        # from obter_por_email means the address is already taken.
        if self.repositorio.obter_por_email(dados.email):
            raise ExcecaoUsuarioJaExistente

        usuario = self.repositorio.inserir(usuario)

        # The welcome e-mail includes the plain-text password (dados.senha).
        email = EmailUsuarioCadastrado(usuario, dados.senha)
        self.servico_email.enviar(usuario.email, email)

        return usuario

    def alterar(self, _id, dados):
        """Update a User's data. Implements UC13 (Change User).

        :param _id: Integer ID of the desired User.
        :param dados: DTOUsuario object with the data to be applied.
        :raises ExcecaoUsuarioInexistente: if no User has the given ID.
        :raises ExcecaoUsuarioJaExistente: if the target e-mail belongs to
            another User.
        :raises ExcecaoNivelAcessoInvalido: if dados.nivelAcesso is not 0, 1 or 2.
        :returns: the updated Usuario."""
        usuario = self.repositorio.obter(_id)
        if not usuario:
            raise ExcecaoUsuarioInexistente

        # User who currently owns the e-mail address we want to change to;
        # it is fine if that owner is the very User being updated.
        usuarioDoEmail = self.repositorio.obter_por_email(dados.email)
        if usuarioDoEmail and usuarioDoEmail.id != _id:
            raise ExcecaoUsuarioJaExistente

        # Same integer-code -> access-level mapping as in criar().
        escolha = {
            0: UsuarioComum(),
            1: SistemaManutencao(),
            2: Administrador(),
        }
        try:
            usuario.nivelAcesso = escolha[dados.nivelAcesso]
        except KeyError:
            raise ExcecaoNivelAcessoInvalido

        usuario.nome = dados.nome
        usuario.email = dados.email
        # The password is only re-hashed when a new one was provided.
        if dados.senha:
            usuario.senhaCriptografada = SenhaCriptografada(dados.senha)

        self.repositorio.atualizar(usuario)

        email = EmailUsuarioAlterado(usuario)
        self.servico_email.enviar(usuario.email, email)

        return usuario

    def listar(self):
        """List all Users, returning a list of Usuario objects.
        Implements part of UC04 (Search User)."""
        return self.repositorio.listar()

    def obter(self, _id):
        """Find and return the User with the given ID.
        Implements part of UC04 (Search User).

        :param _id: Integer ID of the desired User.
        :raises ExcecaoUsuarioInexistente: if no User has the given ID."""
        usuario = self.repositorio.obter(_id)
        if not usuario:
            raise ExcecaoUsuarioInexistente
        return usuario

    def remover(self, _id):
        """Remove the User with the given ID and return it, besides
        cancelling all of their Appointments. Implements UCXXX (Remove User).

        :param _id: Integer ID of the desired User.
        :raises ExcecaoUsuarioInexistente: if no User has the given ID.
        :returns: tuple (removed user from the repository, True)."""
        #TODO: look up appointments associated with the User with id _id
        usuario = self.repositorio.obter(_id)
        if not usuario:
            raise ExcecaoUsuarioInexistente

        # NOTE(review): the farewell e-mail is sent before the actual
        # removal; if remover() fails the user keeps the account but has
        # already been notified — confirm this ordering is intended.
        email = EmailUsuarioRemovido(usuario)
        self.servico_email.enviar(usuario.email, email)

        #TODO: cancel all the appointments in the list
        return (self.repositorio.remover(_id), True)
Living at home with an incurable illness can be a painful, difficult and distressing experience without proper specialist support. Everyday tasks such as bathing, getting out of a chair and moving around become monumental challenges for both the patient and their family members to overcome. Our occupational therapists support patients who wish to remain at home by providing specialist equipment, advice and guidance to help them maintain their independence. We have our own team of social workers who can provide counselling and act as advocates for our patients and their families. Our social workers can give advice about financial and legal matters; care packages at home, nursing homes and residential homes; and social services and other agencies. They also provide bereavement support. Our doctors are able to visit patients at home to offer specialist advice, working in partnership with primary care teams, on the complex physical and psychological effects of living with a life-limiting illness.
#!/usr/bin/python import os, sys # low level handling, such as command line stuff import string # string methods available import getopt # comand line argument handling from collections import defaultdict from low import * # custom functions, written by myself # ============================================================================= def show_help( ): """ displays the program parameter list and usage information """ print >> sys.stderr, "usage: " + sys.argv[0] + " -d <gff-folder>" stdout( " option description" ) stdout( " -h help (this text here)" ) stdout( " -d folder with gff files to parse" ) stdout( " " ) sys.exit(1) # ============================================================================= def handle_arguments(): """ verifies the presence of all necessary arguments and returns the data dir """ if len ( sys.argv ) == 1: stderr( "no arguments provided." ) show_help() try: # check for the right arguments keys, values = getopt.getopt( sys.argv[1:], "hd:" ) except getopt.GetoptError: stderr( "invalid arguments provided." ) show_help() args = {} for key, value in keys: if key == '-d': args['dir'] = value if not args.has_key('dir'): print >> sys.stderr, "gff dir argument missing." show_help() elif not dir_exists( args.get('dir') ): print >> sys.stderr, "gff dir does not exist." 
show_help() if not args['dir'].endswith("/"): args['dir'] += '/' return args # ============================================================================= # === MAIN ==================================================================== # ============================================================================= def main( args ): def process_gff_line(line, species): if line.startswith("#") or len(line.rstrip()) == 0: return columns = line.rstrip().split("\t") if len(columns) != 9: return type = columns[2] if type != "gene": return chr, start, stop, strand, descr = columns[0], columns[3], columns[4], columns[6], columns[8] id = re.search("ID=([^;]+);", descr).group(1) sys.stdout.write(species + "\t" + id + "\t") print string.join([chr, start, stop, strand], "\t") # ============================================================================= for filename in os.listdir(args['dir']): gzip = 0 if not filename.endswith(".gff") and not filename.endswith(".gff.gz"): continue species = filename[:filename.index("-")] filename = args['dir'] + filename if filename.endswith(".gff.gz"): gzip = 1 if gzip: os.system("gunzip " + filename) filename = filename[:-3] fo = open(filename) for line in fo: process_gff_line(line, species) fo.close() if gzip: os.system("gzip " + filename) # ============================================================================= args = handle_arguments() main( args )
I have been living in the Bay Area for just over a year now, and I’ve really enjoyed witnessing the full rotation of the seasons: Autumn had beautiful yellows and ambers and browns, the Winter – soft blues and greys, Spring – a verdant green with pinks and lilacs, and Summer is currently full of dry golden grasses, a muted blue sky and white clouds. One of the newest contributors to the greatest design blog on the web, Design*Sponge, is Lauren Willhite. She’s a Graphic Designer based in Portland, Oregon, and her blog, Color Collective is a treasure trove of loveliness. It’s been years now, but I’m still really gravitating toward the blue/green/grey combinations, with pops of warm colour like yellow and orange. I love this combination because the greens and blues are balanced out with the warmer tones of yellow, orange and bone. And how cool would it be to paint a room in more than three colours? I'd love my world painted like this! This above is quite possibly my favourite palette out of all the ones I have featured. I love the ‘pop’ the orange gives this work without dominating the blue and green tones and the yellow-based neutral. Complements it perfectly. Splendid! I love the inclusion of a deep, rich grey in this colour palette above. And I adore the ethereal quality of the photographs, too. Why not use all of these colours? By using the pink and purple alongside the blues and green, it feels slightly more whimsical, and high energy. And I included this one because I responded to it. I love the periwinkle walls we have at the moment, and love it balanced with the warm oranges instead of the pops of red we currently have. I love the resource Lauren offers designers, artists and others like me through Color Collective. Would be a great place to stop by and have a poke around when you’re next deciding on colours for paint, projects, etc. What colours are currently inspiring you? What colour palette would you choose if you were to revamp your space?
#!/usr/bin/env python
# Licensed to Cloudera, Inc. under one
# or more contributor license agreements.  See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership.  Cloudera, Inc. licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License.  You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""Lazy reader for hbase-site.xml: the file is parsed once on first access
and cached in SITE_DICT; accessors expose individual properties."""

import errno
import logging
import os.path
import sys

from hadoop import confparse

from desktop.lib.security_util import get_components

# Python 2's builtin `file` does not exist on Python 3; normalize to `open`.
if sys.version_info[0] > 2:
  open_file = open
else:
  open_file = file

LOG = logging.getLogger(__name__)

# Module-level cache; populated lazily by _parse_site().
SITE_PATH = None
SITE_DICT = None

_CNF_HBASE_THRIFT_KERBEROS_PRINCIPAL = 'hbase.thrift.kerberos.principal'
_CNF_HBASE_THRIFT_SPNEGO_PRINCIPAL = 'hbase.thrift.spnego.principal'
_CNF_HBASE_AUTHENTICATION = 'hbase.security.authentication'
_CNF_HBASE_REGIONSERVER_THRIFT_FRAMED = 'hbase.regionserver.thrift.framed'
_CNF_HBASE_IMPERSONATION_ENABLED = 'hbase.thrift.support.proxyuser'
_CNF_HBASE_USE_THRIFT_HTTP = 'hbase.regionserver.thrift.http'
_CNF_HBASE_USE_THRIFT_SSL = 'hbase.thrift.ssl.enabled'


def reset():
  """Discard the cached configuration so the next get_conf() re-parses."""
  global SITE_DICT
  SITE_DICT = None


def get_conf():
  """Return the parsed hbase-site.xml dict, parsing it on first call."""
  if SITE_DICT is None:
    _parse_site()
  return SITE_DICT


def get_server_principal():
  """Return the service part of the SPNEGO (or, as fallback, the Thrift
  Kerberos) principal, or None if neither is configured."""
  thrift_principal = get_conf().get(_CNF_HBASE_THRIFT_KERBEROS_PRINCIPAL, None)
  principal = get_conf().get(_CNF_HBASE_THRIFT_SPNEGO_PRINCIPAL, thrift_principal)
  components = get_components(principal)
  if components is not None:
    return components[0]


def get_server_authentication():
  """Return the HBase authentication mode, upper-cased ('NOSASL' default)."""
  return get_conf().get(_CNF_HBASE_AUTHENTICATION, 'NOSASL').upper()


def get_thrift_transport():
  """Return 'framed' or 'buffered' from hbase-site.xml, falling back to the
  Hue-side THRIFT_TRANSPORT setting when the property is absent."""
  use_framed = get_conf().get(_CNF_HBASE_REGIONSERVER_THRIFT_FRAMED)
  if use_framed is not None:
    if use_framed.upper() == "TRUE":
      return "framed"
    else:
      return "buffered"
  else:
    #Avoid circular import
    from hbase.conf import THRIFT_TRANSPORT
    return THRIFT_TRANSPORT.get()


def is_impersonation_enabled():
  """True when proxy-user support is on in either hbase-site.xml or Hue."""
  #Avoid circular import
  from hbase.conf import USE_DOAS
  return get_conf().get(_CNF_HBASE_IMPERSONATION_ENABLED, 'FALSE').upper() == 'TRUE' or USE_DOAS.get()


def is_using_thrift_http():
  """True when the Thrift server runs over HTTP (or Hue's USE_DOAS forces it)."""
  #Avoid circular import
  from hbase.conf import USE_DOAS
  return get_conf().get(_CNF_HBASE_USE_THRIFT_HTTP, 'FALSE').upper() == 'TRUE' or USE_DOAS.get()


def is_using_thrift_ssl():
  """True when SSL is enabled for the Thrift server."""
  return get_conf().get(_CNF_HBASE_USE_THRIFT_SSL, 'FALSE').upper() == 'TRUE'


def _parse_site():
  """Read and parse hbase-site.xml into SITE_DICT.

  A missing file is treated as an empty configuration; any other IOError
  is logged and leaves SITE_DICT untouched (None)."""
  global SITE_DICT
  global SITE_PATH

  #Avoid circular import
  from hbase.conf import HBASE_CONF_DIR

  SITE_PATH = os.path.join(HBASE_CONF_DIR.get(), 'hbase-site.xml')
  try:
    # FIX: use a context manager so the file handle is closed deterministically
    # (the previous open_file(...).read() relied on garbage collection).
    with open_file(SITE_PATH, 'r') as conf_file:
      data = conf_file.read()
  except IOError as err:
    if err.errno != errno.ENOENT:
      LOG.error('Cannot read from "%s": %s' % (SITE_PATH, err))
      return
    # The file simply does not exist: fall back to an empty config.
    data = ""

  SITE_DICT = confparse.ConfParse(data)
L.3.4. Determine or clarify the meaning of unknown and multiple-meaning word and phrases based on grade 3 reading and content, choosing flexibly from a range of strategies. L.3.4(a) Use sentence-level context as a clue to the meaning of a word or phrase. L.3.4(b) Determine the meaning of the new word formed when a known affix is added to a known word (e.g., agreeable/disagreeable, comfortable/uncomfortable, care/careless, heat/preheat). L.3.4(c) Use a known root word as a clue to the meaning of an unknown word with the same root (e.g., company, companion). L.3.4(d) Use glossaries or beginning dictionaries, both print and digital, to determine or clarify the precise meaning of key words and phrases. L.3.5. Demonstrate understanding of word relationships and nuances in word meanings. L.3.5(b) Identify real-life connections between words and their use (e.g., describe people who are friendly or helpful). L.3.6. Acquire and use accurately grade-appropriate conversational, general academic, and domain-specific words and phrases, including those that signal spatial and temporal relationships (e.g., After dinner that night we went looking for them). L.3.3. Use knowledge of language and its conventions when writing, speaking, reading, or listening. L.3.3(a) Choose words and phrases for effect. L.3.1. Demonstrate command of the conventions of standard English grammar and usage when writing or speaking. L.3.1(a) Explain the function of nouns, pronouns, verbs, adjectives, and adverbs in general and their functions in particular sentences. L.3.1(b) Form and use regular and irregular plural nouns. L.3.1(d) Form and use regular and irregular verbs. L.3.1(e) Form and use the simple (e.g., I walked; I walk; I will walk) verb tenses. L.3.1(f) Ensure subject-verb and pronoun-antecedent agreement. L.3.2. Demonstrate command of the conventions of standard English capitalization, punctuation, and spelling when writing. L.3.2(b) Use commas in addresses. 
L.3.2(c) Use commas and quotation marks in dialogue. L.3.2(d) Form and use possessives. L.3.2(e) Use conventional spelling for high-frequency and other studied words and for adding suffixes to base words (e.g., sitting, smiled, cries, happiness). L.3.2(f) Use spelling patterns and generalizations (e.g., word families, position-based spellings, syllable patterns, ending rules, meaningful word parts) in writing words. RF.3.4. Read with sufficient accuracy and fluency to support comprehension. RF.3.4(a) Read on-level text with purpose and understanding. RF.3.4(c) Use context to confirm or self-correct word recognition and understanding, rereading as necessary. RF.3.3. Know and apply grade-level phonics and word analysis skills in decoding words. RF.3.3(a) Identify and know the meaning of the most common prefixes and derivational suffixes. RF.3.3(b) Decode words with common Latin suffixes. RF.3.3(d) Read grade-appropriate irregularly spelled words. RI.3.10. By the end of the year, read and comprehend informational texts, including history/social studies, science, and technical texts, at the high end of the grades 2-3 text complexity band independently and proficiently. RI.3.7. Use information gained from illustrations (e.g., maps, photographs) and the words in a text to demonstrate understanding of the text (e.g., where, when, why, and how key events occur). RI.3.8. Describe the logical connection between particular sentences and paragraphs in a text (e.g., comparison, cause/effect, first/second/third in a sequence). RI.3.4. Determine the meaning of general academic and domain-specific words and phrases in a text relevant to a grade 3 topic or subject area. RI.3.5. Use text features and search tools (e.g., key words, sidebars, hyperlinks) to locate information relevant to a given topic efficiently. RI.3.1. Ask and answer questions to demonstrate understanding of a text, referring explicitly to the text as the basis for the answers. RI.3.2. 
Determine the main idea of a text; recount the key details and explain how they support the main idea. RI.3.3. Describe the relationship between a series of historical events, scientific ideas or concepts, or steps in technical procedures in a text, using language that pertains to time, sequence, and cause/effect. RL.3.10. By the end of the year, read and comprehend literature, including stories, dramas, and poetry, at the high end of the grades 2-3 text complexity band independently and proficiently. RL.3.7. Explain how specific aspects of a text's illustrations contribute to what is conveyed by the words in a story (e.g., create mood, emphasize aspects of a character or setting). RL.3.9. Compare and contrast the themes, settings, and plots of stories written by the same author about the same or similar characters (e.g., in books from a series). RL.3.1. Ask and answer questions to demonstrate understanding of a text, referring explicitly to the text as the basis for the answers. RL.3.2. Recount stories, including fables, folktales, and myths from diverse cultures; determine the central message, lesson, or moral and explain how it is conveyed through key details in the text. Flip Charts STOP! LOOK! LISTEN! RL.3.3. Describe characters in a story (e.g., their traits, motivations, or feelings) and explain how their actions contribute to the sequence of events. RL.3.4. Determine the meaning of words and phrases as they are used in a text, distinguishing literal from nonliteral language. RL.3.5. Refer to parts of stories, dramas, and poems when writing or speaking about a text, using terms such as chapter, scene, and stanza; describe how each successive part builds on earlier sections. RL.3.6. Distinguish their own point of view from that of the narrator or those of the characters. SL.3.2. Determine the main ideas and supporting details of a text read aloud or information presented in diverse media and formats, including visually, quantitatively, and orally. W.3.10. 
Write routinely over extended time frames (time for research, reflection, and revision) and shorter time frames (a single sitting or a day or two) for a range of discipline-specific tasks, purposes, and audiences. W.3.5. With guidance and support from peers and adults, develop and strengthen writing as needed by planning, revising, and editing. W.3.2. Write informative/explanatory texts to examine a topic and convey ideas and information clearly. W.3.2(b) Develop the topic with facts, definitions, and details. W.3.2(c) Use linking words and phrases (e.g., also, another, and, more, but) to connect ideas within categories of information. W.3.2(d) Provide a concluding statement or section. W.3.3. Write narratives to develop real or imagined experiences or events using effective technique, descriptive details, and clear event sequences. W.3.3(b) Use dialogue and descriptions of actions, thoughts, and feelings to develop experiences and events or show the response of characters to situations.
""" Contains disassembler stuff. """ from ..arch.data_instructions import DByte class Disassembler: """ Base disassembler for some architecture """ def __init__(self, arch): self.arch = arch for instruction in arch.isa.instructions: # print(instruction, instruction.patterns) # for nl in instruction.non_leaves: # print(' ', nl.patterns) pass def disasm(self, data, outs, address=0): """ Disassemble data into an instruction stream """ # TODO: implement this! # The trial and error method, will be slow as a snail: # for instruction in self.arch.isa.instructions: # for size in instruction.sizes(): # part = data[:size] # try: # print(instruction, part, size) # i = instruction.decode(part) # print(i) # except ValueError: # pass # For now, all is bytes! for byte in data: ins = DByte(byte) ins.address = address outs.emit(ins) address += len(ins.encode()) def take_one(self): pass
Mvhigh is a centralized place where you'll get various gift ideas, deals, sales, and coupons from the largest and most famous online stores like Amazon, Walmart, Ebay, and more. Price comparisons, deals and coupons for Angels Notepad help you save on your online shopping. The Sketchpad with a Secret Power!!!
# Copyright 2013-2015 Camptocamp SA - Nicolas Bessi
# Copyright 2018 Camptocamp SA - Julien Coux
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).

from odoo import _, api, models
from odoo.exceptions import UserError
from odoo.tools.float_utils import float_compare


class StockPicking(models.Model):
    """Adds picking split without done state."""

    _inherit = "stock.picking"

    @api.multi
    def split_process(self):
        """Use to trigger the wizard from button with correct context.

        For every picking in the recordset: moves whose done quantity is
        below the initial demand are split, and the remaining (not-yet-done)
        part is transferred to a newly created backorder picking.

        :raises UserError: if the picking is still in draft, or if no done
            quantity has been entered on any of its move lines."""
        for picking in self:
            # Check the picking state and condition before split
            if picking.state == 'draft':
                raise UserError(_('Mark as todo this picking please.'))
            if all([x.qty_done == 0.0 for x in picking.move_line_ids]):
                raise UserError(
                    _('You must enter done quantity in order to split your '
                      'picking in several ones.'))

            # Split moves considering the qty_done on moves
            new_moves = self.env['stock.move']
            for move in picking.move_lines:
                rounding = move.product_uom.rounding
                qty_done = move.quantity_done
                qty_initial = move.product_uom_qty
                # float_compare avoids spurious splits from float noise at
                # the UoM's rounding precision.
                qty_diff_compare = float_compare(
                    qty_done, qty_initial, precision_rounding=rounding
                )
                if qty_diff_compare < 0:
                    # Less was done than planned: split off the remainder,
                    # converted to the product's base UoM for _split().
                    qty_split = qty_initial - qty_done
                    qty_uom_split = move.product_uom._compute_quantity(
                        qty_split,
                        move.product_id.uom_id,
                        rounding_method='HALF-UP'
                    )
                    new_move_id = move._split(qty_uom_split)
                    for move_line in move.move_line_ids:
                        if move_line.product_qty and move_line.qty_done:
                            # To avoid an error
                            # when picking is partially available
                            try:
                                move_line.write(
                                    {'product_uom_qty': move_line.qty_done})
                            except UserError:
                                pass
                    new_moves |= self.env['stock.move'].browse(new_move_id)

            # If we have new moves to move, create the backorder picking
            if new_moves:
                # name='/' lets the sequence assign a fresh reference.
                backorder_picking = picking.copy({
                    'name': '/',
                    'move_lines': [],
                    'move_line_ids': [],
                    'backorder_id': picking.id,
                })
                picking.message_post(
                    _(
                        'The backorder <a href="#" '
                        'data-oe-model="stock.picking" '
                        'data-oe-id="%d">%s</a> has been created.'
                    ) % (
                        backorder_picking.id,
                        backorder_picking.name
                    )
                )
                # Re-home the split moves and their move lines, then try to
                # reserve stock for the backorder.
                new_moves.write({
                    'picking_id': backorder_picking.id,
                })
                new_moves.mapped('move_line_ids').write({
                    'picking_id': backorder_picking.id,
                })
                new_moves._action_assign()
The final — and main conference day — for UXLX saw 450 people from 32 different countries flock to the the Lisbon FIL centre to hear Don Norman, Christian Crumlish, Kristina Halvorson and more. The first talk of the day really put a mark on the presentations that would follow. It was a talk about the elephant in the room in practically every design case I currently work on; big companies are usually chopped up in little departments and those departments do NOT communicate with each other. Lou held a strong plea that those departments should start working together in order to create a better user experience and outweigh your competition. Silos —Lou is a consultant in information architecture and visits lots of companies who need his help with getting their act together. Whenever he asks about search analytics, the voice of the user via the callcenter or usability research reports, most busineses have to ask three different departments about these things. There is nobody who connects theses “silos”, as Lou likes to call them. So if there is a Usability Research Silo and a Customer Support Silo, do you think they should be talking to each other? Or at least connect their findings in some way? Hell yes. And this is not only the case with research and analytics departments. Most of the time, the “brand strategy” is created in Silo A and the persona’s for the screen designs are written in Silo B. And these Silos also do not communicate with each other. These silos are missing out on the combinatorial effect: together they are better than the sum of both when viewed apart. So do we do with these silos? Well, let’s blow them up. Getting there — So how do we blow up these silos? First off, Lou tells us, you have to get out of yours. Visit some other Silos and find out what they know. Work together. Lou concludes his talk with a few pointers you need to keep in mind when establishing a decision making organ. First off: blue sky it. 
Ask yourself, if you’re going to build a dicision making apparatus, what would it look like? Next: Ban loaded terms and crutches from the discussion, like “omniture”, “user testing”, “market research” and so forth, because these words tend to take the discussion on roads your company has been walking on for too long. And the bottom line: blow up the silos and put people together. “Companies that integrate their silos of insight will outpace their competitors. Play, like design, is both wonderful and available for multiple interpretations – something Christian Crumlish took full advantage of in his wide-ranging talk. Starting off with the analogy of how print designers bemoaned the web’s lack of control, Crumlish suggests that we should be using the concept of play — its original meaning is ‘to dance’, which is apt as we should be thinking about allowing space. Play gives us masks, the chance to have an assumed identity, and the change to carry out re-imaginings (one entomologist is a dedicated participant in Civil War re-enactments to the point that he brings in era appropriate bugs to attack the troops!). He gave a quick overview of what makes games work. Rules are key — what is fair and what is not? Competition — we naturally compete, so that type of environment can help with play. But it’s not only option — collaboration is also a important alternative (the board game Pandemic is a great example of this). The leaderboard can draw people to that and neglect experience — people also like to work together! You don’t need to create a perfect experience, but instead one that’s tuneable. A great example of it is Twitter—you keep tuning it to get what you want (more/less). Extending the metaphor that musical ensembles are about “getting in tune” (choosing what key), he suggested that we choose to “ensemble play” in the key of a certain hashtag. 
And for those who know anything about Crumlish — he’s known as an avid amateur ukulele player — yes, he finished up the talk with a tune. After sun- and T-shaped thinkers Stephen Anderson decided that it was time to introduce a new type: Z-shaped thinkers. According to him these are people who think beyond the obvious, people that dare to turn the challenge around and take it a step further. “When everyone zigs, zag.” The point that Stephen wants to bring across is that it’s not about the tools, it’s about the thinking process itself. When looking at existing examples Stephen mentions people like Negroponte who dared to embrace the limitations of creating a laptop for children that would at max cost $100. Instead of being blocked by the constraints he managed to turn it around and create a really interesting laptop. Another hero of Stephen is George Lucas. When he started with the Star Wars movies nobody knew how they had to make it, but George Lucas simply said that they had to aim for the result they wanted to have and would find a way to reach it. This way of thinking makes it possible for us as UX designers to really take challenges on and make a difference. But what’s the way to do this? As an example Stephen gave the audience a simple task. First he asked everybody to “Design a vase.” When people did this he turned the challenge around and showed everybody how you should look at the challenge: “Design a better way for people to enjoy flowers in their home.” This simple task really showed everyone what the right approach is. The question that follows this is whether or not a lot of designers ever get the room to rephrase a challenge like this… often the business has a clear description of what they want and it’s difficult to change things around. But that doesn’t mean we must avoid it. It means we need to understand the importance of it and should try and talk to the right people in the right language. 
And this is where Leisa Reichelt’s workshop on Strategic UX fits in perfectly. … in order to envision unseen opportunities. You know that world of junk that WAL-E lives in, picking up the odd interesting trinket? That’s the way content is these days on the web. Halvorson says that those odd trinkets are the occasional piece of good content she finds in her travels as a content strategist. The elephant in the room of any conversation is where the content for a site will come from and how it will be maintained. To make matters worse, web writers are normally brought in far too late into the picture. Content is not copywriting, The content goes into a messy ecosystem, and has a lifecycle. She points out that while the field has been around for fifteen years, it’s only been recently that UX has started to pay attention to it, perhaps because it never seemed relevant. Even now she points out that UXers may think they don’t have do deal with workflow and governance. However, they do have to ask the right questions. Demonstrate — hold a mirror up to their pain. She showed an example of history.com showing Valentines Day content on the 16th of February, and a paralysing data-dump of all categories. Recognize the life cycle of content — there are a whole lot of models as to the hoops content has to jump through, but it’s most important to understand which must be changed regularly, and by who. Quoting some well known professionals goes a long way to supporting your arguments. Draw — pictures are good. Envision. Decide the picture you want to aim towards. Discussing the conundrum of CMS’s (and their somewhat failed promise), she recommends the blog CMS Myth. Ninety percent of businesses say the cross-channel experience is critical to their business success. Nick Fink talked to us about definitions, methods, tools and examples to help us create a seamless customer/user experience (he believes the names don’t matter as the goal is the same). 
We need to answer the question: “What is it that we need to do to (sell a bike/let people enjoy a conference)?” and create a seamless experience for our products and services. Businesses and also UXers tend to think in channels, but customers don’t. They don’t think in the silos that businesses create and perceive one business through different channels or touchpoints. So it’s important to craft a coherent cross-channel experience. But how can we do this? Of course we need to gather insights on how people use our products and services. We have to pay attention to detail and look for hacks: e.g. what do people add to a product to enhance the experience. And we need to follow the experience through to the last point and learn the business process behind it. Once you’ve gathered the insights you can create a customer journey map, an experience map or a service blueprint, all of which help you to visualize the cross channel experience. Finck takes Netflix as a good example, because they have matched the different touch points in such a way that the system is pro-active: It knows when you’ve had a problem with its service and proactively compensates you for it. It informs you when it sends a movie or received one back from you and will allow you to engage with its services on any device (iPhone, iPad, TV, laptop, …) This is a sign that Netflix has aligned its stage and backend to serve their audience a seamless experience. The question of businesses is: ” How do we do this?” We need a strategy. We need to have a unified vision of what we’re trying to do. It’s great that Nick Finck talks about the experience beyond the screen, and the theme of breaking down silos is definitely a recurring theme at UXLX (see also Louis Rosenfeld’s talk). As UX’ers we have the skills and tools to help break down the walls, so let’s go out and do it. Let’s get rrrrready to rrrrrrrumbleeeeee! 
I was getting really excited when I heard the title of this talk (rescheduled from Jeff Veen because of illness). Josh’s presentation was really set up as a match— from the premise, right through to the imagery of each slide (each with some old skool wrestler, boxer or luchadore in a position that reflects the context). I always like guys who put something “extra”, some delighters, in their presentation. The presentation is not backed up by statistics or real life examples, but consists of observations and temporary technical restraints that both contenders inhabit. Then he shows us the commercial for the DroidX phone, in which astronauts find a strange device in space. Within this device they find a phone that kinda integrates with the astronaut’s arm and forms itself into an Android phone. Did I hear a nerdgasm? The different cultures couldn’t be clearer — iPhone is about emotion, Android features and technology. So making an app for iPhone or Android can be based on your marketing strategy or the way people would want to use the app. Then there is the mobile web. “It’s webtastic. Everybody loves her”. That is because you only need to make one app and you’re done. You have an instant reach of everyone who owns an iPhone or Android phone (Josh briefly addresses Blackberry, Windows Mobile and Windows Phone 7, but they are irrelevant to the point). So, if everything is a match, there has to be a winner, right? No. There is no winner. Both contenders have their strengths and weaknesses, so it’s comparing apples to oranges. But Josh has a very strong point of view that in order to be something in the world of mobile devices, you should at least have a mobile website. And on top of a mobile website you could, for example, create an app for your most precious customers; an app that provides them with something handy and unique. Josh declares a winner that, in my opinion, is no contender in this match, but plays a whole different sport: the API. 
True, when you have a good API, building a mobile site and native apps is a breeze, but for me, this outcome was a bit disappointing, given the premise of the talk. So both have their advantages and weaknesses, no shocker there. But why not make an app that holds a frame which holds a mobile website? These Hybrid apps can work and you would have the best of both worlds… right? Not exactly. The problem with an app is that it has to feel like an app. And an iPhone app feels differently from an Android app. So your mobile website must behave accordingly. Of course this can be resolved by creating two mobile websites. Ding ding ding! But we want a winner! For his talk, Dario Buzzini used the detective novel “The Manual of Detection” as a guide to UX practice, backed up with examples from his work at IDEO. Starting with the poetic (and somewhat provocative) statement: “We designers, we write stories not manuals, we design experiences not procedures, strive for beauty not truth”, he picked 11 quotes from the book that had relevance to UX. In relation to skills needed in a job — you need to have more than one in order to seem as if you should be there! On Language — As an investigator, you need to know how to talk the right languages, objects have memory, too. Surgery situation — the nurse is touching the patient’s hand not only to comfort them, but also to measure anaesthetic. They realised in the surgery situation that the gadget for the nurse with stylus would eventually be used just with thumb! On Leads — follow them, to let them go. Often your first idea may be the best, but it can’t be your only one. IDEO has a parking lot for ideas on their whiteboards, so that designers get their ideas out and move on. On Documentation — most is for the wishing well, not a file. Buzzini stressed that it should be actionable (echoing Dan Brown’s talk on documentation the day before). On Nemeses — important to find your opposites. 
IDEO create partner teams for projects (apps etc) where both sit and work together. Can be difficult but helpful. On Bluffing — If you’re caught in a lie, lie again. On interrogation—the process begins long before you are alone in a room together. By then, you should already know your answers. Cryptology — be careful what you dig up, it’s yours. Designers haven’t helped people with banking, making it hard for them to understand what happens with their money. Banksimple is using diagrams to help with that. On Solutions — a good detective tries to know everything, a great one knows just enough to see him through to the end. On Dream Detection — be careful to check whether what you’ve seen is real or a fallacy. Check exactly who it is you’re designing for. Another interesting tip he provided was to think about where the complexity occurs in a product system (aka Tesler’s Law of the Conservation of Complexity). For example, with coffee machines, in a manual it occurs with the user (making the coffee), a semi-automatic in the machine, a pod model in the packaging. He finally echoed other speakers such as Halvorson with his reminder that it doesn’t matter if a design is bad unless it starts to affect sales. By the end of the day, most people had managed to collect most (if not all) of their UX Trump Cards (apparently Bill Buxton and Robert Hockman Jnr were particularly hard to find) and Mental Notes mini-sets. While the fabulous location was a given, UXLX excelled in running a tight ship — speakers were kept to time so the four rooms never got out of sync, a common problem with conferences — and a line up of quality speakers. It’d be great to see some more local/European speakers (a prime example was how Netflix — a service that isn’t available in Europe— was used as a case study several times), but given the diverse crowd, hopefully some will cross the line from participant to speaker next year. 
Patrick Sanwikarja is an interaction designer at Fabrique Communication and Design in Delft, The Netherlands. He wants to make the future happen by designing it. Miranda de Groot is a User Experience Specialist & Evangelist at ReedBusiness. Pingback: Innovative UX thinking « rastplatznotizen. Pingback: Apps vs Web sites « rastplatznotizen. Pingback: Don Norman: Living with complexity « rastplatznotizen.
"""This file contains code used in "Think Stats", by Allen B. Downey, available from greenteapress.com Copyright 2010 Allen B. Downey License: GNU GPLv3 http://www.gnu.org/licenses/gpl.html """ import math import matplotlib.pyplot as pyplot import myplot import Pmf def NormalPdf(x): """Computes the PDF of x in the standard normal distribution.""" return math.exp(-x**2/2) / math.sqrt(2 * math.pi) def Linspace(start, stop, n): """Makes a list of n floats from start to stop. Similar to numpy.linspace() """ return [start + (stop-start) * float(i)/(n-1) for i in range(n)] def RenderPdf(mu, sigma, n=101): """Makes xs and ys for a normal PDF with (mu, sigma). n: number of places to evaluate the PDF """ xs = Linspace(mu-4*sigma, mu+4*sigma, n) ys = [NormalPdf((x-mu) / sigma) for x in xs] return xs, ys def main(): xs, ys = RenderPdf(100, 15) n = 34 pyplot.fill_between(xs[-n:], ys[-n:], y2=0.0001, color='blue', alpha=0.2) s = 'Congratulations!\nIf you got this far,\nyou must be here.' d = dict(shrink=0.05) pyplot.annotate(s, [127, 0.02], xytext=[80, 0.05], arrowprops=d) myplot.Plot(xs, ys, clf=False, show=True, title='Distribution of IQ', xlabel='IQ', ylabel='PDF', legend=False ) if __name__ == "__main__": main()
Until it is awkward. Until it is disturbing. Until she cracks. Lilliam Rivera discusses her new novel, DEALING IN DREAMS. I eat, and eat, and eat, and mourn. I’ve only had breast cancer once and already I know too much. Nicole Chung discusses ALL YOU CAN EVER KNOW. We seldom forget when people promise to give us something, whether we need or want that thing or not. I promise you death, you want a death. A comic describing the illustrator’s childhood adventures and misadventures climbing rooftops of all sorts. There is still light in the dark. This is the paradox that Little Bear has to accept in order to fall asleep. “Sister Love” touches on how a traditional family can fail the victim of domestic violence, and how that failure can compound the victim’s trauma. Lidija Dimkovska discusses A Spare Life, living through the break-up of Yugoslavia, her writing style, and where she now feels most at home. The female body here is as palpable as image. As the images and objects transform, so does the female’s body. I wore sobriety like a shirt that was too tight in the shoulders, and everyone around me knew it. Gayle Brandeis discusses her memoir, The Art of Misdiagnosis, out today from Beacon Press. Marisa Crawford’s Reversible is an evocative collection, showcasing the ways in which pop culture saturates us with meaning, and how it teaches us to become. Julie Buntin discusses her debut novel, Marlena, why writing about teenage girls is the most serious thing in the world, and finding truths in fiction. Alana Massey discusses her debut collection, All the Lives I Want, the best piece of writing advice she’s ever received, and acknowledging the work that women do. Naomi Jackson discusses her debut novel, The Star Side of Bird Hill, how she approached writing about mental illness and its effects on a family, and choosing to tell a story from multiple perspectives. A flash-fire covered the horizon all around and behind her, and my mother glowed genuine blue. 
I saw her skeleton, or maybe her white-hot soul. Something flew up and around our heads. It would be simple to say that she is missing the internal formulation that makes one enthusiastic about dogs. And that would be true, partially. Was she, as their mother once said, a cold fish? Many days I couldn’t see the way forward, but I kept going, the way you had. It was you, after all, who taught me how to stay.
# -*- coding: utf-8 -*-
"""
Created on Thu May 12 16:25:02 2016

@author: huliqun

Manual smoke test against a locally running user/auth REST API:
registers a user, then requests authentication with a 3DES-encrypted
identify code, then performs two GET requests against /api/users.
Intended to be run by hand with the server listening on localhost.
"""
import requests
import json
import uuid
import base64

_SERVER_HOST = '127.0.0.1'
_SERVER_PORT = 8000
_SERVER_BASE_URL = 'http://{0}:{1}/api/users'.format(_SERVER_HOST, _SERVER_PORT)

# --- 1. register a user ---
headers = {'content-type': 'application/json'}
body = '{"username":"wahaha@qq.com","displayname":"wahaha","email":"wahaha@qq.com","password":"123456","mobile":"18698729476"}'
#resp = requests.get(_SERVER_BASE_URL)
resp = requests.post(_SERVER_BASE_URL, headers=headers, data=body)
print(resp.text)
print(resp)

headers = {'Authorization': '3161cc5a950fead158ebe803f7e56822',
           'Account-ID': '111111111111111',
           'content-type': 'application/json'}
password = '123456'
#resp = requests.get(_SERVER_BASE_URL, headers=headers,data=body)
#print(resp.text)
#print(resp)

import pyDes
import hashlib


def md5(s):
    """Return the raw 16-byte MD5 digest of ``s`` (used as the 3DES key).

    NOTE(review): an MD5-derived key and (3)DES are cryptographically
    weak; kept only because the server's existing handshake expects it.
    """
    m = hashlib.md5()
    m.update(s.encode("utf-8"))
    return m.digest()


# BUGFIX: under Python 3 (this script already uses bytes-aware
# .encode()/.decode() calls below) pyDes requires *bytes* for the IV and
# for the data passed to encrypt(); the original code passed str values,
# which raises an error.  Encode everything explicitly.
data = str(uuid.uuid4()).replace('-', '').encode('utf-8')
k = pyDes.triple_des(md5('123456'), pyDes.CBC, b"\0\0\0\0\0\0\0\0",
                     pad=None, padmode=pyDes.PAD_PKCS5)
d = base64.b64encode(k.encrypt(data)).decode()
idf = base64.b64encode(k.encrypt('wahaha@qq.com'.encode('utf-8'))).decode()

# --- 2. authenticate with the encrypted identify code ---
headers = {'content-type': 'application/json'}
bodyData = {
    'username': 'wahaha@qq.com',
    'identifyCode': idf
}
print(idf)
body = json.dumps(bodyData)
print("Encrypted: %r" % idf)
print("Decrypted: %r" % k.decrypt(base64.b64decode(idf.encode())).decode())

_SERVER_BASE_URL = 'http://{0}:{1}/api/auth'.format(_SERVER_HOST, _SERVER_PORT)
resp = requests.get(_SERVER_BASE_URL, headers=headers, data=body)
print(resp.text)
print(resp)

# --- 3. hit /api/users with a session cookie ---
headers = {'Cookie': 'awesession=c7f406241bcc49209eb58a527520e051-1465822334-fe92174a8e3956edc8befc20911a0b54c8f7b2db; Domain=aaaa.com;',
           'content-type': 'application/json'}
_SERVER_BASE_URL = 'http://{0}:{1}/api/users'.format(_SERVER_HOST, _SERVER_PORT)
resp = requests.get(_SERVER_BASE_URL, headers=headers, data=body)
print(resp.text)
print(resp)
I happen to be a doctor of Pastoral Medicine so naturally I am very happy that I developed a system of medicine that serves humankind more effectively than traditional allopathic medicine. When the world economy collapses, modern medicine will suffer a terrible fall. Money will dry up for expensive pharmaceutical medicines that poison more than they cure. Doctors will have to scramble for alternatives if they want to continue to help their patients. But such alternatives have to be based on solid, empirical evidence and research for doctors to pay much attention. My protocol that I call “Natural Allopathic Medicine” is rational and scientifically-based; pharmaceutical medicine is not. Pharmaceutical terrorism (born with I.G. Farben and the Nazi doctors) is a very real and dangerous thing and so is medical insanity, of which there is no shortage. Through the years I have written about these subjects but this week I was introduced to yet another example of pharmaceutical terrorism and medical insanity coming together. The wife of a good friend of mine started having some serious heart problems and was taken to a hospital in Brasilia, the federal capital of Brazil. In seeking a solution to her problem she saw 14 doctors who prescribed 15 different pharmaceuticals, which she is now taking all at the same time. There is not one doctor, pharmaceutical company or medical official who has any idea what will happen when these 15 chemical pharmaceutical drugs are mixed together. Each one has side effects, which will be complicated and amplified by the presence of other drugs. Going into this uncharted treatment protocol, she had heart and lung inflammation. Now she is bleeding from lungs and large intestines. We cannot feel too badly for this woman because she rejected outright a more natural approach. But we can feel badly for her four daughters and her husband who might not have her around for too much longer. 
Who we trust says a lot about us and determines to a great extent what happens to us in our lives. Trusting the western medical establishment can end one up very dead, bankrupt or having a host of new medical problems that lead doctors to prescribe even more dangerous drugs. One cannot ever hope to practice pharmaceutical medicine safely. Not only are all pharmaceuticals dangerous (even their safest medicines like aspirin are deadly) but none of them have been tested in combination with other drugs. The medical system is making guinea pigs of its patients by blindly prescribing drugs in combination that have never been tested together. Sounds like medical blasphemy, which is what contemporary medicine has embraced. It’s bad enough that doctors use medicines and procedures that cause cancer in their wretched way of treating it. No one caught on for decades that pediatricians were injecting babies with heavy metals like mercury and aluminum that lace many vaccines. The list is endless, enough so that hundreds of thousands of people die each year from “properly prescribed” medicines. So really we do need a new medicine that goes mainstream. Contemporary medicine has to be reborn with new principles and practices. As it is it can hardly be seriously considered as anything even close to the appropriate practice of medicine. Unfortunately change comes at a snail’s pace for humans and their institutions. No one is holding their breath waiting for a medical revolution because all the professional fat cats at the top of the medical world are too fond of their privileges and pleasures to throw in the towel or become humble enough to admit the errors of their ways. I have tried my best to address the above and feel like I have succeeded. Natural Allopathic Medicine actually grew out of the best of emergency room and intensive care medicine, both of which apply concentrated nutritional medicines when all else fails. 
Magnesium chloride and sodium bicarbonate are at the heart of my medical approach and they are the best workhorse medicines emergency departments have. So I am putting out a request to my wide readership to send in their testimonies about their use, in full or in part, of my protocols. This would help me thrust my medicine further out into the world. For testimonies already received, please visit http://blog.imva.info/medicine/natural-allopathic-medicine-cures. The rules and conditions of life are changing and it is going to be a struggle for survival. Our already toxic world is getting more contaminated and radioactive, with modern dentistry and medicine adding significantly to this problem with their use of heavy metals, lethal drugs and radiation. The majority of people are meeting the future with significantly weakened bodies and immune systems because of dangerous deficiencies in iodine, magnesium and other essential minerals and nutrients. The strength of the cells’ adaptive capacity is directly related to nutritional sufficiency and proper cellular respiration that removes toxins and wastes through the cell wall. Cells can respond to stress in various ways ranging from the activation of survival pathways to the initiation of cell death that eventually eliminates damaged cells. Whether cells mount a protective or destructive stress response depends on different factors, but the most important is nutritional status. Bicarbonate deficiencies are also a problem, thus the reason sodium bicarbonate makes such a great medicine. Natural Allopathic Medicine uses only safe, non-toxic, non-synthetic medicinals that do not add to the toxic burden of the body. Most of the items in the protocol can be taken orally, transdermally or administered intravenously. Transdermal applications include topical application directly on the skin, intense medical baths, nebulization into lung tissues or in the use of glutathione suppositories and natural enemas. 
I never fell into the trap of over-identifying with any single medicinal as the answer to everything or anything. I am a confirmed protocol man and ALWAYS deal with many medicinals simultaneously. But still, no matter how deeply I have gotten into other medicines, magnesium chloride remains the heavyweight champion and ALWAYS needs to be included in EVERYONE’S daily regimen.
import sys, os, csv, pprint, math #sys.path.append(os.path.join(os.path.dirname(__file__), '..')) ## uncomment when running under CLI only version ## #import matplotlib #matplotlib.use('Agg') #sys.path.append("/shared/storage/cs/staffstore/hrm506/simpy-3.0.5/") #sys.path.append("/shared/storage/cs/staffstore/hrm506/networkx-1.10/networkx-1.10") from collections import OrderedDict import numpy as np import traceback from collections import Iterable import re import pylab import random import shutil import math import matplotlib import matplotlib.pyplot as plt plt.style.use('ggplot') from mpl_toolkits.mplot3d import Axes3D import scipy.stats import scipy.optimize as scipy_optimize import itertools from matplotlib.colors import ListedColormap, NoNorm, rgb2hex from matplotlib import mlab from itertools import cycle # for automatic markers import json from operator import itemgetter from scipy import stats from collections import Counter import multiprocessing #from scipy.stats import gaussian_kde import matplotlib.ticker import matplotlib.cm as cm import matplotlib.patches as patches from matplotlib.font_manager import FontProperties from SimParams import SimParams SHOW_PLOTS = True USE_MULTIPROCESSING = False NOC_H = 8 NOC_W = 8 MAX_SEEDS = 30 FAILED_SEEDS = [] #RANDOM_SEEDS =[s for s in RANDOM_SEEDS if s not in FAILED_SEEDS][:MAX_SEEDS] RANDOM_SEEDS_MISC=[33749, 43894, 26358, 80505] RANDOM_SEEDS = RANDOM_SEEDS_MISC print len(set(RANDOM_SEEDS)) #sys.exit() EXP_DATADIR = "Z:/MCASim/experiment_data/hevc_tile_mapping_kaushikTuner/" DATA_TEMP_SAVE_LOC = "../experiment_data/hevc_tile_mapping_kaushikTuner/" CMB_ID = 912 MMP_ID = 0 #global_mp_order = [d['lbl'] for d in global_types_of_tests] KAUSHIK_COMMS_SCALE_FACTOR = [6, 9, 12, 15, 20, 25, 30, 35, 40, 45, 50] WORKLOAD_KEY = "WL2" def _save_data(fname, data): final_fname = DATA_TEMP_SAVE_LOC + fname logfile=open(final_fname, 'w') json_data = json.dumps(data) logfile.write(json_data) logfile.close() def _load_data(fname): 
final_fname = DATA_TEMP_SAVE_LOC + fname json_data=open(final_fname) data = json.load(json_data) return data def _gen_exp_key (cmb, mmp, ksf): exp_key = "cmb"+str(cmb)+ \ "mmp"+str(mmp)+ \ "ksf"+str(ksf) return exp_key def _get_final_fname(fname, exp_key, wl_cfg, seed): subdir1 = EXP_DATADIR + wl_cfg + "/" + exp_key + "/" subdir2 = subdir1 + "seed_"+str(seed)+"/" fname_prefix = "HEVCTileSplitTest__" + exp_key + "_" + str(NOC_H)+"_"+str(NOC_W)+"_" finalfname_completedtasks = subdir2 + fname_prefix + fname return finalfname_completedtasks def _normalise_list(lst, norm_min=None, norm_max=None): if norm_max == None: norm_max = np.max(lst) if norm_min == None: norm_min = np.min(lst) new_list = [] for each_l in lst: x = each_l norm_val = (x-norm_min)/(norm_max-norm_min) new_list.append(norm_val) return new_list def boxplot_colorize(bp, param_col, fc='#B8DCE6'): i=0 ## change outline color, fill color and linewidth of the boxes for box in bp['boxes']: # change outline color box.set( color='#000000', linewidth=1) # change fill color box.set( facecolor = param_col) i+=1 ## change color and linewidth of the whiskers for whisker in bp['whiskers']: whisker.set(color='#000000', linewidth=1, linestyle='-') ## change color and linewidth of the caps for cap in bp['caps']: cap.set(color='#000000', linewidth=1) ## change color and linewidth of the medians for median in bp['medians']: median.set(color='#000000', linewidth=1) ## change the style of fliers and their fill for flier in bp['fliers']: flier.set(marker='x', color='red', alpha=0.5) def plot_CommsOverhead_and_GoPLateness_Combined(load_data=False, show_plots=False): data_fname_comms = "plot_comms.json" data_fname_gopl = "plot_gopl.json" ### get data #### alldata_perseed_commsoverhead = OrderedDict() alldata_perseed_goplateness = OrderedDict() if load_data==True: alldata_perseed_commsoverhead = _load_data(data_fname_comms) alldata_perseed_goplateness = _load_data(data_fname_gopl) else: for each_ksf in 
KAUSHIK_COMMS_SCALE_FACTOR: alldata_perseed_commsoverhead[each_ksf] = None alldata_perseed_goplateness[each_ksf] = None # which exp condition ? exp_key = _gen_exp_key( CMB_ID, MMP_ID, each_ksf ) exp_lbl = each_ksf each_seed_data_comms = [] each_seed_data_goplateness = [] for each_seed in RANDOM_SEEDS: # get filename finalfname_comms = _get_final_fname("_flwcompletedshort.js", exp_key, WORKLOAD_KEY, each_seed) finalfname_gopsummary = _get_final_fname("_gopsopbuffsumm.js", exp_key, WORKLOAD_KEY, each_seed) try: print "getting : ", finalfname_comms ## get file data json_data=open(finalfname_comms) file_data = json.load(json_data) flows_bl = [f[0] for f in file_data['flows_completed'] if f[2] in [1,15]] flows_payload = [_get_payload_from_flowbl(bl) for bl in flows_bl] flows_bl_sum = np.sum(flows_payload) # save each_seed_data_comms.append(flows_bl_sum) print "getting : ", finalfname_gopsummary ## get file data json_data=open(finalfname_gopsummary) file_data = json.load(json_data) gop_lateness_dist = [g['gop_execution_lateness'] for gid, g in file_data.iteritems()] # save each_seed_data_goplateness.extend(gop_lateness_dist) except Exception, e: tb = traceback.format_exc() print tb sys.exit(e) alldata_perseed_commsoverhead[each_ksf] = each_seed_data_comms alldata_perseed_goplateness[each_ksf] = each_seed_data_goplateness # save data if load_data==False: _save_data(data_fname_comms, alldata_perseed_commsoverhead) _save_data(data_fname_gopl, alldata_perseed_goplateness) if show_plots==False: return ### plot data #### fig, ax1 = plt.subplots() fig.canvas.set_window_title('plot_GopL_CommsOvh_Combined') ydata_comms = [np.mean(alldata_perseed_commsoverhead[str(k)]) for k in KAUSHIK_COMMS_SCALE_FACTOR] ydata_gopl = [alldata_perseed_goplateness[str(k)] for k in KAUSHIK_COMMS_SCALE_FACTOR] xdata = np.arange(len(KAUSHIK_COMMS_SCALE_FACTOR)) ax1.boxplot(ydata_gopl, positions=xdata) ax1.set_ylabel('GoPLateness') ax2 = ax1.twinx() ax2.plot(xdata, ydata_comms, 'r-', linewidth=2) 
ax2.set_ylabel('CommOverhead', color='r') print "---" plt.grid(axis='y',b=True, which='major', color='k', linestyle='--', alpha=0.3) plt.grid(axis='y',b=True, which='minor', color='k', linestyle='-', alpha=0.2) plt.minorticks_on() ax1.tick_params(axis = 'y', which = 'both') ax2.tick_params(axis = 'y', which = 'both') #plt.ticklabel_format(style='sci', axis='x', scilimits=(0,0), labelsize=20) #plt.tick_params(axis='both', which='major', labelsize=16) #plt.tick_params(axis='both', which='minor', labelsize=16) #plt.rc('font', **{'size':'16'}) #ax.set_xticks(ind+0.5) ax1.set_xticks(xdata) ax1.set_xticklabels(KAUSHIK_COMMS_SCALE_FACTOR, rotation=40) def _get_payload_from_flowbl(flw_bl): p = SimParams.NOC_PERIOD payload = (16.0*(flw_bl - (70.0*p*p)))/p return payload def _write_formatted_file(fname, data, format): if(format == "pretty"): logfile=open(fname, 'w') pprint(data, logfile, width=128) elif(format == "json"): logfile=open(fname, 'w') json_data = json.dumps(data) logfile.write(json_data) else: logfile=open(fname, 'w') pprint(data, logfile, width=128) def func_fit_data(x, a, b, c): return a * np.exp(-b * x) + c ################################### # HELPERS ################################### ################################### # MAIN ################################### if __name__ == "__main__": plot_CommsOverhead_and_GoPLateness_Combined(load_data=True, show_plots=True) plt.show() class LogFormatterTeXExponent(pylab.LogFormatter, object): """Extends pylab.LogFormatter to use tex notation for tick labels.""" def __init__(self, *args, **kwargs): super(LogFormatterTeXExponent, self).__init__(*args, **kwargs) def __call__(self, *args, **kwargs): """Wrap call to parent class with change to tex notation.""" label = super(LogFormatterTeXExponent, self).__call__(*args, **kwargs) label = re.sub(r'e(\S)0?(\d+)', r'\\times 10^{\1\2}', str(label)) label = "$" + label + "$" return label
The fun begins after you buy your dream BMW car or SUV from BMW of Dayton. Keep your luxury vehicle in prime condition with regular visits to our BMW service center in Dayton, Ohio. We offer routine maintenance, warranty services, as well as more advanced services like engine repairs. As a certified DINAN retailer in the Dayton area, our service professionals utilize genuine DINAN performance parts to keep your BMW vehicle in top form. You'll never get anything but the best at our dealership. Get in touch to learn more! Service your BMW vehicle at your trusted Dayton, OH BMW dealership. When you buy a luxury BMW sedan or SUV from BMW of Dayton, you want to make sure it runs to its potential. Visit our BMW repair center in Dayton, OH and our friendly service professionals will treat you and your vehicle with the attention and care you deserve. From routine oil changes to brake system repairs, engine diagnostics to transmission services, our team is here to help. Call our Dayton, OH BMW service center today to schedule your next appointment. If your car is in need of a regular tune-up or extensive repairs, contact BMW of Dayton. Our highly trained service team is here for you and your luxury vehicle every step of the way. If you're ready for your next appointment, schedule your BMW service in Dayton, OH by contacting our professional service center today.
# -*- coding: utf-8 -*- # All language names have had the words characters, languages and language removed and all remaining words capitalized # 1. The list of languages in ISO 639-1 is from the standard: http://www.loc.gov/standards/iso639-2/ # 2. The list of languages in wikidata was taken from the 'original language of work' properties of all books in the dump from # 20160215, and may not be complete. # These are the primary language names and their codes from ISO 639-1, or in rare cases, from ISO 639-2 # nameToIso639Id = { "Abkhazian": "ab", "Afar": "aa", "Afrikaans": "af", # Found in wikidata "Akan": "ak", "Albanian": "sq", "Algonquian": "alg", # No similar language in ISO 639-1 "Amharic": "am", # Found in wikidata "Ancient Greek": "grc", # Found in wikidata "Arabic": "ar", # Found in wikidata "Aragonese": "an", # Found in wikidata "Aramaic": "arc", # Found in wikidata "Armenian": "hy", # Found in wikidata "Assamese": "as", # Found in wikidata "Avaric": "av", "Avestan": "ae", "Awadhi": "awa", # Found in wikidata "Aymara": "ay", "Azerbaijani": "az", # Found in wikidata "Bambara": "bm", "Bantu": "bnt", # No similar language in ISO 639-1 "Bashkir": "ba", "Basque": "eu", # Found in wikidata "Belarusian": "be", # Found in wikidata "Bengali": "bn", # Found in wikidata "Berber": "ber", # No similar language in ISO 639-1 "Bihari": "bh", "Bislama": "bi", "Bosnian": "bs", # Found in wikidata "Breton": "br", # Found in wikidata "Bulgarian": "bg", # Found in wikidata "Burmese": "my", # Found in wikidata "Catalan": "ca", # Found in wikidata "Central Khmer": "km", "Chamorro": "ch", "Chechen": "ce", "Chichewa": "ny", "Chinese": "zh", # Found in wikidata "Chuvash": "cv", "Cornish": "kw", "Corsican": "co", "Cree": "cr", "Croatian": "hr", # Found in wikidata "Czech": "cs", # Found in wikidata "Danish": "da", # Found in wikidata "Divehi": "dv", "Dutch": "nl", # Found in wikidata "Dzongkha": "dz", "English": "en", "Esperanto": "eo", # Found in wikidata "Estonian": "et", # Found 
in wikidata "Ewe": "ee", "Faroese": "fo", # Found in wikidata "Fijian": "fj", "Filipino": "fil", # Found in wikidata "Finnish": "fi", # Found in wikidata "French": "fr", # Found in wikidata "Fulah": "ff", "Galician": "gl", # Found in wikidata "Ganda": "lg", "Georgian": "ka", # Found in wikidata "German": "de", # Found in wikidata "Greek": "el", # Found in wikidata "Guaraní": "gn", "Gujarati": "gu", # Found in wikidata "Haitian": "ht", "Hausa": "ha", "Hawaiian": "haw", # No similar language in ISO 639-1 "Hebrew": "he", # Found in wikidata "Herero": "hz", "Hindi": "hi", # Found in wikidata "Hiri Motu": "ho", "Hmong": "hmn", # Found in wikidata "Hungarian": "hu", # Found in wikidata "Icelandic": "is", # Found in wikidata "Ido": "io", "Igbo": "ig", "Indonesian": "id", # Found in wikidata "Interlingua": "ia", "Interlingue": "ie", "Inupiaq": "ik", "Irish": "ga", # Found in wikidata "Italian": "it", # Found in wikidata "Inuktitut": "iu", "Japanese": "ja", # Found in wikidata "Javanese": "jv", "Judeo-Arabic": "jrb", # Found in wikidata "Kalaallisut": "kl", "Kannada": "kn", # Found in wikidata "Kanuri": "kr", "Karelian": "krl", # Found in wikidata "Kashubian": "csb", # Found in wikidata "Kashmiri": "ks", "Kazakh": "kk", "Kikuyu": "ki", "Kinyarwanda": "rw", "Kirundi": "rn", "Komi": "kv", "Kongo": "kg", "Konkani": "kok", # Found in wikidata "Korean": "ko", # Found in wikidata "Kurdish": "ku", "Kwanyama": "kj", "Kyrgyz": "ky", "Ladino": "lad", # Found in wikidata "Latin": "la", # Found in wikidata "Latvian": "lv", "Luxembourgish": "lb", "Limburgish": "li", "Lingala": "ln", "Lao": "lo", "Lithuanian": "lt", # Found in wikidata "Luba-Katanga": "lu", "Manx": "gv", "Macedonian": "mk", # Found in wikidata "Malagasy": "mg", "Malay": "ms", # Found in wikidata "Malayalam": "ml", # Found in wikidata "Maltese": "mt", "Manx": "gv", "Maori": "mi", "Marathi": "mr", # Found in wikidata "Marshallese": "mh", "Mayan": "myn", # No similar language in ISO 639-1 "Mongolian": "mn", "Nahuatl": 
"nah", # Found in wikidata. No similar language in ISO 639-1 "Nauru": "na", "Navajo": "nv", "Ndonga": "ng", "Neapolitan": "nap", # Found in wikidata "Nepali": "ne", # Found in wikidata "North Ndebele": "nd", "Northern Sami": "se", "Norwegian": "no", # Found in wikidata "Norwegian Bokmål": "nb", "Norwegian Nynorsk": "nn", "Nuosu": "ii", "Southern Ndebele": "nr", "Occitan": "oc", # Found in wikidata "Ojibwe": "oj", "Old Church Slavonic": "cu", # Found in wikidata "Old Norse": "non", # Found in wikidata "Oriya": "or", "Oromo": "om", "Ossetian": "os", "Pali": "pi", # Found in wikidata "Pashto": "ps", "Persian": "fa", # Found in wikidata "Polish": "pl", # Found in wikidata "Portuguese": "pt", # Found in wikidata "Prakrit": "pra", # Found in wikidata "Punjabi": "pa", # Found in wikidata "Quechua": "qu", "Romansh": "rm", # Found in wikidata "Romanian": "ro", # Found in wikidata "Rundi": "rn", "Russian": "ru", # Found in wikidata "Samoan": "sm", "Sango": "sg", "Sanskrit": "sa", # Found in wikidata "Sardinian": "sc", "Scottish Gaelic": "gd", # Found in wikidata "Serbian": "sr", # Found in wikidata "Shona": "sn", "Sicilian": "scn", # Found in wikidata "Sindhi": "sd", # Found in wikidata "Sinhala": "si", # Found in wikidata "Slovak": "sk", # Found in wikidata "Slovenian": "sl", # Found in wikidata "Somali": "so", "South Ndebele": "nr", "Southern Sotho": "st", "Spanish": "es", # Found in wikidata "Sundanese": "su", "Swahili": "sw", "Swati": "ss", "Swedish": "sv", # Found in wikidata "Tahitian": "ty", "Tajik": "tg", "Tamil": "ta", # Found in wikidata "Tatar": "tt", "Telugu": "te", # Found in wikidata "Tajik": "tg", "Thai": "th", # Found in wikidata "Tigrinya": "ti", "Tibetan": "bo", "Tswana": "tn", "Tonga": "to", "Tsonga": "ts", "Tswana": "tn", "Turkish": "tr", # Found in wikidata "Turkmen": "tk", "Tsonga": "ts", "Tatar": "tt", "Twi": "tw", "Tahitian": "ty", "Uyghur": "ug", "Ukrainian": "uk", # Found in wikidata "Urdu": "ur", # Found in wikidata "Uto-Aztecan": "azc", # No 
similar language in ISO 639-1 "Uzbek": "uz", "Venda": "ve", "Vietnamese": "vi", # Found in wikidata "Volapük": "vo", "Walloon": "wa", "Welsh": "cy", # Found in wikidata "Wolof": "wo", "Western Frisian": "fy", "Xhosa": "xh", "Yiddish": "yi", # Found in wikidata "Yoruba": "yo", "Zhuang": "za", "Zulu": "zu" } # These are aliases from ISO 639-1 and wikidata, and the closest ISO 639-1 codes (or, in a few cases, 639-2 codes). The wikidata # language aliases have been forced to the nearest 639-1 code whereever possible, with comments indicating that there is a better # fit in 639-2 if that is the case. This was done to keep the number of lanaguage codes to a minimum, but it means (for example) # that all Aryan languages map to "hi" (Hindi) # aliasToIso639Id = { "American English": "en", # From wikidata "Australian English": "en", # From wikidata "Austrian German": "de", # From wikidata "Bahasa Melayu Sabah": "ms", # From wikidata "Bangla": "bn", "Bhojpuri": "bh", u"Bokm\u00e5l": "nb", # From wikidata "Brazil": "pt", # From wikidata "Brazil Portuguese": "pt", # From wikidata "Brazilian Portuguese": "pt", # From wikidata "British English": "en", # From wikidata "Burgundian": "de", # From wikidata: Similar to Gothic, which has its own code in ISO 639-2, "got" "Canadian English": "en", # From wikidata "Castilian": "es", u"Catal\u00e1n": "ca", # From wikidata "Central": "bo", "Chewa": "ny", "Chuang": "za", "Church Slavic": "cu", "Church Slavonic": "cu", # From ISO 639-1, found in wikidata "Classical Armenian": "hy", # From wikidata "Classical Chinese": "zh", # From wikidata "Classical Nahuatl": "nah", # From wikidata "Common Brittonic": "br", # From wikidata "Dhivehi": "dv", "Early Modern English": "en", # From wikidata "Early Modern Spanish": "es", # From wikidata "Early New High German": "de", # From wikidata "Egyptian Arabic": "ar", # From wikidata "Farsi": "fa", "Tagalog": "fil", # Found in wikidata. Intentionally categorized as Filipino, not "tl" (Tagalog). 
"Flemish": "nl", # From ISO 639-2, found in wikidata "France": "fr", # From wikidata "Fulah": "ff", "Gaelic": "gd", "Geordie Dialect": "en", # From wikidata "Gikuyu": "ki", "Greenlandic": "kl", "Haitian Creole": "ht", "Hawaiian Pidgin": "haw", # From wikidata "Hiberno-English": "en", # From wikidata "Hopi": "azc", # From wikidata. Not in ISO 639-1. An Uto-Aztecan language: "azc" in ISO 639-5 "Indian English": "en", # From wikidata "Italiano Moderno": "it", # From wikidata "Jamaican Patois": "en", # From wikidata. No similar in ISO 639-1. English based creole: "cpe" in ISO 639-2 "Kalaallisut": "kl", "Kanbun": "ja", # From wikidata. Annotated Classical Chinese that can be read in Japanese "Katharevousa": "el", # From wikidata "Kerewe": "bnt", # From wikidata. No similar in ISO 639-1. A Bantu language: "bnt" in ISO 639-2 "Khmer": "km", # From in wikidata "Kirghiz": "ky", "Koine Greek": "el", # From wikidata "Kuanyama": "kj", "Late Old Japanese": "ja", # From wikidata "Letzeburgesch": "lb", "Limburgan": "li", "Limburger": "li", "Luganda": "lg", "Magahi": "bh", "Maghrebi Arabic": "ar", # From wikidata "Maithili": "bh", # From ISO 639-1. Found in wikidata "Malaysian": "ms", # From wikidata "Malay Trade And Creole": "ms", # From wikidata "Maldivian": "dv", "Mandarin Chinese": "zh", # From wikidata "Manglish": "en", # From wikidata "Massachusett": "alg", # From wikidata. No similar in ISO 639-1. An Algonquian language: "alg" ISO 639-2 "Medieval Latin": "la", # From wikidata "Middle English": "en", # From wikidata. Middle English has its own code in ISO 639-2, "enm" "Middle French": "fr", # From wikidata. Middle French has its own code in ISO 639-2, "frm" "Mittelalterliches Aragonesisch": "an", # From wikidata "Modern Greek": "el", # From wikidata "Moldavian": "ro", "Moldovan": "ro", "Mon": "km", # From wikidata. Mon-Khnmer languages have there own code in ISO 639-2, "mkh" "Navaho": "nv", "Netherlands": "nl", # From wikidata "Nigerian Pidgin": "en", # From wikidata. 
No similar in ISO 639-1. English based creole: "cpe" in ISO 639-2 "Nyanja": "ny", "Nynorsk": "nn", # From wikidata "Occidental": "ie", "Odia": "hi", # From wikidata "Ojibwa": "oj", "Old Bulgarian": "cu", "Old Chinese": "zh", # From wikidata "Old East Slavic": "cu", # From wikidata "Old French": "fr", # From wikidata. Old French has its own code in ISO 639-2, "fro" "Old Slavonic": "cu", "Old Spanish": "es", # From wikidata "Ossetic": "os", "Panjabi": "pa", "Philippine English": "en", # From wikidata "Pulaar": "ff", "Pular": "ff", "Pushto": "ps", "Quebec French": "fr", # From wikidata u"Radical Bokm\u00e5l": "nb", # From wikidata "Ruthenian": "cu", # From wikidata "Scots": "gd", # From wikidata "Scottish English": "en", # From wikidata "Serbo-Croatian": "sr", # From wikidata "Shan": "th", # From wikidata. Tai languages have there own code in ISO 639-2, "tai" "Sichuan Yi": "ii", "Sinhalese": "si", "Slovene": "sl", # From in wikidata "Spanish In The Philippines": "es", # From wikidata "Standard Chinese": "zh", # From wikidata "Taglish": "fil", # From wikidata. Tagalog using some English words "Tuareg": "ber", # From wikidata. No similar in ISO 639-1. A Berber language: "ber" from ISO 639-2 "Tibetan Standard": "bo", "Traditional Chinese": "zh", # From wikidata "Uighur": "ug", "Valencian": "ca", # From wikidata "Western Armenian": "hy", # From wikidata "Written Vernacular Chinese": "zh", # From wikidata "Yucatec Maya": "myn" # From wikidata. No similar language in ISO 639-1. 
"myn" is from ISO 639-2 } iso639IdToName = None # Constructed on the first call to isoIdToName def nameToIsoId(name): words = name.split(" ") for i in reversed(range(len(words))): if words[i] == "characters" or words[i] == "language" or words[i] == "languages": del words[i] continue words[i] = words[i][0].upper() + words[i][1:] name = " ".join(words) if name in nameToIso639Id: return nameToIso639Id[name] if name in aliasToIso639Id: return aliasToIso639Id[name] raise KeyError(name) def isoIdToName(isoId): global iso639IdToName if not iso639IdToName: iso639IdToName = {} for name in nameToIso639Id: iso639IdToName[nameToIso639Id[name]] = name if isoId in iso639IdToName: return iso639IdToName[isoId] raise KeyError(isoId)
Yellow Jacket Volleyball Camps are a challenging, fun time for girls of all skill levels and ages! All camps feature one-on-one coaching from the Yellow Jacket volleyball staff & players, a camp t-shirt, and housing for overnight campers. Get ready to have some fun and learn a ton with the Yellow Jacket Coaching Staff! Open to girls, grades 8th-12th! Overall skill training and development as well as position specific training. Sand Training clinics are designed for experienced players, aged 12-18, who are looking for skill instruction and conditioning. Open to girls, grades 8th-12th grade who desire to play college volleyball. Our Skills Overnight Camp is designed for players entering grades 6-12 focusing on all aspects of the game with an emphasis on fundamentals. Our Elite Overnight Camp is designed for players entering grades 9-12 focused on high level training. This clinic is designed for players entering grades 9-12. The clinic will focus specifically on individual positions. Our Team Camp offers High School teams the chance to learn new skills, team offensive/defensive systems, and a unique opportunity to train together in preparation for the upcoming season. Yellow Jacket volleyball camps are coached by the Georgia Tech Volleyball Staff who have coaching and playing experience at the high school, college and professional levels. The GTVB Staff will be assisted by other great volleyball coaches and trainers.
#!/usr/bin/env python
import os
import sys

import chpl_compiler
import chpl_platform
import utils
from utils import memoize


@memoize
def get():
    """Return the communication layer setting (CHPL_COMM).

    An explicit CHPL_COMM environment variable wins; otherwise a default
    is chosen from the target platform and compiler.
    """
    comm_val = os.environ.get('CHPL_COMM')
    if comm_val:
        return comm_val

    platform_val = chpl_platform.get('target')
    compiler_val = chpl_compiler.get('target')

    # use ugni on cray-x* machines using the module and supported compiler
    #
    # Check that target arch is not knc. Don't use chpl_arch.get(), though,
    # since it already calls into this get() function. This check only
    # happens for X* systems using the Cray programming environment, so it
    # is safe to assume the relevant craype module will be used that sets
    # CRAY_CPU_TARGET.
    on_cray_x = platform_val.startswith('cray-x')
    supported_compiler = compiler_val in ('cray-prgenv-gnu',
                                          'cray-prgenv-intel')
    not_knc = os.getenv('CRAY_CPU_TARGET', '') != 'knc'
    if (on_cray_x and utils.using_chapel_module()
            and supported_compiler and not_knc):
        return 'ugni'

    # automatically uses gasnet when on a cray-x* or cray-cs machine
    if platform_val.startswith('cray-'):
        return 'gasnet'

    return 'none'


def _main():
    """Print the computed CHPL_COMM value to stdout."""
    sys.stdout.write("{0}\n".format(get()))


if __name__ == '__main__':
    _main()
Create beautiful golden eyes with these avatar contact lenses. These stunning contact lenses have a multitonal yellow-gold iris that makes your eyes look larger, brighter and more defined. They create a gorgeous avatar eye and really make a statement. Funky Contact Lenses make it easy to transform your look – perfect for Halloween, fancy dress and edgy fashion statements.
""" This is an example settings/local.py file. These settings overrides what's in settings/base.py """ from . import base # To extend any settings from settings/base.py here's an example: INSTALLED_APPS = base.INSTALLED_APPS + ('django_nose',) MIDDLEWARE_CLASSES = base.MIDDLEWARE_CLASSES.append('debug_toolbar.middleware.DebugToolbarMiddleware') # Define your database connections DATABASES = { 'default': { 'ENGINE': 'django.db.backends.postgresql_psycopg2', 'NAME': 'cashflow', 'USER': 'brent', 'PASSWORD': 'weasel', 'HOST': 'localhost', 'PORT': '', #'OPTIONS': { # 'init_command': 'SET storage_engine=InnoDB', # 'charset' : 'utf8', # 'use_unicode' : True, #}, #'TEST_CHARSET': 'utf8', #'TEST_COLLATION': 'utf8_general_ci', }, # 'slave': { # ... # }, } # Recipients of traceback emails and other notifications. ADMINS = ( # ('Your Name', 'your_email@domain.com'), ) MANAGERS = ADMINS EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend' CACHES = { 'default': { 'BACKEND': 'django.core.cache.backends.locmem.LocMemCache', } } # SECURITY WARNING: don't run with debug turned on in production! # Debugging displays nice error messages, but leaks memory. Set this to False # on all server instances and True only for development. DEBUG = TEMPLATE_DEBUG = True # Is this a development instance? Set this to True on development/master # instances and False on stage/prod. DEV = True # Hosts/domain names that are valid for this site; required if DEBUG is False # See https://docs.djangoproject.com/en/1.5/ref/settings/#allowed-hosts ALLOWED_HOSTS = [] # SECURITY WARNING: keep the secret key used in production secret! # Hardcoded values can leak through source control. Consider loading # the secret key from an environment variable or a file instead. 
SECRET_KEY = '+hr2(b6t#wa(x2pc_94pudje_%n^#88_kt49xz5q2vkubd=w%('

# Uncomment these to activate and customize Celery:
# CELERY_ALWAYS_EAGER = False  # required to activate celeryd
# BROKER_HOST = 'localhost'
# BROKER_PORT = 5672
# BROKER_USER = 'django'
# BROKER_PASSWORD = 'django'
# BROKER_VHOST = 'django'
# CELERY_RESULT_BACKEND = 'amqp'

## Log settings
# Remove this configuration variable to use your custom logging configuration
LOGGING_CONFIG = None
LOGGING = {
    'version': 1,
    'loggers': {
        'cashflow_project': {
            'level': "DEBUG"
        }
    }
}

# BUG FIX: ('127.0.0.1') is just a parenthesised string, not a tuple, so
# membership tests would match per-character.  The trailing comma makes it
# a real one-element tuple.
INTERNAL_IPS = ('127.0.0.1',)
TEHRAN – Reacting to the twin terrorist attacks in Tehran on Wednesday, Supreme Leader Ayatollah Ali Khamenei said the Iranian nation and government won’t be cowed by the “firecrackers”. “Today’s firecrackers will not influence the people’s will,” the Leader told hundreds of students in Tehran. “Do not forget idealism.” This was the first recommendation of the Leader of the Islamic Revolution to student associations. In the seventh recommendation, he advised student associations on “religiosity and worship in deeds and words” and asked them to act according to religious principles and divine responsibility. During the meeting, student representatives expressed the viewpoints and concerns of the student community for more than two hours.
from __future__ import absolute_import import re from agms.request.request import Request from agms.exception.request_validation_exception import RequestValidationException class HPPRequest(Request): """ A class representing AGMS HPP Request objects. """ def __init__(self, op): Request.__init__(self,op) self._fields = { 'TransactionType': {'setting': '', 'value': ''}, 'Amount': {'setting': '', 'value': ''}, 'Tax': {'setting': '', 'value': ''}, 'Shipping': {'setting': '', 'value': ''}, 'OrderDescription': {'setting': '', 'value': ''}, 'OrderID': {'setting': '', 'value': ''}, 'PONumber': {'setting': '', 'value': ''}, 'RetURL': {'setting': '', 'value': ''}, 'ACHEnabled': {'setting': '', 'value': ''}, 'SAFE_ID': {'setting': '', 'value': ''}, 'Donation': {'setting': '', 'value': ''}, 'UsageCount': {'setting': '', 'value': '9999999'}, 'Internal': {'setting': '', 'value': ''}, 'FirstName': {'setting': '', 'value': ''}, 'LastName': {'setting': '', 'value': ''}, 'Company': {'setting': '', 'value': ''}, 'Address1': {'setting': '', 'value': ''}, 'Address2': {'setting': '', 'value': ''}, 'City': {'setting': '', 'value': ''}, 'State': {'setting': '', 'value': ''}, 'Zip': {'setting': '', 'value': ''}, 'Country': {'setting': '', 'value': ''}, 'Phone': {'setting': '', 'value': ''}, 'Fax': {'setting': '', 'value': ''}, 'EMail': {'setting': '', 'value': ''}, 'Website': {'setting': '', 'value': ''}, 'ShippingFirstName': {'setting': '', 'value': ''}, 'ShippingLastName': {'setting': '', 'value': ''}, 'ShippingCompany': {'setting': '', 'value': ''}, 'ShippingAddress1': {'setting': '', 'value': ''}, 'ShippingAddress2': {'setting': '', 'value': ''}, 'ShippingCity': {'setting': '', 'value': ''}, 'ShippingState': {'setting': '', 'value': ''}, 'ShippingZip': {'setting': '', 'value': ''}, 'ShippingCountry': {'setting': '', 'value': ''}, 'ShippingEmail': {'setting': '', 'value': ''}, 'ShippingPhone': {'setting': '', 'value': ''}, 'ShippingFax': {'setting': '', 'value': ''}, 'ProcessorID': 
{'setting': '', 'value': ''}, 'TransactionID': {'setting': '', 'value': ''}, 'Tracking_Number': {'setting': '', 'value': ''}, 'Shipping_Carrier': {'setting': '', 'value': ''}, 'IPAddress': {'setting': '', 'value': ''}, 'Custom_Field_1': {'setting': '', 'value': ''}, 'Custom_Field_2': {'setting': '', 'value': ''}, 'Custom_Field_3': {'setting': '', 'value': ''}, 'Custom_Field_4': {'setting': '', 'value': ''}, 'Custom_Field_5': {'setting': '', 'value': ''}, 'Custom_Field_6': {'setting': '', 'value': ''}, 'Custom_Field_7': {'setting': '', 'value': ''}, 'Custom_Field_8': {'setting': '', 'value': ''}, 'Custom_Field_9': {'setting': '', 'value': ''}, 'Custom_Field_10': {'setting': '', 'value': ''}, 'HPPFormat': {'setting': '', 'value': ''}, 'StartDate': {'setting': '', 'value': ''}, 'EndDate': {'setting': '', 'value': ''}, 'StartTime': {'setting': '', 'value': ''}, 'EndTime': {'setting': '', 'value': ''}, 'SuppressAutoSAFE': {'setting': '', 'value': ''}, } self._optionable = [ 'FirstName', 'LastName', 'Company', 'Address1', 'Address2', 'City', 'State', 'Zip', 'Country', 'Phone', 'Fax', 'EMail', 'Website', 'Tax', 'Shipping', 'OrderID', 'PONumber', 'ShippingFirstName', 'ShippingLastName', 'ShippingCompany', 'ShippingAddress1', 'ShippingAddress2', 'ShippingCity', 'ShippingState', 'ShippingZip', 'ShippingCountry', 'ShippingEmail', 'ShippingPhone', 'ShippingFax', 'ShippingTrackingNumber', 'ShippingCarrier', 'Custom_Field_1', 'Custom_Field_2', 'Custom_Field_3', 'Custom_Field_4', 'Custom_Field_5', 'Custom_Field_6', 'Custom_Field_7', 'Custom_Field_8', 'Custom_Field_9', 'Custom_Field_10' ] self._numeric = [ 'Amount', 'Tax', 'Shipping', 'ProcessorID', 'TransactionID', 'CheckABA', 'CheckAccount', 'CCNumber', 'CCExpDate' ] self._enums = { 'TransactionType': ['sale', 'auth', 'safe only', 'capture', 'void', 'refund', 'update', 'adjustment'], 'Shipping_Carrier': ['ups', 'fedex', 'dhl', 'usps', 'UPS', 'Fedex', 'DHL', 'USPS'], 'HPPFormat': ['1', '2'] } self._boolean = ['Donation', 
'AutoSAFE', 'SupressAutoSAFE'] self._date = ['StartDate', 'EndDate'] self._digit_2 = ['State', 'ShippingState'] self._amount = ['Amount', 'TipAmount', 'Tax', 'Shipping'] self._required = ['TransactionType'] # Override mapping with api-specific field maps self._mapping['shipping_tracking_number'] = 'Tracking_Number' self._mapping['shipping_carrier'] = 'Shipping_Carrier' def validate(self): # All sales and auth require an amount unless donation if ((not self._fields['Donation']['value'] or self._fields['Donation']['value'] is not False) and (self._fields['TransactionType']['value'] == 'sale' or self._fields['TransactionType']['value'] == 'auth')): self._required.append('Amount') error_array = self._auto_validate() errors = error_array['errors'] messages = error_array['messages'] # ExpDate MMYY if ('CCExpDate' in self._fields.keys() and self._fields['CCExpDate']['value'] and (len(self._fields['CCExpDate']['value']) != 4 or not re.match("^(0[1-9]|1[0-2])([0-9][0-9])$", self._fields['CCExpDate']['value']))): errors += 1 messages.append('CCExpDate (credit card expiration date) must be MMYY.') # CCNumber length if ('CCNumber' in self._fields.keys() and self._fields['CCNumber']['value'] and len(self._fields['CCNumber']['value']) != 16 and len(self._fields['CCNumber']['value']) != 15): errors += 1 messages.append('CCNumber (credit card number) must be 15-16 digits long.') # ABA length if ('CheckABA' in self._fields.keys() and self._fields['CheckABA']['value'] and len(self._fields['CheckABA']['value']) != 9): errors += 1 messages.append('CheckABA (routing number) must be 9 digits long.') self.validate_errors = errors self.validate_messages = messages if errors == 0: return {'errors': errors, 'messages': messages} else: raise RequestValidationException('Request validation failed with ' + ' '.join(messages)) def get_fields(self): fields = self._get_field_array() if 'AutoSAFE' in fields.keys(): if fields['AutoSAFE'] is True: fields['AutoSAFE'] = 1 else: fields['AutoSAFE'] = 0 
if 'SuppressAutoSAFE' in fields.keys(): if fields['SuppressAutoSAFE'] is True: fields['SuppressAutoSAFE'] = 1 else: fields['SuppressAutoSAFE'] = 0 return fields def get_params(self, request): return {'objparameters': request}
Finally the long-awaited OpenCart 2.0 has been released to the public for use. The new version of our favourite ecommerce platform has a whole host of major new features and improvements. Here are some of them. Pretty much all new websites need to be responsive these days so that they adapt to look optimized on mobile phones and tablets. Many developers, when creating a new theme for OpenCart, base this on the default theme that comes with the platform so it's great that this new theme now responds beautifully to the device it's being viewed on. Only part of the page is visible in OpenCart 1.5.6 The theme elegantly puts all elements in place, reducing the size of the main banner and adding a "hamburger" menu. Although usually simple for web developers, installing an extension or theme used to be a little daunting for most merchants. They would have to unzip their extension and then upload it via FTP, hoping that they wouldn't destroy their site. OpenCart 2.0 comes with an extension installer that allows you to click a button and upload the extension to automatically install. As well as a responsive front-end, you can now easily access the OpenCart backend on your mobile or tablet. Great news for merchants who want to update and edit products while in the warehouse or showroom. This shows what's happening on your site, in real time. This is great news if you want to get to know your customers a little better. Just add some additional fields in the Sales > Customers > Custom Fields section and customers will then be able to fill these in when they register. Are you using OpenCart 2.0 already? What do you think? Looking to upgrade, but wondering what's involved? Let us know in the comments below!
# coding=utf-8 from __future__ import unicode_literals import logging from medusa.logger.adapters.style import BraceAdapter from six import text_type log = BraceAdapter(logging.getLogger(__name__)) log.logger.addHandler(logging.NullHandler()) def log_url(response, **kwargs): """Response hook to log request URL.""" request = response.request log.debug( '{method} URL: {url} [Status: {status}]', { 'method': request.method, 'url': request.url, 'status': response.status_code, } ) log.debug('User-Agent: {}'.format(request.headers['User-Agent'])) if request.method.upper() == 'POST': if request.body: if 'multipart/form-data' not in request.headers.get('content-type', ''): body = request.body else: body = request.body[1:99].replace('\n', ' ') + '...' else: body = '' # try to log post data using various codecs to decode if isinstance(body, text_type): log.debug('With post data: {0}', body) return codecs = ('utf-8', 'latin1', 'cp1252') for codec in codecs: try: data = body.decode(codec) except UnicodeError as error: log.debug('Failed to decode post data as {codec}: {msg}', {'codec': codec, 'msg': error}) else: log.debug('With post data: {0}', data) break else: log.warning('Failed to decode post data with {codecs}', {'codecs': codecs})
Geological Museum is in the e. wing of the S.H. Knight Building at the University of Wyoming. The museum interprets the physical and historical geology of the state through displays of rocks, minerals and fossils. Of interest is a mounted skeleton of a brontosaurus, purported to be one of only five exhibited in the world. Other dinosaur displays include an allosaurus, tyrannosaurus and triceratops. Mon.-Sat. 10-4. Closed major holidays.
'''
Base class for java, scala and python API docs generators

Created on Jul 7, 2017

@author: acaproni
'''
import sys
import os
import logging


class DocGenerator(object):
    '''
    The base class for API docs generators
    '''

    def __init__(self, srcFolder, dstFolder, outFile=sys.stdout):
        """
        Constructor

        @param srcFolder: the folder with sources to generate their documentation
        @param dstFolder: destination folder for the api docs
        @param outFile: the file where the output generated by calling
                        java/scala/py-doc must be sent
        """
        self.checkFolders(srcFolder, dstFolder)
        self.srcFolder = srcFolder
        self.dstFolder = dstFolder
        self.outFile = outFile
        assert self.outFile is not None

    def checkFolders(self, src, dst):
        """
        Check that the source folder exists and that the destination folder
        exists (it is created if missing); raise OSError otherwise.

        @param src: the folder with sources to check
        @param dst: destination folder to check
        """
        # BUG FIX: OSError was being raised with several positional args
        # (message fragments and the path), producing an args tuple instead
        # of a readable message; format a single message instead.
        if not os.path.exists(src):
            logging.error("The source folder %s does not exist", src)
            raise OSError("The source folder %s does not exist" % src)
        if not os.path.isdir(src):
            logging.error("The source folder %s is not a directory", src)
            raise OSError("The source folder %s is not a directory" % src)

        # Create the destination folder if it does not exist yet.
        if not os.path.exists(dst):
            os.mkdir(dst)
        if not os.path.exists(dst):
            logging.error("The destination folder %s does not exist", dst)
            raise OSError("The destination folder %s does not exist" % dst)
        if not os.path.isdir(dst):
            logging.error("The destination folder %s is not a directory", dst)
            raise OSError("The destination folder %s is not a directory" % dst)

    def containsSources(self, folder, fileExtension):
        '''
        @param folder: the folder (src or test) to check for sources
        @param fileExtension: the extension of the files the folder is
                              supposed to contain (e.g. ".java")
        @return: True if the passed folder (recursively) contains at least
                 one file with the given extension
        '''
        for root, _subdirs, files in os.walk(folder):
            for fileName in files:
                if fileName.endswith(fileExtension):
                    return True
        return False

    def getSrcPaths(self, sourceFolder, includeTestFolder, folderName, fileExtension):
        """
        Scan the source folder and return a list of source folders containing
        source files.  Sources can be contained in main or test folders (the
        latter are used only if includeTestFolder is True).  The search is
        recursive because a folder can contain several modules.

        @param sourceFolder: root source folder (generally IAS, passed in the command line)
        @param includeTestFolder: True to include test folders in the scan
        @param folderName: the name of the folder containing the sources like java or python
        @param fileExtension: the extension of the files that the folder is supposed to contain
        """
        # BUG FIX: the suffixes mixed os.path.sep with a hard-coded "/"
        # ("main/" + folderName), which never matched on platforms whose
        # separator is not "/"; build them with os.path.join instead.
        mainSuffix = os.path.sep + os.path.join("main", folderName)
        testSuffix = os.path.sep + os.path.join("test", folderName)
        ret = []
        for root, _subdirs, _files in os.walk(sourceFolder):
            if (root.endswith(mainSuffix)
                    or (includeTestFolder and root.endswith(testSuffix))):
                if self.containsSources(root, fileExtension):
                    ret.append(root)
        return ret
Please call one of our trained staff at (856)823-1345. American Sheds offers a wide selection of backyard buildings to meet your needs. We offer a large variety of wood and vinyl sided buildings that you can choose the size that best fits your need. With a large assortment of options, you can customize your building exactly the way you want it. Please review the site requirements page to ensure we can successfully install your shed in your back yard. Installation in your yard (on site) is always included in the price. Delivery is free up to 30 miles from our Williamstown (08094) location. Delivery beyond that is charged at $2 per mile. A 50% deposit is required to schedule a delivery date. During normal business season, normal lead time for delivery is within 2-4 weeks. We accept personal checks, Visa, Mastercard, or Discover for payment of the deposit. Final payment is due upon completion of your building. @ 2006 American Sheds, All Rights Reserved.
'''
Created on 21.01.2017

@author: gex
'''
from ss.jobs.job import Job
from ss.rsync import RSync


class SyncJob(Job):
    '''
    Synchronizes the 'to' directory with the 'from' directory.
    I.e., the directories will be exactly the same after the command.
    '''

    FROM_KEY = 'from'
    TO_KEY = 'to'
    EXCLUDE_KEY = 'exclude'

    def __init__(self, config, lastExecConfig, name):
        super(SyncJob, self).__init__(config, lastExecConfig, name)
        self._fromPath = self.getRequiredConfPath(SyncJob.FROM_KEY)
        self._toPath = self.getRequiredConfPath(SyncJob.TO_KEY)
        # The exclude list is optional: split on ';' and keep only the
        # non-empty entries, with surrounding whitespace removed.
        self._excludes = []
        for entry in self.getOptionalConfStr(SyncJob.EXCLUDE_KEY).split(';'):
            entry = entry.strip()
            if entry:
                self._excludes.append(entry)

    def execute(self):
        sync = RSync()
        sync.setArchive(True)  # Archive mode, preserve file attributes and more
        sync.setStats(True)    # Print stats
        sync.setUpdate(True)   # Update only files that are newer than files on the receiver
        sync.setDelete(True)   # Delete files on the receiver side that don't exist on the sender side
        if self._excludes:
            sync.setDeleteExcluded(True)  # Delete excluded dirs from receiver, if present
            sync.setExcludes(self._excludes)
        # Run the synchronization from the source to the destination path.
        sync.execute(self._fromPath, self._toPath)
        return True
kitchen desk organizer organization a real storage ideas. kitchen desk organizer ideas design with chair inspirations co. kitchen desk organizer cabinet ideas units captivating best built in on nook coun. kitchen desk organizer a cute way to put things in their place getting organized counter office. kitchen desk organizer table house drawing o red 3 wooden desktop office den com under wall mounted hutch. kitchen desk organizer with extendable storage drawers for office home desktop book shelf supplies organizers wood black countertop. kitchen desk organizer ideas amazing area in small chairs white chair. kitchen desk organizer ideas inspiring room and study table innovative trend traditional home office designs with counter. kitchen desk organizer using a for storage homeowner as rack ideas. kitchen desk organizer recipe bar planning desks and kitchens cookbook storage ideas. kitchen desk organizer drawer organizers cabinets tray. kitchen desk organizer inventive ideas to keep your space always look neat counter office. kitchen desk organizer classics pull out organizers cabinet bamboo drawer boxes storage ideas. kitchen desk organizer to elegant collection of counter office. kitchen desk organizer under drawer organizers dividers storage ideas countertop. kitchen desk organizer counter remodel great pictures elegant ideas office. kitchen desk organizer area ideas splendid wonderful with bathroom small organization winsome another at the organizers. kitchen desk organizer modern traditional with wainscoting counter office. kitchen desk organizer best organization ideas on inside storage decorations 0 organi. kitchen desk organizer computer create a area table com. kitchen desk organizer best the accessories at office 7 countertop. kitchen desk organizer shelf wood drawer office with medium size of counter. kitchen desk organizer elegant photo in with raised panel cabinets and white ideas built photos counte. 
kitchen desk organizer wall calendar message board in pan. kitchen desk organizer chair remodel area desktop table scramble ideas organizers. kitchen desk organizer coffee table counter. kitchen desk organizer counter ideas storage.
# Copyright 2014 Google Inc. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS-IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Classes and methods to create and manage Courses.""" __author__ = 'Pavel Simakov (psimakov@google.com)' import collections import datetime import logging import os import urllib import uuid import appengine_config from filer import AssetItemRESTHandler from filer import FileManagerAndEditor from filer import FilesItemRESTHandler from filer import TextAssetRESTHandler from label_editor import LabelManagerAndEditor, TrackManagerAndEditor from label_editor import LabelRestHandler, TrackRestHandler import messages from question_editor import GeneralQuestionRESTHandler from question_editor import GiftQuestionRESTHandler from question_editor import McQuestionRESTHandler from question_editor import QuestionManagerAndEditor from question_editor import SaQuestionRESTHandler from question_group_editor import QuestionGroupManagerAndEditor from question_group_editor import QuestionGroupRESTHandler from role_editor import RoleManagerAndEditor from role_editor import RoleRESTHandler import utils as dashboard_utils from common import crypto from common import jinja_utils from common import safe_dom from common import tags from common import users from common.utils import Namespace from controllers import sites from controllers.utils import ApplicationHandler from controllers.utils import CourseHandler from controllers.utils import ReflectiveRequestHandler from models import config from models 
import custom_modules from models import roles from models import services from models.models import RoleDAO from common import menus from google.appengine.api import app_identity custom_module = None TEMPLATE_DIR = os.path.join( appengine_config.BUNDLE_ROOT, 'modules', 'dashboard', 'templates') class DashboardHandler( CourseHandler, FileManagerAndEditor, LabelManagerAndEditor, TrackManagerAndEditor, QuestionGroupManagerAndEditor, QuestionManagerAndEditor, ReflectiveRequestHandler, RoleManagerAndEditor): """Handles all pages and actions required for managing a course.""" # This dictionary allows the dashboard module to optionally nominate a # specific sub-tab within each major tab group as the default sub-tab to # open when first navigating to that major tab. The default may be # explicitly specified here so that sub-tab registrations from other # modules do not inadvertently take over the first position due to order # of module registration. default_subtab_action = collections.defaultdict(lambda: None) get_actions = [ 'edit_settings', 'edit_unit_lesson', 'manage_asset', 'manage_text_asset', 'add_mc_question', 'add_sa_question', 'edit_question', 'add_question_group', 'edit_question_group', 'question_preview', 'question_group_preview', 'add_label', 'edit_label', 'add_track', 'edit_track', 'add_role', 'edit_role', 'import_gift_questions'] # Requests to these handlers automatically go through an XSRF token check # that is implemented in ReflectiveRequestHandler. 
    # POST actions handled directly by this class. Requests to these handlers
    # automatically go through an XSRF token check that is implemented in
    # ReflectiveRequestHandler.
    post_actions = [
        'create_or_edit_settings', 'add_to_question_group',
        'clone_question']

    # URL-to-handler pairs exposed as child routes via get_child_routes().
    child_routes = [
        (AssetItemRESTHandler.URI, AssetItemRESTHandler),
        (FilesItemRESTHandler.URI, FilesItemRESTHandler),
        (LabelRestHandler.URI, LabelRestHandler),
        (TrackRestHandler.URI, TrackRestHandler),
        (McQuestionRESTHandler.URI, McQuestionRESTHandler),
        (GiftQuestionRESTHandler.URI, GiftQuestionRESTHandler),
        (SaQuestionRESTHandler.URI, SaQuestionRESTHandler),
        (GeneralQuestionRESTHandler.URI, GeneralQuestionRESTHandler),
        (TextAssetRESTHandler.URI, TextAssetRESTHandler),
        (QuestionGroupRESTHandler.URI, QuestionGroupRESTHandler),
        (RoleRESTHandler.URI, RoleRESTHandler)]

    # List of functions which are used to generate content displayed at the
    # top of every dashboard page. Use this with caution, as it is extremely
    # invasive of the UX. Each function receives the handler as arg and
    # returns an object to be inserted into a Jinja template (e.g. a string,
    # a safe_dom Node or NodeList, or a jinja2.Markup).
    PAGE_HEADER_HOOKS = []

    # A list of hrefs for extra CSS files to be included in dashboard pages.
    # Files listed here by URL will be available on every Dashboard page.
    EXTRA_CSS_HREF_LIST = []

    # A list of hrefs for extra JS files to be included in dashboard pages.
    # Files listed here by URL will be available on every Dashboard page.
    EXTRA_JS_HREF_LIST = []

    # A list of template locations to be included in dashboard pages
    ADDITIONAL_DIRS = []

    # Dictionary that maps external permissions to their descriptions
    _external_permissions = {}

    # Dictionaries that map actions to permission-checker callables
    # (see map_get_action_to_permission_checker and its POST counterpart).
    _get_action_to_permission = {}
    _post_action_to_permission = {}

    # Action used when a GET request carries no explicit 'action' parameter;
    # computed per-request in get() from the current user's permissions.
    default_action = None

    # Record type pairing a custom GET handler with the menu action it
    # should be highlighted under (in_action may be None).
    GetAction = collections.namedtuple('GetAction', ['handler', 'in_action'])
    _custom_get_actions = {}  # Map of name to GetAction
    _custom_post_actions = {}  # Map of name to handler callback.

    # Create top level menu groups which other modules can register against.
    # I would do this in "register", but other modules register first.
    # Maps action name -> menus.MenuItem so pages can highlight the menu
    # entry for the action currently being handled.
    actions_to_menu_items = {}
    root_menu_group = menus.MenuGroup('dashboard', 'Dashboard')

    @classmethod
    def add_nav_mapping(cls, name, title, **kwargs):
        """Create a top level nav item."""
        menu_item = cls.root_menu_group.get_child(name)
        if menu_item is None:
            # An 'href' kwarg makes this a plain link; otherwise it is a
            # group that can hold sub-items.
            is_link = kwargs.get('href')
            menu_cls = menus.MenuItem if is_link else menus.MenuGroup
            menu_item = menu_cls(
                name, title, group=cls.root_menu_group, **kwargs)
            if not is_link:
                # create the basic buckets
                # NOTE: the locals are otherwise unused; presumably the
                # group= kwarg attaches each bucket to menu_item -- confirm
                # against common/menus.py.
                pinned = menus.MenuGroup(
                    'pinned', None, placement=1000, group=menu_item)
                default = menus.MenuGroup(
                    'default', None, placement=2000, group=menu_item)
                advanced = menus.MenuGroup(
                    'advanced', None,
                    placement=menus.MenuGroup.DEFAULT_PLACEMENT * 2,
                    group=menu_item)
        return menu_item

    @classmethod
    def get_nav_title(cls, action):
        """Return a 'Group > Item' breadcrumb title for action, or None."""
        item = cls.actions_to_menu_items.get(action)
        if item:
            # item.group is the sub-group bucket; item.group.group is the
            # top-level nav group whose title we show.
            return item.group.group.title + " > " + item.title
        else:
            return None

    @classmethod
    def add_sub_nav_mapping(
            cls, group_name, item_name, title, action=None, contents=None,
            can_view=None, href=None, no_app_context=False,
            sub_group_name=None, **kwargs):
        """Create a second level nav item.

        Args:
            group_name: Name of an existing top level nav item to use as the
                parent
            item_name: A unique key for this item
            title: Human-readable label
            action: A unique operation ID for this item
            contents: A handler which will be added as a custom get-action on
                DashboardHandler
            can_view: Pass a boolean function here if your handler has
                additional permissions logic in it that the dashboard does
                not check for you. You must additionally check it in your
                handler.
            sub_group_name: The sub groups 'pinned', 'default', and
                'advanced' exist in that order and 'default' is used by
                default. You can pass some other string to create a new
                group at the end.
            other arguments: see common/menus.py
        """
        group = cls.root_menu_group.get_child(group_name)
        if group is None:
            logging.critical('The group %s does not exist', group_name)
            return

        if sub_group_name is None:
            sub_group_name = 'default'

        # Lazily create the requested sub-group bucket if it is not one of
        # the pre-made ones.
        sub_group = group.get_child(sub_group_name)
        if not sub_group:
            sub_group = menus.MenuGroup(
                sub_group_name, None, group=group)

        item = sub_group.get_child(item_name)
        if item:
            logging.critical(
                'There is already a sub-menu item named "%s" registered in '
                'group %s subgroup %s.', item_name, group_name,
                sub_group_name)
            return

        if contents:
            # Synthesize an action name so the handler can be registered.
            action = action or group_name + '_' + item_name

        if action and not href:
            href = "dashboard?action={}".format(action)

        def combined_can_view(app_context):
            # Visibility check combining dashboard permissions with the
            # caller-supplied can_view predicate.
            if action:
                # Current design disallows actions at the global level.
                # This might change in the future.
                if not app_context and not no_app_context:
                    return False

                # Check permissions in the dashboard
                if not cls.can_view(action):
                    return False

            # Additional custom visibility check
            if can_view and not can_view(app_context):
                return False

            return True

        item = menus.MenuItem(
            item_name, title, action=action, group=sub_group,
            can_view=combined_can_view, href=href, **kwargs)

        cls.actions_to_menu_items[action] = item

        if contents:
            cls.add_custom_get_action(action, handler=contents)

    @classmethod
    def add_custom_get_action(cls, action, handler=None, in_action=None,
                              overwrite=False):
        """Register a custom GET action; returns True on success."""
        if not action:
            logging.critical('Action not specified. Ignoring.')
            return False

        if not handler:
            logging.critical(
                'For action : %s handler can not be null.', action)
            return False

        # Refuse to shadow an existing action unless overwrite is requested.
        if ((action in cls._custom_get_actions or action in cls.get_actions)
            and not overwrite):
            logging.critical(
                'action : %s already exists. '
                'Ignoring the custom get action.', action)
            return False

        cls._custom_get_actions[action] = cls.GetAction(handler, in_action)
        return True

    @classmethod
    def remove_custom_get_action(cls, action):
        """Unregister a custom GET action; no-op if not registered."""
        if action in cls._custom_get_actions:
            cls._custom_get_actions.pop(action)

    @classmethod
    def add_custom_post_action(cls, action, handler, overwrite=False):
        """Register a custom POST action; returns True on success."""
        if not handler or not action:
            logging.critical('Action or handler can not be null.')
            return False

        if ((action in cls._custom_post_actions
             or action in cls.post_actions) and not overwrite):
            logging.critical(
                'action : %s already exists. Ignoring the custom post '
                'action.', action)
            return False

        cls._custom_post_actions[action] = handler
        return True

    @classmethod
    def remove_custom_post_action(cls, action):
        """Unregister a custom POST action; no-op if not registered."""
        if action in cls._custom_post_actions:
            cls._custom_post_actions.pop(action)

    @classmethod
    def get_child_routes(cls):
        """Add child handlers for REST."""
        return cls.child_routes

    @classmethod
    def can_view(cls, action):
        """Checks if current user has viewing rights."""
        app_context = sites.get_app_context_for_current_request()
        if action in cls._get_action_to_permission:
            return cls._get_action_to_permission[action](app_context)
        # Default: only course admins may view.
        return roles.Roles.is_course_admin(app_context)

    @classmethod
    def can_edit(cls, action):
        """Checks if current user has editing rights."""
        app_context = sites.get_app_context_for_current_request()
        if action in cls._post_action_to_permission:
            return cls._post_action_to_permission[action](app_context)
        return roles.Roles.is_course_admin(app_context)

    def default_action_for_current_permissions(self):
        """Set the default or first active navigation tab as default action."""
        item = self.root_menu_group.first_visible_item(self.app_context)
        if item:
            return item.action

    def get(self):
        """Enforces rights to all GET operations."""
        action = self.request.get('action')
        if not action:
            self.default_action = self.default_action_for_current_permissions()
            action = self.default_action
        self.action = action

        if not self.can_view(action):
            self.redirect(self.app_context.get_slug())
            return

        if action in self._custom_get_actions:
            result = self._custom_get_actions[action].handler(self)
            if result is None:
                # Handler wrote its own response; nothing more to do.
                return

            # The following code handles pages for actions that do not write
            # out their responses.

            template_values = {
                'page_title': self.format_title(self.get_nav_title(action)),
            }
            if isinstance(result, dict):
                template_values.update(result)
            else:
                template_values['main_content'] = result

            self.render_page(template_values)
            return

        # Force reload of properties. It is expensive, but admin deserves it!
        config.Registry.get_overrides(force_update=True)

        return super(DashboardHandler, self).get()

    def post(self):
        """Enforces rights to all POST operations."""
        action = self.request.get('action')
        self.action = action
        if not self.can_edit(action):
            self.redirect(self.app_context.get_slug())
            return

        if action in self._custom_post_actions:
            # Each POST request must have valid XSRF token.
            xsrf_token = self.request.get('xsrf_token')
            if not crypto.XsrfTokenManager.is_xsrf_token_valid(
                    xsrf_token, action):
                self.error(403)
                return
            self._custom_post_actions[action](self)
            return

        return super(DashboardHandler, self).post()

    def get_template(self, template_name, dirs=None):
        """Sets up an environment and Gets jinja template."""
        return jinja_utils.get_template(
            template_name, (dirs or []) + [TEMPLATE_DIR], handler=self)

    def get_alerts(self):
        """Return newline-joined warnings about course editability/visibility."""
        alerts = []
        if not self.app_context.is_editable_fs():
            alerts.append('Read-only course.')
        if not self.app_context.now_available:
            alerts.append('The course is not publicly available.')
        return '\n'.join(alerts)

    def _get_current_menu_action(self):
        """Return the menu action to highlight for the current request."""
        registered_action = self._custom_get_actions.get(self.action)
        if registered_action:
            # A custom action may nominate a different menu entry.
            registered_in_action = registered_action.in_action
            if registered_in_action:
                return registered_in_action
        return self.action

    def render_page(self, template_values, in_action=None):
        """Renders a page using provided template values."""
        template_values['header_title'] = template_values['page_title']
        template_values['page_headers'] = [
            hook(self) for hook in self.PAGE_HEADER_HOOKS]
        template_values['course_title'] = self.app_context.get_title()

        current_action = in_action or self._get_current_menu_action()
        template_values['current_menu_item'] = self.actions_to_menu_items.get(
            current_action)
        template_values['courses_menu_item'] = self.actions_to_menu_items.get(
            'courses')
        template_values['root_menu_group'] = self.root_menu_group
        template_values['course_app_contexts'] = get_visible_courses()
        template_values['app_context'] = self.app_context
        template_values['current_course'] = self.get_course()
        template_values['gcb_course_base'] = self.get_base_href(self)
        template_values['user_nav'] = safe_dom.NodeList().append(
            safe_dom.Text('%s | ' % users.get_current_user().email())
        ).append(
            safe_dom.Element(
                'a', href=users.create_logout_url(self.request.uri)
            ).add_text('Logout'))
        template_values[
            'page_footer'] = 'Page created on: %s' % datetime.datetime.now()
        template_values['coursebuilder_version'] = (
            os.environ['GCB_PRODUCT_VERSION'])
        template_values['application_id'] = app_identity.get_application_id()
        version = os.environ['CURRENT_VERSION_ID']
        if '.' not in version or not appengine_config.PRODUCTION_MODE:
            template_values['application_version'] = version
        else:
            # Production version ids look like '<version>.<timestamp>'.
            version, deployed_at = version.split('.', 1)
            template_values['application_version'] = version
            template_values['deployed_at'] = (
                datetime.datetime.utcfromtimestamp(
                    int(deployed_at) >> 28))  # Yes, really.

        template_values['extra_css_href_list'] = self.EXTRA_CSS_HREF_LIST
        template_values['extra_js_href_list'] = self.EXTRA_JS_HREF_LIST
        template_values['powered_by_url'] = services.help_urls.get(
            'dashboard:powered_by')

        if not template_values.get('sections'):
            template_values['sections'] = []

        if not appengine_config.PRODUCTION_MODE:
            template_values['page_uuid'] = str(uuid.uuid1())

        self.response.write(
            self.get_template('view.html').render(template_values))

    @classmethod
    def register_courses_menu_item(cls, menu_item):
        """Register the menu item shown for the course picker."""
        cls.actions_to_menu_items['courses'] = menu_item

    def format_title(self, text):
        """Formats standard title with or without course picker."""
        ret = safe_dom.NodeList()
        cb_text = 'Course Builder '
        ret.append(safe_dom.Text(cb_text))
        ret.append(safe_dom.Entity('&gt;'))
        ret.append(safe_dom.Text(' %s ' % self.app_context.get_title()))
        ret.append(safe_dom.Entity('&gt;'))
        dashboard_text = ' Dashboard '
        ret.append(safe_dom.Text(dashboard_text))
        ret.append(safe_dom.Entity('&gt;'))
        ret.append(safe_dom.Text(' %s' % text))
        return ret

    def get_action_url(self, action, key=None, extra_args=None,
                       fragment=None):
        """Build a canonicalized /dashboard URL for the given action."""
        args = {'action': action}
        if key:
            args['key'] = key
        if extra_args:
            args.update(extra_args)
        url = '/dashboard?%s' % urllib.urlencode(args)
        if fragment:
            url += '#' + fragment
        return self.canonicalize_url(url)

    def _render_roles_list(self):
        """Render roles list to HTML."""
        all_roles = sorted(RoleDAO.get_all(), key=lambda role: role.name)
        return safe_dom.Template(
            self.get_template('role_list.html'), roles=all_roles)

    def _render_roles_view(self):
        """Renders course roles view."""
        actions = [{
            'id': 'add_role',
            'caption': 'Add Role',
            'href': self.get_action_url('add_role')}]
        sections = [{
            'description': messages.ROLES_DESCRIPTION,
            'actions': actions,
            'pre': self._render_roles_list()
        }]
        template_values = {
            'page_title': self.format_title('Roles'),
            'sections': sections,
        }
        return template_values

    @classmethod
    def map_get_action_to_permission(cls, action, module, perm):
        """Maps a view/get action to a permission.

        Map a GET action that goes through the dashboard to a permission to
        control which users have access.

        Example:
            The i18n module maps multiple actions to the permission
            'access_i18n_dashboard'. Users who have a role assigned with
            this permission are then allowed to perform these actions and
            thus access the translation tools.

        Args:
            action: a string specifying the action to map.
            module: The module with which the permission was registered via
                a call to models.roles.Roles.register_permission()
            perm: a string specifying the permission to which the action
                should be mapped.
        """
        checker = lambda ctx: roles.Roles.is_user_allowed(ctx, module, perm)
        cls.map_get_action_to_permission_checker(action, checker)

    @classmethod
    def map_get_action_to_permission_checker(cls, action, checker):
        """Map an action to a function to check permissions.

        Some actions (notably settings and the course overview) produce
        pages that have items that may be controlled by multiple permissions
        or more complex verification than a single permission allows. This
        function allows modules to specify check functions.

        Args:
            action: A string specifying the name of the action being
                checked. This should have been registered via
                add_custom_get_action(), or present in the 'get_actions'
                list above in this file.
            checker: A function which is run when the named action is
                accessed. Registered functions should expect one parameter:
                the application context object, and return a Boolean value.
        """
        cls._get_action_to_permission[action] = checker

    @classmethod
    def unmap_get_action_to_permission(cls, action):
        """Remove the permission checker for a GET action."""
        del cls._get_action_to_permission[action]

    @classmethod
    def map_post_action_to_permission(cls, action, module, perm):
        """Maps an edit action to a permission. (See 'get' version, above.)"""
        checker = lambda ctx: roles.Roles.is_user_allowed(ctx, module, perm)
        cls.map_post_action_to_permission_checker(action, checker)

    @classmethod
    def map_post_action_to_permission_checker(cls, action, checker):
        """Map an edit action to check function. (See 'get' version, above)."""
        cls._post_action_to_permission[action] = checker

    @classmethod
    def unmap_post_action_to_permission(cls, action):
        """Remove mapping to edit action. (See 'get' version, above)."""
        del cls._post_action_to_permission[action]

    @classmethod
    def deprecated_add_external_permission(
            cls, permission_name, permission_description):
        """Adds extra permissions that will be registered by the Dashboard.

        Normally, permissions should be registered in their own modules.
        Due to historical accident, the I18N module registers permissions
        with the dashboard. For backward compatibility with existing roles,
        this API is preserved, but not suggested for use by future modules.
        """
        cls._external_permissions[permission_name] = permission_description

    @classmethod
    def remove_external_permission(cls, permission_name):
        """Remove a permission added via deprecated_add_external_permission."""
        del cls._external_permissions[permission_name]

    @classmethod
    def permissions_callback(cls, unused_app_context):
        """Yield (name, description) pairs of externally added permissions."""
        return cls._external_permissions.iteritems()

    @classmethod
    def current_user_has_access(cls, app_context):
        """True if any non-link dashboard menu item is visible to the user."""
        return cls.root_menu_group.can_view(app_context, exclude_links=True)

    @classmethod
    def generate_dashboard_link(cls, app_context):
        """Return [('dashboard', 'Dashboard')] if the user has access, else []."""
        if cls.current_user_has_access(app_context):
            return [('dashboard', 'Dashboard')]
        return []


def make_help_menu():
    """Register the top-level Help menu and its external-link sub-items."""
    DashboardHandler.add_nav_mapping('help', 'Help', placement=6000)

    DashboardHandler.add_sub_nav_mapping(
        'help', 'documentation', 'Documentation',
        href=services.help_urls.get('help:documentation'), target='_blank')

    DashboardHandler.add_sub_nav_mapping(
        'help', 'forum', 'Support',
        href=services.help_urls.get('help:forum'), target='_blank')

    DashboardHandler.add_sub_nav_mapping(
        'help', 'videos', 'Videos',
        href=services.help_urls.get('help:videos'), target='_blank')


def get_visible_courses():
    """Return app contexts of courses the current user may see, sorted by title."""
    result = []
    for app_context in sorted(
            sites.get_all_courses(),
            key=lambda course: course.get_title().lower()):
        # Access checks must run in each course's own namespace.
        with Namespace(app_context.namespace):
            if DashboardHandler.current_user_has_access(app_context):
                result.append(app_context)
    return result


def register_module():
    """Registers this module in the registry."""
    DashboardHandler.add_nav_mapping('edit', 'Create', placement=1000)
    DashboardHandler.add_nav_mapping('style', 'Style', placement=2000)
    DashboardHandler.add_nav_mapping('publish', 'Publish', placement=3000)
    DashboardHandler.add_nav_mapping('analytics', 'Manage', placement=4000)
    DashboardHandler.add_nav_mapping('settings', 'Settings', placement=5000)

    make_help_menu()

    # pylint: disable=protected-access
    DashboardHandler.add_sub_nav_mapping(
        'settings', 'roles', 'Roles', action='edit_roles',
        contents=DashboardHandler._render_roles_view)
    # pylint: enable=protected-access

    def on_module_enabled():
        # Deferred until the module is enabled so registration order is safe.
        roles.Roles.register_permissions(
            custom_module, DashboardHandler.permissions_callback)
        ApplicationHandler.AUTH_LINKS.append(
            DashboardHandler.generate_dashboard_link)

    global_routes = [
        (dashboard_utils.RESOURCES_PATH + '/js/.*', tags.JQueryHandler),
        (dashboard_utils.RESOURCES_PATH + '/.*',
         tags.DeprecatedResourcesHandler)]

    dashboard_handlers = [
        ('/dashboard', DashboardHandler),
    ]

    global custom_module  # pylint: disable=global-statement
    custom_module = custom_modules.Module(
        'Course Dashboard',
        'A set of pages for managing Course Builder course.',
        global_routes, dashboard_handlers,
        notify_module_enabled=on_module_enabled)
    return custom_module
O-Bank's primary stakeholders are employees, customers, shareholders, suppliers, government agencies, the local community, the public, and the news media. The matters they care about are diverse, and have wide-ranging impact. To fulfill its corporate social responsibilities, O-Bank maintains a number of channels for two-way communication with stakeholders, and identifies material topics of concern to stakeholders to set the agenda for key management policies and plans for their execution. Making reference to the CSR Reports of peer companies in the financial industry, O-Bank has identified employees, shareholders, government agencies, customers, suppliers, the local community, the public, NPOs/NGOs, CSR experts and scholars, artists, media, and labor unions as potential stakeholders. CSR representatives from the Bank's various units have met for internal discussions and redefined the scope of stakeholders to exclude CSR experts and scholars, labor unions, media, and NPOs/NGOs. This decision was based on the dual principles of "relevance to the business of O-Bank's departments, and frequency of contact." In addition, they merged "artists and the local community" into the more broadly defined stakeholder category of "the public," resulting in six major stakeholder categories: employees, shareholders, government agencies, customers, suppliers, and the public. O-Bank's highest authority for governance is the Board of Directors, which has 15 members with diverse professional backgrounds, skill sets, and industry experience. The directors are responsible for reviewing the company's business plan, any organizational expansion or downsizing, budgets and year-end accounts, appointment and dismissal of key managerial officers, and approval of matters of material importance. 
Also, in order to ensure sound decision-making and improve managerial and oversight mechanisms, the Board maintains various special-purpose committees, including an Audit Committee, Compensation Committee, Investment Committee, and Corporate Social Responsibility Committee. The purpose of the Audit Committee is to assist the Board of Directors with its supervisory duties. The purpose of the Compensation Committee is to assist the Board of Directors with evaluating and supervising the Bank's overall compensation policy and the compensation of Directors and managerial officers. The purpose of the Investment Committee is to assist the Board of Directors in evaluating and supervising investment quality and reviewing investment plans. The purpose of the Corporate Social Responsibility Committee is to set CSR-related regulations, policy direction, and carry out CSR plans. To effectively optimize internal controls and put the company on stronger organizational footing, O-Bank has adopted the "three lines of defense" model for effective risk management and control. This approach has entailed the establishment of a self-audit system, a legal compliance system, a risk management system, and an internal control system. In the area of legal compliance, O-Bank adopts an annual legal compliance plan. Under this plan, in addition to reviewing compliance systems, structures, and regulations, the Bank also conducts an integrated inspection of all departments' compliance self-assessments, analyzes reasons for serious compliance failures, and makes suggestions for improvement. In the area of risk management, the highest authority in O-Bank’s risk management organization is the Board of Directors, and below it are the Auditing Department, Audit Committee, Investment Committee, and Compensation Committee. 
Reporting to the President are the Asset and Liability Committee, Investment Product Quality Evaluation Committee, and Loan Evaluation Committee, which assess the risk of related projects. As for internal auditing, the Auditing Department conducts periodic on-site audits at every unit and also supervises their self-audits. Once each half-year, the chief auditor reports on these matters to the Board of Directors. To implement ethical best practice, O-Bank has adopted a "Code of Ethical Conduct," a set of "Ethical Best Practice Standards," and a "Handbook for Ethical Corporate Management Procedures and Practices." The Corporate Social Responsibility Committee's Corporate Governance Subcommittee is responsible for the following: adopting and implementing ethical best practice policies as well as measures for preventing malfeasance; ensuring that directors, managerial officers, and other employees do not violate ethical principles; monitoring the effectiveness of the company's ethics standards; and periodically reporting to the Board of Directors. Also, O-Bank has established a Whistleblower System to encourage any person to blow the whistle on any unethical or improper conduct. Under this system, someone who discovers a possible occurrence of crime, cheating, or regulatory violations can report the matter via a "tip box" to the chief auditor. A whistleblower's identity, and the content of his or her report, are kept strictly confidential. To ensure that all employees comply with the Ethical Best Practice Standards in the conduct of their job duties, O-Bank in 2017 retained a professional from KPMG Taiwan to provide instruction to O-Bank employees on the Handbook for Ethical Corporate Management Procedures and Practices. The purpose of the course, which was completed by 75 persons, was to improve understanding of ethics concepts among risk officers and other employees. 
Also, O-Bank used its e-learning platform to carry out online instruction and testing of employees throughout the entire company, thus improving all employees' familiarity with ethical best practices, ensuring comprehensive legal compliance, and preventing unethical conduct. The online training was completed by 805 persons, which translates to a participation rate of 100%. After O-Bank reorganized as a commercial bank in 2017, its staffing needs increased significantly. At year-end 2017, the Bank employed 904 people. As an equal opportunity employer, we do not discriminate on the basis of gender, age, ethnicity, or other such factors. O-Bank employed equal numbers of men and women in 2017, while women accounted for roughly 48% of management and 47% of all promotions. These figures show that all employees enjoy fully equal opportunities for career development and advancement. In addition, O-Bank employed 11 persons with physical disabilities (1.2% of total staff), which exceeded the regulatory standard. O-Bank also maintains clear regulations against any kind of sexual harassment, discrimination, or threatening behavior, and its on-the-job training programs include courses on gender equality, prevention of sexual harassment, and the needs of disadvantaged persons. O-Bank's starting salaries are above the minimum required by the Labor Standards Act. In 2017, starting monthly salaries for university graduates hired by the O-Bank Group were 1.6 times the minimum wage, and the Bank makes annual salary adjustments, granting an average raise of 3% in 2017 to 97% of all employees who took part in performance evaluations. Employee benefits are also generous. In addition to providing employees with labor insurance, national health insurance, and retirement pensions as required by law, O-Bank also provides employees and their family members with comprehensive group insurance. Also, O-Bank arranges regular health examinations that exceed regulatory requirements for employees.
In 2017, 87.2% of employees took advantage of the health examination program. Living expense allowances include: holiday bonuses; birthday bonuses; scholarships for dependents; birth, marriage, and funeral allowances; company dinner allowances; preferential-rate deposits for employees; and collaboration with daycare facilities for childcare discounts. In addition, we have also adopted the "O-Bank Rules for Administration of Employee Loans" to assist employees with housing purchases or other financial needs. In addition to helping employees achieve work-life balance by offering menstrual leave, parental leave, paternity leave, and family leave, O-Bank goes beyond the statutory minimum with additional paid leave options such as family ties leave (for employees who need to care for family members), community service leave (to encourage employees to take part in community service activities), paternity leave for prenatal checkups (so that husbands can accompany their wives to prenatal checkups), and birthday leave (to allow employees to leave work a half-day early on any day of their choosing during their birthday month). And in addition to this extremely varied benefit and leave system, the O-Bank headquarters building has a reading room, an employee lounge, a "health kiosk," a massage and rest area, a music room, and an art gallery where employees can go to relax and enjoy art. As for employee training, in addition to arranging in-house and outside training courses, we have also launched the "O-Bank digital academy" digital learning platform, which offers access to a wide range of online courses covering such fields as financial services, legal compliance, anti-money laundering, information security, and professional and managerial techniques. 
In addition, O-Bank offers employees allowances to offset first-time testing fees for professional licenses regardless of whether the qualifications are sought for business purposes, to satisfy legal requirements, or for personal reasons. To encourage the pursuit of excellence, O-Bank has a comprehensive performance evaluation system and conducts annual performance management. Under this system, each employee is interviewed by his or her immediate supervisor for a preliminary evaluation. This is then followed up with an inter-departmental secondary review in which senior management aligns performance ratings for all employees at the same occupational rank. The purpose is to ensure consistent application of standards throughout the entire bank. Because of the company's rapidly growing personnel needs, O-Bank has established an Assessment Center to identify employees with outstanding potential and provide them with proper training. To prevent workplace accidents, O-Bank has adopted a set of "Working Rules for Occupational Safety and Health," which sets out express standards for workplace safety and health and provides clear and detailed instructions regarding first aid measures, use of emergency equipment, and accident reporting procedures. At the same time, the Bank carries out employee safety and health training as well as disaster preparedness drills, and works to raise safety awareness. In 2017, O-Bank's safety and health training courses as well as its secondary fire and earthquake preparedness team drills were completed by 182 and 108 employees, respectively. These activities have improved employees' emergency response capabilities. O-Bank has created a smokeless working environment, regularly tests its CO2 emissions and use of lighting, and rigorously enforces its door access restrictions to ensure employee safety.
The Bank's employee health assessments and workplace health promotion program work extremely well, and the company has received a "Healthy Workplace Certification" from the Ministry of Health and Welfare's Health Promotion Administration. In the area of labor rights, O-Bank holds meetings of its labor-management council to discuss labor legislation safeguards, and after the council passes resolutions it incorporates the content thereof into the Bank's internal work rules and announces the rules to all employees. Also, in order to create more opportunities for interaction between employees and senior management, O-Bank in 2017 held a series of company events to provide venues for two-way communications between employees and senior management. In addition, O-Bank has established a "Complaint Mailbox" and a "Sexual Harassment Complaint Mailbox." Cases reported via these channels are treated as confidential matters in order to protect the interests of complainants. The company also maintains a "We Care" mailbox to encourage suggestions for ways to care for employee needs. For many years now, O-Bank has provided its corporate customers with comprehensive solutions by creating boutique financial services featuring real-time convenience, professional sophistication, and reliable security. In January 2017, O-Bank reorganized as a commercial bank, branched out into retail banking, and tapped into digital technology to become the first native digital bank in Taiwan. Ordinary consumers can now use their mobile devices to access financial services online, thus turning O-Bank's vision of financial inclusion into reality. O-Bank's corporate business services include corporate banking, global transactions banking, financial markets, investment services, securitization, and trust services. In addition, the Bank also provides stepped coupon bonds that are tailored to meet specific customer needs under specific market conditions. 
Also, O-Bank has launched a Corporate Internet Banking service (to provide corporate customers with convenient and secure electronic banking services), uses an "e-Advice" automated messaging system (which provides customers instantaneous access to the latest business and market information), and offers Automated Clearing House services (which enable customers to receive and pay funds, thus increasing the convenience and security of transactions). For each corporate customer, O-Bank provides a dedicated account representative to ensure maximum service quality. At the same time, the Bank holds a special campaign to show appreciation to its customers and build stronger ties with them. Since O-Bank formally launched its retail banking services in 2017, consumers have been able to use mobile devices to access a wide range of financial services, including fully online account openings, fully online loan applications, online purchasing of shares in investment funds, and robo-advisor service. In addition, O-Bank has developed many innovative new online app features. One of these is the O-Bank Easy Payments, which can use QR codes, GPS data, or mobile phone numbers to execute funds transfers. Another is the Payment Analysis, which facilitates analysis of the user's account transactions and debit card payments. Yet another is the O-Bank Red Envelope, which customers can use to send "red envelope" cash gifts in digital form via their mobile devices. All these features serve to integrate financial services into the lives of O-Bank customers. And in order to better understand the views of consumers, in addition to online surveys targeted to major online communities, the Bank held an event in December 2017 to meet face-to-face with O-Bank fans, hear their thoughts and suggestions, share information with them concerning future product launches, and invite them to take part in product testing. The idea is to provide digital banking services that better meet the needs of consumers. 
O-Bank has taken a series of measures in all its business operations to protect customer information. In addition to complying with the requirements of the "Personal Information Protection Act," the "Financial Supervisory Commission's Regulations Governing Security Measures for Personal Information Files at Designated Non-Public Agencies," and other related legislation, the Bank has also adopted its own "O-Bank Rules Governing Security Measures for Personal Information Files" and related information security management systems, and hired an information security firm to install an information security system. By inventorying personal information, carrying out risk assessments, and implementing personal information management measures, the Bank has raised employee awareness of the need to protect personal information, successfully safeguarded the security of customers' personal information, and fulfilled its duty of due care. O-Bank strives in the course of routine operations to act in an environmentally conscious manner. The company does everything possible to reduce the company's environmental impact, and seeks to exist in harmony with the environment. Amidst today's rapid advances in the digital sphere, O-Bank has gone paperless with numerous official documents and operations, including the Board of Directors' Meeting Agenda and related attachments, the audit report of the Audit Committee, and the electronic forms system. Employee training, meanwhile, is conducted through the "O-Bank digital academy" digital learning platform to reduce printing of paper teaching materials, and the Bank is also working to reduce paper use by setting copiers and printers in all departments to do double-sided printing by default. In addition, the Bank communicates with corporate customers via the "e-Advice" automated messaging system to reduce printing and mailing. 
On average, these measures reduce paper forms usage by some 5,000 items per month, which means an 84% reduction in paper usage. O-Bank is addressing the problem of global climate change by implementing energy conservation policies. In 2017, the Bank phased out inefficient halogen lamps and high-pressure sodium lamps, keeping the original fixtures and switching in more efficient LED tubes. A total of 361 lamps were replaced over the past year, reducing annual energy consumption by an estimated 59,949 kWh (69.91%) and cutting CO2 emissions by some 31.71 metric tons. The O-Bank headquarters employs inverter air conditioning and a variety of sun shades to improve indoor cooling efficiency. It also employs software to control elevator traffic. The number of elevator cars in operation varies from peak to off-peak hours, with only one car kept running on weekends and holidays. As for water resource management, O-Bank relies solely on the public water supply, and has installed infrared sensors at restroom taps to control water usage. All water supply devices are regularly cleaned and filters are changed and inspected, while plants and landscaping are watered according to season and weather to adjust consumption. O-Bank's suppliers are primarily construction contractors and providers of IT equipment, office equipment, and office supplies. The Bank's "Rules for Purchase Requisition and Payment Procedures" places priority on green products, giving first consideration to environmentally friendly vendors to gradually increase the proportion of collaboration with green suppliers, while at the same time promoting the concept of green consumption to all suppliers. For example, at the Taipei Vie Show Branch and the Zhongxiao Dunhua Branch, both of which the Bank opened in 2017, green purchasing has accounted for 15% total disbursements. 
O-Bank reorganized in 2017 as a commercial bank and relies primarily on virtual delivery channels, which means it is the first native digital bank in Taiwan. Having the borderless service model of a digital bank greatly reduces the energy consumption that would otherwise accompany the establishment of physical branches. Consumers no longer need to expend fuel to travel to physical branches to conduct their banking business, and online transactions generally do not involve the printout of paper vouchers. Technology, in the hands of O-Bank, helps us all live in harmony with the environment. O-Bank embraces the ideals of giving back to society and using sustainable business practices, seeks to reflect a corporate culture of "always sincere, always here" in its social outreach and philanthropic activities, and strives to create a financial group with a human touch. Since the O-Bank headquarters building in the Neihu Science Park was completed in 2008, the O-Bank Educational Foundation has held frequent concerts, lectures, and exhibits in its first floor concert hall and gallery space. Furthermore, the Foundation's "Stars of TiDing Arts Promotion Project" has provided a venue for new and aspiring artists. The Ninth Annual Stars of TiDing Plan received a total of 71 performance and exhibition proposals, and selected 16 performances and 7 exhibits. These events will support a total of 71 budding artists. In addition, the O-Bank Educational Foundation encourages the younger generation to innovate boldly and realize their dreams of entrepreneurship. It has combined government and academic resources to hold the WeWin Entrepreneurship Competition for 10 consecutive years. It has also held business startup fairs, business startup workshops, and other such activities. In 2015, O-Bank established the O-Bank Community Volunteer Club. O-Bank Vice Chairman Tina Y. Lo, who chairs the Bank's CSR Committee, serves as the Club's honorary president. 
As of year-end 2017, the Club had 50 members, which was 5.5% of the Bank's Taiwan-based employees. Since its founding in 2015, the Club has made a special effort to serve the needs of schools in remote areas where the children of immigrants account for a relatively large share of all students. The Club donates NT$12,000 per semester, for example, to fund scholarships for students at Jiqing Elementary School in New Taipei City, with scholarship support going to 36 students each semester. Also, in order to help students in remote areas develop a proper understanding of personal financial planning and learn how to spot scam artists, the O-Bank Community Volunteer Club held financial literacy events at remote elementary schools in New Taipei City, attracting 88 participants in 2017. In addition, O-Bank also held a "Holiday Spirit" community service program, a "Cinema for the Mind" program, and a "Rural Schools Culture & Arts" program to create a wide range of extracurricular activities for young students. In addition, O-Bank has run a Summer Internship Program annually since 2015 to assist low-income students at colleges and universities with their career development efforts. Participation in this six-week program is open to students from underprivileged backgrounds who are currently enrolled in 3rd year or above (including graduate students). During the program, in addition to learning about the financial industry and receiving on-site training at financial institutions, interns also earn a salary, and stellar performers have an inside track for job opportunities after graduation. O-Bank hopes that these training opportunities will make the internship participants more competitive in the job market. Four students took part in the program in 2017. O-Bank maintains diverse ties with social enterprises. 
For the past three years in a row, the Bank has held farmers' markets at its headquarters building, inviting social enterprises to come and sell organic fruits and vegetables, and it has joined forces with a social enterprise named Do You a Flavor to hold food drives in which employees of the Bank donate extra food from home to Do You a Flavor, which then makes the food into meals that it distributes to the homeless. O-Bank also purchases products from social enterprises to give away as holiday gift items. It bought over 500 such products in 2017 for a total of more than NT$300,000. For its efforts, O-Bank received a 2nd Class "Buying Power" Award in the new product and service of social innovation Purchase Reward Program from the Small and Medium Enterprise Administration of Ministry of Economic Affairs. And in order to further support the development of social enterprises, O-Bank in 2018 launched the "Exclusive Social Enterprises Demand Deposit Accounts featuring preferential interest rates" as well as the "preferential Social Enterprises payroll account features and services." Enterprises are an important force for social change. O-Bank hopes to tap into the power of Taiwanese enterprises and work together with them to achieve its vision of co-existing with the natural environment and benefitting together with society. For this reason, O-Bank launched a "B Corp Campaign" in 2017 to encourage other Taiwanese firms to become a Certified B Corporation. This certification, which is based on the B Lab's B Corp standards, constitutes recognition that a certified company, in addition to pursuing profits, also pays due attention to environmental and social concerns. Such a company is an entity that does the best for employees, the environment, and society. In hopes of acting as a catalyst, O-Bank itself obtained B Corp certification on 30 October 2017.
Besides being the first listed bank in the world to become a B Corp, we are also the first listed company in Taiwan and the first financial services firm here to do so. To fully achieve the B Corp ideal of "not best IN the world, but best FOR the world," O-Bank has joined with the Taipei City Government and B Lab Taiwan in holding a "Best for Taipei" corporate challenge, which offered 20 different courses to help companies prepare for the challenge. In addition, O-Bank also helped B Lab Taiwan hold the 2017 B Corp Asia Forum in Taichung, where scholars and representatives of over 30 overseas Certified B Corporations met with representatives of Taiwanese B Corporations to interact and learn from each other. In November 2017, O-Bank joined with B Lab Taiwan in co-hosting a forum on B Corp issues, where representatives of Taiwanese B Corporations gathered to discuss their experiences and identify ways to care for the needs of employees, the environment, and society. The event was attended by more than 200 owners and senior executives of many different companies, some of which are O-Bank clients and suppliers. And to support the development of Taiwanese B Corporations, O-Bank has also set an industry precedent by offering exclusive B Corp demand deposit accounts featuring preferential interest rates, preferential B Corp payroll account features and services, and a special loan program for companies with B Corp Certification. © O-Bank, All rights reserved. Championed by the U.S. non-profit B Lab, the B Movement urges businesses to care for not only financial returns but also social and environmental issues through the B Corp Certification that assesses a company's overall impact across five dimensions of governance, workers, community, environment, and customers.
Together with the growing B Corp community worldwide, O-Bank will continue to strive for the vision of financial inclusion via digital innovations, use business as a force for good, and ultimately redefine success in business.
# -*-coding:Utf-8 -*

# Copyright (c) 2010 LE GOFF Vincent
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
#   list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
#   this list of conditions and the following disclaimer in the documentation
#   and/or other materials provided with the distribution.
# * Neither the name of the copyright holder nor the names of its contributors
#   may be used to endorse or promote products derived from this software
#   without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT
# OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.

"""File containing the 'communication:immersion' context."""

from primaires.format.constantes import ponctuations_finales
from primaires.interpreteur.contexte import Contexte
from primaires.communication.contextes.invitation import Invitation


class Immersion(Contexte):

    """Immersion context for a communication channel.

    While a player is immersed, everything typed is sent to the channel,
    except lines starting with '/', which are dispatched as options
    through the ``options`` mapping (user, moderator and administrator
    commands).

    """

    def __init__(self, pere):
        """Build the context: empty prompt, no channel bound yet."""
        Contexte.__init__(self, pere)
        self.opts.prompt_prf = ""
        self.opts.prompt_clr = ""
        self.canal = None
        # Maps an option shortcut (what the player types after '/')
        # to the bound method handling it.
        self.options = {
            # User options
            "q": self.opt_quit,
            "w": self.opt_who,
            "h": self.opt_help,
            "i": self.opt_invite,
            "me": self.opt_emote,
            # Moderator options
            "e": self.opt_eject,
            "b": self.opt_ban,
            "a": self.opt_announce,
            # Administrator options
            "p": self.opt_promote,
            "ed": self.opt_edit,
            "d": self.opt_dissolve,
        }

    def __getstate__(self):
        """Make the context picklable.

        Bound methods cannot be serialized, so the ``options`` mapping
        is replaced by a copy holding method names instead.
        """
        state = Contexte.__getstate__(self)
        state["options"] = {
            shortcut: handler.__name__
            for shortcut, handler in state["options"].items()}
        return state

    def __setstate__(self, state):
        """Restore the context: rebind handler names to bound methods."""
        Contexte.__setstate__(self, state)
        self.options = {
            shortcut: getattr(self, name)
            for shortcut, name in self.options.items()}

    @property
    def u_nom(self):
        # Unique context name, derived from the channel's name.
        return "immersion:{}".format(self.canal.nom)

    def accueil(self):
        """Return the greeting shown when entering the context."""
        canal = self.canal
        lignes = (
            canal.clr + ">|ff| Immersion dans le canal " + canal.nom,
            " Entrez |ent|/h|ff| pour afficher l'aide.",
        )
        return "\n".join(lignes)

    def opt_quit(self, arguments):
        """Leave immersion mode: /q"""
        canal = self.canal
        joueur = self.pere.joueur
        canal.immerger_ou_sortir(joueur)
        joueur << canal.clr + ">|ff| Retour au jeu."
def opt_who(self, arguments): """Option qui : /w""" personnage = self.pere.joueur res = self.canal.clr + ">|ff| Joueurs connectés :" for connecte in self.canal.connectes: if connecte in type(self).importeur.connex.joueurs_connectes: if connecte is self.canal.auteur: statut = "|rgc|@" elif connecte in self.canal.moderateurs: statut = "|jn|*" else: statut = "|bc|" res += "\n " + statut + connecte.nom + "|ff|" if connecte in self.canal.immerges: res += " (immergé)" personnage << res def opt_help(self, arguments): """Options d'affichage de l'aide : /h""" personnage = self.pere.joueur canal = self.canal res = canal.clr + ">|ff| Aide du canal |ent|{}|ff| ({}) :\n".format( canal.nom, canal.resume) res += str(canal.description) res += "\n Administrateur : |rgc|" res += (canal.auteur and canal.auteur.nom or "aucun") + "|ff|" modos = "" if len(canal.moderateurs) == 1: modos = "\n Modérateur : |jn|" + canal.moderateurs[0].nom + "|ff|" elif len(canal.moderateurs) > 1: modos = "\n Modérateurs : |jn|" + "|ff|, |jn|".join( sorted([modo.nom for modo in canal.moderateurs])) + "|ff|" res += modos res += "\n Commandes disponibles :" res += "\n - |cmd|/h|ff| : affiche ce message d'aide" res += "\n - |cmd|/w|ff| : liste les joueurs connectés au canal" res += "\n - |cmd|/i <joueur>|ff| : invite un joueur à rejoindre " res += "le canal" res += "\n - |cmd|/me <message>|ff| : joue une emote dans le canal" res += "\n - |cmd|/q|ff| : permet de sortir du mode immersif" if personnage in canal.moderateurs or personnage is canal.auteur \ or personnage.est_immortel(): res += "\n Commandes de modération :" res += "\n - |cmd|/e <joueur>|ff| : éjecte un joueur" res += "\n - |cmd|/b <joueur>|ff| : bannit ou rappelle un joueur" res += "\n - |cmd|/a <message>|ff| : permet d'envoyer une " res += "annonce impersonnelle" if personnage is canal.auteur or personnage.est_immortel(): res += "\n Commandes d'administration :" res += "\n - |cmd|/p <joueur>|ff| : promeut ou déchoit un joueur " res += "modérateur" 
res += "\n - |cmd|/ed|ff| : ouvre l'éditeur du canal" res += "\n - |cmd|/d|ff| : dissout le canal" personnage << res def opt_invite(self, arguments): """Option pour inviter un ami à rejoindre le cana : /i <joueur>""" canal = self.canal if not arguments or arguments.isspace(): self.pere.joueur << "|err|Vous devez spécifier un joueur.|ff|" return nom_joueur = arguments.split(" ")[0] joueur = None for t_joueur in type(self).importeur.connex.joueurs_connectes: if nom_joueur == t_joueur.nom.lower(): joueur = t_joueur break if joueur is None: self.pere.joueur << "|err|Le joueur passé en paramètre n'a pu " \ "être trouvé.|ff|" return if joueur in canal.connectes: self.pere.joueur << "|err|Ce joueur est déjà connecté au canal.|ff|" return contexte = Invitation(joueur.instance_connexion) contexte.emetteur = self.pere.joueur contexte.canal = canal contexte.actualiser() self.pere.joueur << "|att|Vous venez d'inviter {} à rejoindre le " \ "canal {}.|ff|".format(joueur.nom, canal.nom) def opt_emote(self, arguments): """Option d'emote dans le contexte immersif""" canal = self.canal joueur = self.pere.joueur if not arguments or arguments.isspace(): joueur << "|err|Vous devez préciser une action.|ff|" return message = arguments.rstrip(" \n") if not message[-1] in ponctuations_finales: message += "." 
im = canal.clr + "<" + joueur.nom + " " + message + ">|ff|" ex = canal.clr + "[" + canal.nom + "] " + joueur.nom + " " ex += message + "|ff|" for connecte in canal.connectes: if connecte in type(self).importeur.connex.joueurs_connectes: if connecte in canal.immerges: connecte << im else: connecte << ex def opt_eject(self, arguments): """Option permettant d'éjecter un joueur connecté : /e <joueur>""" canal = self.canal if not self.pere.joueur in canal.moderateurs and \ self.pere.joueur is not canal.auteur and not \ self.pere.joueur.est_immortel(): self.pere.joueur << "|err|Vous n'avez pas accès à cette option.|ff|" return if not arguments or arguments.isspace(): self.pere.joueur << "|err|Vous devez spécifier un joueur.|ff|" return nom_joueur = arguments.split(" ")[0] joueur = None for connecte in canal.connectes: if nom_joueur == connecte.nom.lower(): joueur = connecte break if joueur is None: self.pere.joueur << "|err|Ce joueur n'est pas connecté au " \ "canal.|ff|" return if joueur is self.pere.joueur: self.pere.joueur << "|err|Vous ne pouvez vous éjecter " \ "vous-même.|ff|" return if joueur in canal.moderateurs or joueur is canal.auteur: self.pere.joueur << "|err|Vous ne pouvez éjecter ce joueur.|ff|" return canal.ejecter(joueur) def opt_ban(self, arguments): """Option permettant de bannir un joueur connecté : /b <joueur>""" canal = self.canal if not self.pere.joueur in canal.moderateurs and \ self.pere.joueur is not canal.auteur and not \ self.pere.joueur.est_immortel(): self.pere.joueur << "|err|Vous n'avez pas accès à cette option.|ff|" return nom_joueur = arguments.split(" ")[0] joueur = None for t_joueur in type(self).importeur.connex.joueurs: if nom_joueur == t_joueur.nom.lower(): joueur = t_joueur break if joueur is None: self.pere.joueur << "|err|Le joueur passé en paramètre n'a pu " \ "être trouvé.|ff|" return if joueur is self.pere.joueur: self.pere.joueur << "|err|Vous ne pouvez vous bannir vous-même.|ff|" return if joueur in canal.moderateurs or 
joueur is canal.auteur: self.pere.joueur << "|err|Vous ne pouvez éjecter ce joueur.|ff|" return canal.bannir(joueur) def opt_announce(self, arguments): """Option permettant d'envoyer une annonce : /a <message>""" canal = self.canal if not self.pere.joueur in canal.moderateurs and \ self.pere.joueur is not canal.auteur and not \ self.pere.joueur.est_immortel(): self.pere.joueur << "|err|Vous n'avez pas accès à cette option.|ff|" return message = arguments.rstrip(" \n") canal.envoyer_imp(message) def opt_promote(self, arguments): """Option permettant de promouvoir un joueur connecté : /p <joueur>""" canal = self.canal if self.pere.joueur is not canal.auteur and not \ self.pere.joueur.est_immortel(): self.pere.joueur << "|err|Vous n'avez pas accès à cette option.|ff|" return nom_joueur = arguments.split(" ")[0] joueur = None for connecte in canal.connectes: if nom_joueur == connecte.nom.lower(): joueur = connecte break if joueur is None: self.pere.joueur << "|err|Ce joueur n'est pas connecté au " \ "canal.|ff|" return if joueur is self.pere.joueur: self.pere.joueur << "|err|Vous ne pouvez vous promouvoir " \ "vous-même.|ff|" return if joueur is canal.auteur: self.pere.joueur << "|err|Ce joueur est déjà administrateur.|ff|" return canal.promouvoir_ou_dechoir(joueur) def opt_edit(self, arguments): """Option ouvrant un éditeur du canal""" canal = self.canal if self.pere.joueur is not canal.auteur and not \ self.pere.joueur.est_immortel(): self.pere.joueur << "|err|Vous n'avez pas accès à cette option.|ff|" return editeur = type(self).importeur.interpreteur.construire_editeur( "chedit", self.pere.joueur, canal) self.pere.joueur.contextes.ajouter(editeur) editeur.actualiser() def opt_dissolve(self, arguments): """Option permettant de dissoudre le canal""" canal = self.canal if self.pere.joueur is not canal.auteur and not \ self.pere.joueur.est_immortel(): self.pere.joueur << "|err|Vous n'avez pas accès à cette option.|ff|" return joueur = self.pere.joueur 
canal.immerger_ou_sortir(joueur, False) canal.rejoindre_ou_quitter(joueur, False) joueur << "|err|Le canal {} a été dissous.|ff|".format(canal.nom) canal.dissoudre() def interpreter(self, msg): """Méthode d'interprétation du contexte""" if msg.startswith("/"): # C'est une option # On extrait le nom de l'option mots = msg.split(" ") option = mots[0][1:] arguments = " ".join(mots[1:]) if option not in self.options.keys(): self.pere << "|err|Option invalide ({}).|ff|".format(option) else: # On appelle la fonction correspondante à l'option fonction = self.options[option] fonction(arguments) else: self.canal.envoyer(self.pere.joueur, msg)
fire a topic. Digital photography is actually only one brand new means from taking a. face. The skin is crucial to the perspective you are going to shoot coming from. This. is actually an adage, it holds true some folks possess a far better edge. This might be in. Eye is larger compared to my right through only a little, and also looking mind on right into the. video camera will certainly create a dreadful picture, having said that if the digital photographer. are going to aid even out the features while keeping the distinctions that create. close-up, a middle variety and also a large slant. The Angel is quite important as. properly. , if you make use of the incorrect place from sight you may effortlessly reverse the ideal. . Remember, your image must reveal specifically just what is laid out prior to you. Lighting is the upcoming crucial measure to portraiture digital photography. A bunch of. freelance photographers favor organic illumination to synthetic. Your subject will definitely in part. electronic cameras lack of interpretation. To puts it simply you require to generate the. artificial illumination is severe as well as not guided effectively unless in a workshop. Again you will have to utilize the manual setups on your electronic camera to. Definitely, the key to any picture is to film the personality and. the "soul" of your subject. I was actually photographing a kid precursor in his. Due to the fact that the child, uniform for an extremely important picture to the family members. had accomplished the Eagle ranking, which is actually a higher praise. Yet I understood this little one. had great deals of character so I preferred the "professional" shot yet I desired this. kid's center in that too. I told him I would be actually popping a couple of shots to. exam my shutter and I acquired him discussing kid recruiters as well as outdoor camping. As. outdoor camping, that grin emerged as well as boom, I had my try. This awaits my. 
The history is additionally crucial when discussing illumination. In a studio a. freelance photographer are going to choose a background that will certainly not wash your components out. you and also the background. This is actually additionally vital of home portraiture. photography. You will definitely prefer an experience that offers different colors to your. topic as opposed to having that out. Discovering a wonderful spot along with a tree to sit. on as well as mountains behind-the-scenes could be a fantastic picture, yet you will. wish to see to it the background is actually certainly not also hectic to distract from the. subject matter. Create the subject appear apart of the imagine instead of outside. That holds true that the technical skill that the fella at Wal-Mart might have to. have child photos may be the exact same as a fantastic photo performer. individual foreign language is actually utilized to create fantastic poems as it takes holler out. obscenities at a baseball online game. It isn't really the ability that makes that fine art. However not the very least. If your equipment, photos can be messed up simply. is not in recommendation leading shape. See to it that your lens is clean in any way times of. dust. No smutches and so on. I understand it seems to be a topic that ought to not. even must be pointed out, yet usually it is the little points our company disregard. craft planet and also a market behind this that rely on there being actually some. the artist. If you produce a fine art or even a picture work derived off a. photograph that picture intended is actually meant viewed as seen, the viewer is actually. obligated to aim to observe the artistic merit in it. Whether the visitor sees. that merit or not might rely on the customer's capacities, just how good you. are at receiving your creative message throughout or a lot of various other elements. Again your eye is actually the very best tool for discovering a target. How you opt for. 
the subject matter is going to rely on exactly what is actually accessible, the slant and also the light. Relocating slowly by means of a place like garden will aid you. figure out the subject. Seeming under stones or even leaves is actually typically. good for discovering something brand new and also different. You certainly never recognize. where you will definitely discover a photo merely awaiting you to click an image. Some. animals and people perform factors that are going to never again occur and also this is actually. seem like a habit, an actual practice transformeds into an activity and also a possible. If you turn into good at taking the ideal images, profit. As you get. better at having the photos, you can after that start showing your imagines. picture. The face is actually essential to the perspective you will fire coming from. Remember you will definitely utilize the lighting you possess. Digital photography is actually only one brand-new technique from having a.
from os.path import abspath, dirname, join
from setuptools import setup

PROJECT_ROOT = abspath(dirname(__file__))

# Read the long description from the README; a context manager closes the
# file handle promptly instead of leaking it until garbage collection.
with open(join(PROJECT_ROOT, 'README.rst')) as readme_file:
    long_description = readme_file.read()

description = (
    'Pykka (actor model implementation) and Injector '
    '(dependency injection framework) integration module'
)

# Extract __version__ from the module source without importing it (importing
# would require pykka/injector to already be installed at build time).
with open(join(PROJECT_ROOT, 'pykka_injector.py')) as module_file:
    module_code = module_file.readlines()
line = [line for line in module_code if line.startswith('__version__ = ')][0]
version = line.split('=')[-1].strip().strip("'")

if __name__ == '__main__':
    setup(
        name='pykka-injector',
        url='http://github.com/jstasiak/pykka-injector',
        download_url='http://pypi.python.org/pypi/pykka-injector',
        version=version,
        description=description,
        long_description=long_description,
        license='MIT',
        platforms=['any'],
        py_modules=['pykka_injector'],
        author='Jakub Stasiak',
        author_email='jakub@stasiak.at',
        install_requires=[
            'setuptools >= 0.6b1',
            'pykka',
            'injector',
        ],
        keywords='Dependency Injection,Injector,Pykka',
    )
But to determine the standard of what’s unsafe, EWG doesn’t use the EPA’s standard. Nor does the group use California’s much lower standard of 1.1 mg per day (for a 154-pound adult). Crossing the California standard moves the cancer chances to one in 100,000 for an average adult. EWG extrapolated the glyphosate standard it applied for testing by applying a couple of adjustments to California’s standard. First, EWG decided to use California’s line for drinking water contaminants, which has a stricter standard than the EPA’s. Then, EWG used the increased vulnerability of children, whose immune systems aren’t fully developed, as the reason to apply the “10 times safety factor” in the 1996 EPA Food Quality Protection Act. “With this additional children’s health safety factor, EWG calculated that a one-in-a-million cancer risk would be posed by ingestion of 0.01 milligrams of glyphosate per day,” EWG’s report states. That’s a standard that’s 1/100th the amount of glyphosate California considers safe for adults and about 1/200th of what the EPA says you can eat daily without a problem. Grading on the EWG’s curve, 31 of 45 products failed the testing done for EWG by Eurofin Analytical Laboratories. EWG says a 60-gram serving would be two cups of Cheerios (average of 497ppb in EWG’s tests) or 3/4 cup of Quaker Old Fashioned Oats (average of 930ppb in EWG’s tests). That’s about 1/4 cup less than a single soup bowl of cereal. As for the Quaker Oats, Quaker’s directions say 3/4 cup is enough for one-and-a half servings. While both Cheerios and Quaker Old Fashioned Oats exceed the EWG standard, neither comes close to being a problem under the California standard or the EPA standard. You’d have to eat almost 28 bowls of Cheerios per day or 26 servings of Old Fashioned Oats for the state of California to consider your breakfast a cancer risk.
# coding: utf-8

# ### Amazon Sentiment Data
#
# Notebook-style script: loads the Amazon "books" sentiment corpus and wraps
# it in the AmazonData batching helper used by the lxmls deep-learning code.

# In[ ]:

import numpy as np
import lxmls.readers.sentiment_reader as srs
from lxmls.deep_learning.utils import AmazonData

corpus = srs.SentimentCorpus("books")
data = AmazonData(corpus=corpus)

# ### Exercise 2.2 Implement Backpropagation for an MLP in Numpy and train it

# Instantiate the feed-forward model class and optimization parameters. This
# models follows the architecture described in Algorithm 10.

# In[ ]:

# Model geometry: input layer sized to the corpus vocabulary, one hidden
# layer of 20 units (sigmoid), and a 2-class softmax output.
geometry = [corpus.nr_features, 20, 2]
activation_functions = ['sigmoid', 'softmax']

# Optimization hyper-parameters (stochastic gradient descent).
learning_rate = 0.05
num_epochs = 10
batch_size = 30

# In[ ]:

from lxmls.deep_learning.numpy_models.mlp import NumpyMLP
model = NumpyMLP(
    geometry=geometry,
    activation_functions=activation_functions,
    learning_rate=learning_rate
)

# #### Milestone 1:
# Open the code for this model. This is located in
#
#     lxmls/deep_learning/numpy_models/mlp.py
#
# Implement the method `backpropagation()` in the class `NumpyMLP` using the
# Backpropagation recursion that we just saw.
#
# As a first step focus on getting the gradients of each layer, one at a
# time. Use the code below to plot the loss values for the study weight and
# perturbed versions.

# In[ ]:

from lxmls.deep_learning.mlp import get_mlp_parameter_handlers, get_mlp_loss_range

# Get functions to get and set values of a particular weight of the model
# (here: layer 1, non-bias weight at row 0, column 0).
get_parameter, set_parameter = get_mlp_parameter_handlers(
    layer_index=1,
    is_bias=False,
    row=0,
    column=0
)

# Get batch of data
batch = data.batches('train', batch_size=batch_size)[0]

# Get loss and weight value at the current parameters.
current_loss = model.cross_entropy_loss(batch['input'], batch['output'])
current_weight = get_parameter(model.parameters)

# Get range of values of the weight and loss around current parameters values
weight_range, loss_range = get_mlp_loss_range(model, get_parameter, set_parameter, batch)

# Once you have implemented at least the gradient of the last layer, you can
# start checking if the values match.

# In[ ]:

# Get the gradient value for that weight
gradients = model.backpropagation(batch['input'], batch['output'])
current_gradient = get_parameter(gradients)

# Now you can plot the values of the loss around a given parameters value
# versus the gradient. If you have implemented this correctly the gradient
# should be tangent to the loss at the current weight value, see Figure 3.5.
# Once you have completed the exercise, you should be able to plot also the
# gradients of the other layers. Take into account that the gradients for the
# first layer will only be non zero for the indices of words present in the
# batch. You can locate this using:

# In[ ]:

# Use this to know the non-zero values of the input (that have non-zero gradient)
batch['input'][0].nonzero()

# Copy the following code for plotting

# In[ ]:

import matplotlib.pyplot as plt

# Plot the empirical loss curve and mark the current (weight, loss) point.
plt.plot(weight_range, loss_range)
plt.plot(current_weight, current_loss, 'xr')
plt.ylabel('loss value')
plt.xlabel('weight value')

# Plot the analytic gradient as the tangent line at the current weight.
h = plt.plot(
    weight_range,
    current_gradient*(weight_range - current_weight) + current_loss,
    'r--'
)
plt.show()

# #### Milestone 2:
# After you have ensured that your Backpropagation algorithm is correct, you
# can train a model with the data we have.

# In[ ]:

# Get batch iterators for train and test
train_batches = data.batches('train', batch_size=batch_size)
test_set = data.batches('test', batch_size=None)[0]

# Epoch loop
for epoch in range(num_epochs):

    # Batch loop: one SGD update per mini-batch.
    for batch in train_batches:
        model.update(input=batch['input'], output=batch['output'])

    # Prediction for this epoch
    hat_y = model.predict(input=test_set['input'])

    # Evaluation: accuracy (%) on the held-out test set.
    accuracy = 100*np.mean(hat_y == test_set['output'])

    # Inform user
    print("Epoch %d: accuracy %2.2f %%" % (epoch+1, accuracy))
The reverser clamshells are visible on the center engine in this view. Bill getting ready to freak out the neighbors! SFC with AB: 2.40 lb/hr/lb thrust.
from __future__ import division
import os

import requests
from tqdm import tqdm
import numpy as np
from imageio import imread
from skimage import transform

from neupy.utils import asfloat


CURRENT_DIR = os.path.abspath(os.path.dirname(__file__))
FILES_DIR = os.path.join(CURRENT_DIR, 'files')
IMAGENET_CLASSES_FILE = os.path.join(FILES_DIR, 'imagenet_classes.txt')


def download_file(url, filepath, description=''):
    """Download ``url`` to ``filepath``, streaming in chunks with a
    progress bar.

    Parameters
    ----------
    url : str
        Remote file location.
    filepath : str
        Local destination path (overwritten if it exists).
    description : str
        Message printed before the download starts.
    """
    # HEAD request first so the total size is known for the progress bar.
    # NOTE(review): assumes the server sends a Content-Length header; a
    # KeyError is raised otherwise — confirm for the URLs in use.
    head_response = requests.head(url)
    filesize = int(head_response.headers['content-length'])

    response = requests.get(url, stream=True)
    chunk_size = int(1e7)  # 10 MB per chunk
    n_iter = (filesize // chunk_size) + 1

    print(description)
    print('URL: {}'.format(url))

    with open(filepath, "wb") as handle:
        for data in tqdm(response.iter_content(chunk_size), total=n_iter):
            handle.write(data)

    print('Downloaded successfully')


def read_image(image_name, image_size=None, crop_size=None):
    """Read an image file into a float array of shape
    ``(1, height, width, 3)`` (RGB).

    Parameters
    ----------
    image_name : str
        Path of the image file.
    image_size : tuple or None
        Target ``(height, width)``. The image is resized keeping its aspect
        ratio, so the larger side overshoots the target proportionally
        (the subsequent central crop trims the excess).
    crop_size : tuple or None
        ``(height, width)`` of a central crop applied after resizing.
    """
    image = imread(image_name, pilmode='RGB')

    if image_size is not None:
        height, width, _ = image.shape
        new_height, new_width = image_size

        if height < width:
            # Width is bigger than height; this scaler says by how much.
            # The new width is stretched by the same factor so the output
            # keeps the original proportions.
            proportion_scaler = width / height
            image_size = (new_height, int(new_width * proportion_scaler))
        else:
            proportion_scaler = height / width
            image_size = (int(new_height * proportion_scaler), new_width)

        image = transform.resize(
            image, image_size, preserve_range=True, mode='constant')

    if crop_size is not None:
        # Take a central crop of size crop_size.
        height, width, _ = image.shape

        height_slice = slice(
            (height - crop_size[0]) // 2,
            (height + crop_size[0]) // 2)

        width_slice = slice(
            (width - crop_size[1]) // 2,
            (width + crop_size[1]) // 2)

        image = image[height_slice, width_slice, :]

    # (height, width, channel) -> (1, height, width, channel)
    image = np.expand_dims(image, axis=0)
    return asfloat(image)


def process(image, use_bgr):
    """Subtract the ImageNet per-channel mean values (in place) and
    optionally reorder channels from RGB to BGR.

    ``image`` must have shape ``(n, height, width, 3)`` in RGB order.
    """
    # ImageNet channel means (RGB order), as used by the pretrained models.
    image[:, :, :, 0] -= 123.68
    image[:, :, :, 1] -= 116.78
    image[:, :, :, 2] -= 103.94

    if use_bgr:
        # RGB -> BGR
        image[:, :, :, (0, 1, 2)] = image[:, :, :, (2, 1, 0)]

    return image


def load_image(image_name, image_size=None, crop_size=None, use_bgr=True):
    """Read, resize/crop and mean-normalize an image in one step."""
    image = read_image(image_name, image_size, crop_size)
    return process(image, use_bgr)


def deprocess(image):
    """Inverse of :func:`process` for a single ``(height, width, 3)`` BGR
    image: restore RGB channel order and add the mean values back.
    """
    image = image.copy()

    # BGR -> RGB
    image[:, :, (0, 1, 2)] = image[:, :, (2, 1, 0)]

    image[:, :, 0] += 123.68
    image[:, :, 1] += 116.78
    image[:, :, 2] += 103.94

    return image.astype(int)


def top_n(probs, n=5):
    """Return the ``n`` most probable ImageNet class names and their
    probabilities, sorted by descending probability.
    """
    if probs.ndim == 2:
        probs = probs[0]  # take probabilities for first image

    with open(IMAGENET_CLASSES_FILE, 'r') as f:
        class_names = f.read().splitlines()
        class_names = np.array(class_names)

    max_probs_indices = probs.argsort()[-n:][::-1]
    class_probs = probs[max_probs_indices]
    top_classes = class_names[max_probs_indices]

    return top_classes, class_probs


def print_top_n(probs, n=5):
    """Pretty-print the ``n`` most probable ImageNet classes."""
    top_classes, class_probs = top_n(probs, n)

    print('-----------------------')
    print('Top-{} predicted classes'.format(n))
    print('-----------------------')

    for top_class, class_prob in zip(top_classes, class_probs):
        print("{:<80s}: {:.2%}".format(top_class, class_prob))

    print('-----------------------')
The Casper Event Center will host the 2019 PBR (Professional Bull Riders) Velocity Tour in Casper, WY on Saturday, March 30, 2019. Get your special presale code here. For one night, some of the best bull riders in the world will battle the sport’s fiercest bovine athletes. The thrilling 8-second rides and heart-stopping wrecks make the PBR’s Velocity Tours one of the most exciting live sporting events to see. Guarantee the spot you want by buying presale tickets - available Wednesday, December 19 at 10am – Thursday, December 20 at midnight. Tickets go on sale Friday, December 21st to the general public but you have the opportunity to buy tickets now before they officially go on sale by using the code below. Tickets will be available Friday, December 21st at the Casper Events Center Box Office, online at www.sinclairtix.com, and by phone at (800) 442-2256.
from gi.repository import Gst, GES, GLib
import os


class Simple:
    # Minimal GStreamer Editing Services example: plays the first ten
    # seconds of the video of the clip at `uri`.
    #
    # NOTE(review): this is Python 2 code (print statements, long()); it
    # will not run unmodified under Python 3.

    def __init__(self, uri):
        # Build a timeline with a single raw-video track and one layer.
        timeline = GES.Timeline()
        trackv = GES.Track.video_raw_new()
        self.layer = GES.Layer()
        self.pipeline = GES.TimelinePipeline()
        self.pipeline.add_timeline(timeline)
        timeline.add_track(trackv)
        timeline.add_layer(self.layer)
        # Discover the clip asynchronously; discoveredCb starts playback
        # once the asset is ready.
        GES.Asset.new_async(GES.UriClip, uri, None, self.discoveredCb, None)
        # The main loop blocks here until an EOS or ERROR bus message
        # quits it from busMessageCb.
        self.loop = GLib.MainLoop()
        self.loop.run()

    def discoveredCb(self, asset, result, blop):
        # Place the asset on the layer: start 0, in-point 0, 10 s duration,
        # full alpha, video track only.
        self.layer.add_asset(asset, long(0), long(0), long(10 * Gst.SECOND), 1.0, GES.TrackType.VIDEO)
        self.start()

    def busMessageCb(self, bus, message, udata):
        # Quit the main loop when playback finishes or fails.
        if message.type == Gst.MessageType.EOS:
            print "EOS"
            self.loop.quit()
        if message.type == Gst.MessageType.ERROR:
            print "ERROR"
            self.loop.quit()

    def start(self):
        # Start playback and watch the pipeline bus for EOS/ERROR.
        self.pipeline.set_state(Gst.State.PLAYING)
        self.pipeline.get_bus().add_watch(GLib.PRIORITY_DEFAULT, self.busMessageCb, None)


if __name__ == "__main__":
    if len(os.sys.argv) < 2:
        print "You must specify a file URI"
        exit(-1)
    GES.init()
    # And try!
    Simple(os.sys.argv[1])
As teens from across the country descend on Washington, D.C., for a massive march to end gun violence, an underground network of high school students is helping to secure hundreds of homes for their out-of-town peers. The organizers — five friends from Walter Johnson High School in Bethesda, Md. — have set up temporary housing for at least 200 students from as far as California as they travel to participate in Saturday’s March For Our Lives. Hundreds of thousands of protesters are expected to demand school safety measures and push for stronger gun restrictions in response to the mass shooting at Marjory Stoneman Douglas High School in Parkland, Fla, which left 17 students and staff members dead. In just two weeks, Mai and her friends have convinced about 300 hosts to open their homes, churches, synagogues, college dorm rooms and rental properties along D.C.’s Metro transit system for free over the weekend. The housing network heavily relies on social media and the use of shared Google documents, but it’s a concept with longstanding roots in activism circles. Like many young activists, the students from Maryland say they were moved to action out of frustration following the Parkland shooting. The friends said they knew many of their peers wouldn’t be able to attend the March For Our Lives in Washington without help. They began spreading the word about their home-sharing network for March For Our Lives at the end of February, and soon several dozen students and hosts signed up. The young organizers then compared the online forms each participant had to fill out to match like-minded individuals. While organizers can’t guarantee anyone’s safety, they have been trying to screen for potentially dangerous situations. For example, they contact parents for permission when students under 18 sign up, and they ask for letters of recommendations from hosts’ employers as part of a background check. They also do their best to pair students with families. 
Philip Freeman, a 17-year-old high school student from St. Louis, has been texting and communicating on Snapchat with his Maryland host family for the last two weeks. He said he wouldn’t have been able to take part in March For Our Lives without the home-sharing network. Now, the teen from Missouri — who wants to pursue a career in politics — is counting down the days until he’s in Washington, pressuring leaders to enact tighter restrictions on gun or get voted out of office, and marching alongside thousands of his peers, including the survivors of the Parkland shooting. “There’s no more reason not to go,” he said.
import collections.abc
import gzip
import sys
from io import BufferedReader, BytesIO, IOBase
from typing import BinaryIO, IO, Iterable, Union

# Anything that can be wrapped: raw bytes, a binary stream, an iterable of
# byte values, another IOWrapper, or (in practice) a file path / '-'.
IOWrappable = Union[bytes, bytearray, BinaryIO, Iterable[int]]


def get_length(stream: IO) -> int:
    """Gets the number of bytes in the stream.

    The stream position is restored before returning.
    """
    old_position = stream.tell()
    stream.seek(0)
    length = 0
    try:
        while True:
            r = stream.read(1024)
            if not r:
                break
            length += len(r)
    finally:
        stream.seek(old_position)
    return length


class IOWrapper(collections.abc.Sequence):
    """Presents bytes-like objects, binary streams, iterables of byte
    values or file paths through a common Sequence interface."""

    def __init__(self, wrapped: IOWrappable):
        self.wrapped = wrapped
        self._file = None  # stream opened by __enter__, closed by __exit__

    def new_instance(self):
        """Return a binary stream over the wrapped data.

        Existing streams (IOBase instances) are returned as-is; other
        inputs are materialized into a BytesIO or opened from disk.
        """
        if self.wrapped == '-':
            # NOTE(review): this returns text-mode stdin; sys.stdin.buffer
            # may be intended for binary use — confirm.
            return sys.stdin
        elif isinstance(self.wrapped, IOWrapper):
            return self.wrapped.new_instance()
        elif isinstance(self.wrapped, IOBase):
            return self.wrapped
        elif isinstance(self.wrapped, collections.abc.Iterable):
            if not isinstance(self.wrapped, bytes) and not isinstance(self.wrapped, bytearray):
                return BytesIO(bytes([b for b in self.wrapped]))
            else:
                return BytesIO(self.wrapped)
        else:
            # Assume a path-like object.
            return open(self.wrapped, 'rb')

    def __len__(self):
        if isinstance(self.wrapped, collections.abc.Sized):
            return len(self.wrapped)
        else:
            with self.new_instance() as f:
                return get_length(f)

    def __getitem__(self, index: Union[slice, int]) -> Union[int, bytes]:
        """Random access: an int index returns a byte value (int), a slice
        returns bytes.  Negative indices are only supported when the
        wrapped object is itself a Sequence."""
        if isinstance(self.wrapped, collections.abc.Sequence):
            return self.wrapped[index]
        else:
            with self.new_instance() as f:
                old_position = f.tell()
                try:
                    if isinstance(index, slice):
                        # Normalize missing slice bounds.
                        if index.start is None:
                            index = slice(0, index.stop, index.step)
                        if index.stop is None:
                            index = slice(index.start, len(self), index.step)
                        if index.step is None or index.step == 1:
                            f.seek(index.start)
                            return f.read(index.stop - index.start)
                        else:
                            # Strided slice: one byte per step position.
                            ret = bytearray()
                            for i in range(index.start, index.stop, index.step):
                                f.seek(i)
                                r = f.read(1)
                                if r is None or len(r) < 1:
                                    break
                                # BUGFIX: append the byte value, not the
                                # bytes object (bytearray.append takes an
                                # int; passing bytes raised TypeError).
                                ret.append(r[0])
                            return bytes(ret)
                    else:
                        # BUGFIX: seek to the requested index before
                        # reading; previously the read happened at the
                        # stream's current position regardless of index.
                        f.seek(index)
                        r = f.read(1)
                        if r is None or len(r) < 1:
                            return None
                        else:
                            return r[0]
                finally:
                    f.seek(old_position)

    def __enter__(self):
        f = self.new_instance()
        if f is not self.wrapped:
            # Remember streams we created so __exit__ can close them;
            # caller-provided streams stay open for the caller.
            self._file = f
        return f.__enter__()

    def __exit__(self, type, value, tb):
        if self._file is not None:
            self._file.__exit__(type, value, tb)
            self._file = None


class GzipIOWrapper(IOWrapper):
    """IOWrapper variant that transparently decompresses gzip data."""

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)

    def new_instance(self):
        return gzip.GzipFile(fileobj=super().new_instance())


# First two bytes of any gzip stream.
GZIP_MAGIC = b'\x1F\x8B'


class AutoUnzippingStream:
    """Context manager that opens a stream and transparently unzips it
    when the content starts with the gzip magic number."""

    def __init__(self, stream: IOWrappable):
        self.__stream = stream
        self.__to_close = None

    def __enter__(self):
        if self.__to_close is not None:
            raise Exception(f"{self!r} is already a context manager")
        stream = IOWrapper(self.__stream)
        # BufferedReader provides peek(), needed to sniff the magic number
        # without consuming it.
        reader = BufferedReader(stream.__enter__())
        to_close = [reader]
        # BUGFIX: peek(n) may return MORE than n bytes (it exposes the
        # whole buffer), so comparing it for equality against the 2-byte
        # magic almost never matched; compare only the first two bytes.
        if reader.peek(len(GZIP_MAGIC))[:len(GZIP_MAGIC)] == GZIP_MAGIC:
            ret = GzipIOWrapper(reader)
            to_close.append(ret)
            ret = ret.__enter__()
        else:
            ret = reader
        self.__to_close = (stream,) + tuple(to_close)
        return ret

    def __exit__(self, *args, **kwargs):
        try:
            for stream in self.__to_close:
                stream.__exit__(*args, **kwargs)
        finally:
            self.__to_close = None
Sign up for my email list to receive news about new art and events. Look closely and you will see that the center circle is the iris of an eye. This painting has a yellow border. The title is at the bottom left just below the signature. The art is currently framed and ready to hang. This piece can be shipped with or without the frame.
#!/usr/local/bin/python import numpy as np import matplotlib render = True if render: matplotlib.use('pgf') import matplotlib.pyplot as plt from matplotlib.ticker import MaxNLocator # added import matplotlib.mlab as mlab from string import split import scipy.signal as signal import pysndfile import math import os import re import json def main(): fig, (ax0, ax1) = plt.subplots(nrows=2, sharex=True, sharey=True) cmap = plt.get_cmap('viridis') def plt_file(ax, file_name, name): sndfile = pysndfile.PySndfile(file_name, 'r') if sndfile.channels() != 1: raise RuntimeError('please only load mono files') Fs = sndfile.samplerate() signal = sndfile.read_frames() time = np.arange(len(signal)) / float(Fs) ax.plot(time, signal) ax.text(0.001, 0.75, name) ax1.set_xlabel('time / s') ax1.set_xlim([0, 0.05]) fig.subplots_adjust(hspace=0) plt.setp([a.get_xticklabels() for a in fig.axes[:-1]], visible=False) times = [ 3.0 / 340.0, 5.0 / 340.0, 11.0 / 340.0] for ax in fig.axes: ax.set_ylabel('amplitude') for t in times: ax.axvline(t, linestyle='dotted', color='red') plt_file(ax0, 'away.wav', 'away') plt_file(ax1, 'toward.wav', 'toward') ax1.yaxis.set_major_locator(MaxNLocator(prune='upper')) # added plt.suptitle('Early Response for Cardoid Receivers Pointing Toward and Away from Source') #plt.tight_layout() #plt.subplots_adjust(top=0.9) plt.show() if render: plt.savefig('cardioid.svg', bbox_inches='tight', dpi=96, format='svg') if __name__ == '__main__': pgf_with_rc_fonts = { 'font.family': 'serif', 'font.serif': [], 'font.sans-serif': ['Helvetica Neue'], 'legend.fontsize': 12, } matplotlib.rcParams.update(pgf_with_rc_fonts) main()
Today I would like to introduce you to a wonderful UK based brand I have found, Milk Moustache Apparel. Like quite a few enterprising Mums, Sarah the owner of Milk Moustache discovered the lack of cool clothing available in the UK for her little one. Sarah found that some beautiful clothing was being created from gorgeous fabric in the US and Austrailia. Not wanting to incur customs charges and taxes, she decided the best thing to do would be to order fabric and make clothing herself! And so, Milk Moustache was born! Now, I would love to be one of those Mums who “simply” orders some fabric, whips out a sewing machine and creates some masterpieces.. but as yet, I don’t own a sewing machine and wouldn’t know where to start! No, I’m afraid I am one of those Mums who happily purchases other’s creations! What I really love about Sarah and her brand Milk Moustache Apparel, is that she has designed one of her fabric prints, pop corn, in collaboration with Cheryl Rawlings and Sarah has plans for even more unique fabric designs! I love it when new brands go that one step further to make themselves stand out. Sarah stocks all of our fave, beautiful shorts and leggings, stunning rompers and some very handy dribble bibs, burp clothes and even teething rings. There is just one t shirt design in Milk Moustache ‘s collection so far, however, I’m sure there will be more in the future! Milk Moustache is run alongside Sarah’s full time job AND being a mother. I honestly don’t know how she can be doing all of that. All I can do is support her by sending her my pennies in exchange for some beautiful clothing! Sarah has very kindly offered Junior Style London readers 10% off. Simply enter the code Hello10 at the the Milk Moustache checkout! What do you think of Sarah’s pop corn material? Do you have a favorite item? Previous articleThe Perfect Brand For The Summer Holiday, St Berts.
#!/usr/bin/python3
# control.py - USB hotplug handling for Franklin
# Copyright 2014-2016 Michigan Technological University
# Copyright 2016 Bas Wijnen <wijnen@debian.org>
# Author: Bas Wijnen <wijnen@debian.org>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

import websocketd
import os
import sys

# Defaults; overridden by /etc/default/franklin below.
port = 8000
tls = False
user = None
password = None
current_level = 0

def credentials(level, value):
	'''Record the credentials for the highest privilege level seen.

	Called once per USER/EXPERT/ADMIN line in the config file; only the
	entry with the highest level wins.  A value of the form "user:pass"
	sets both; a bare value sets the password for user "admin".
	'''
	# BUGFIX: user and password must be declared global too; previously
	# the assignments below only created locals, so the RPC call always
	# ran with user=None, password=None regardless of the configuration.
	global current_level, user, password
	if level < current_level:
		return
	current_level = level
	if ':' in value:
		user, password = value.split(':', 1)
	else:
		user = 'admin'
		password = value

# Parse the Franklin configuration for port, TLS flag and credentials.
with open('/etc/default/franklin') as f:
	for l in f.readlines():
		l = l.strip()
		if l == '' or l.startswith('#') or not '=' in l:
			continue
		key, value = l.split('=', 1)
		if key == 'PORT':
			# Leave it as a string because it need not be numerical.
			port = value.strip()
		if key == 'TLS':
			tls = value.lower().strip() in ('1', 'true')
		if key == 'USER':
			credentials(0, value.strip())
		if key == 'EXPERT':
			credentials(1, value.strip())
		if key == 'ADMIN':
			credentials(2, value.strip())

# Forward the udev hotplug event (ACTION/DEVNAME environment variables) to
# the running Franklin server over its admin websocket interface.
try:
	p = websocketd.RPC(port, tls = tls, url = '/admin', user = user, password = password)
	action = os.getenv('ACTION')
	dev = os.getenv('DEVNAME')
	if action == 'add':
		p.add_port(dev)
	elif action == 'remove':
		p.remove_port(dev)
except Exception:
	# Narrowed from a bare except (which also swallowed SystemExit and
	# KeyboardInterrupt); newline added so the message does not run into
	# subsequent log output.
	sys.stderr.write('Failed to handle serial port event for Franklin\n')
Who wants to go wedding dress shopping with me?? Picking the perfect dress can be hard but when you know you know. That may sound cliche, but it's the truth. I tried on about 10 dresses but kept coming back to a certain one, and in the end it was the dress! There are so many to choose from like A-line, Fit and Flare, Ball Gowns.... to fabrics like lace and silk organza. It is easy to get caught up in the mix and like them all, but this is where your bridal stylist/consultant comes in to help you find that perfect one. I found my one at Modern Trousseau Louisville. Modern Trousseau is a couture bridal boutique that showcases the designs of Australian born, Callie Tein. I knew it was the perfect place for me considering I am marrying an Aussie. Modern Trousseau has Flagship Stores in: Louisville, Baltimore, Nashville, Charleston, New Haven, New York, and Savannah. Flag Ship Stores coming soon to: Charlotte, Cincinnati, St. Louis, DC, and Long Island. You can also see their unique designs in many other bridal shops around the US (find them here). At their Flagship stores, find your perfect custom made dress that fits your needs for your special day. The easiest way to select the dress is to have a budget and an idea in mind. Obviously a budget keeps you from over spending and trying on dresses that you will fall in love with and cannot afford. So save yourself and your bank account the stress. Skimming through bridal magazines and online can help you come up with a couple designs that you like. This can narrow down the amount of time you spend perusing through racks of dresses, and it will definitely cut down the number you try on. Honestly the more you try on the more it muddies the water in your mind of that perfect one. Leah-Terese that runs the Modern Trousseau Louisville Flagship Store was the perfect host for me and my guests. she worked with me explaining the different fits and fabrics, and helped me narrow down my choices, until I picked my perfect one. 
There are many reasons why I personally loved finding my dress at Modern Trousseau. I remember about a year ago (before I was engaged) walking pass this shop one night heading to meet up with some friends, I stopped and looked into the window and was in awe of all the beautiful dresses and I thought to myself "When I get married that is where my dress will be"! When the time came, I was right! Modern Trousseau sets its self apart from other bridal boutiques because their dresses are custom to fit your body. Your measurements are taken and your dress is made from start to finish to fit you. So you are not buying a dress off the rack that has to be cut down or reconstructed to fit your body type. You custom pick the lace and color of your dress, you can mix and match from other dresses (the top of one and the bottom of another), and add a belt for a little sparkle until it is made into your perfect creation. If you are a bride and looking for that one of a kind dress, let me suggest Modern Trousseau. You won't regret it! You can request an appointment here. Here is to happy stress free shopping because you will already have enough stress planning! You always have to cheers when you find the dress! Pia - Fit to Flare gown with alternating panels of alençon lace and French net. Added a belt for sparkle. Odette - French Lace and Italian Silk Organza strapless ballgown. It has a scoop neckline, a low V back and a sweep train. Added a belt for sparkle.
# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ##### END GPL LICENSE BLOCK #####

import bpy
from . import bgee_config, bgee_component, bgee_tagslayers


class GameEditorEntityPanel(bpy.types.Panel):
    """Tool-shelf panel listing the selected entities with their
    active/prefab flags, tag, layers and a shared transform editor.

    NOTE(review): assumes an object named "GameManager" exists in the
    blend file; draw() raises a KeyError otherwise — confirm it is
    guaranteed to exist by the addon's setup code.
    """
    bl_idname = "bgee_entity_panel"
    bl_label = "Entity"
    bl_space_type = 'VIEW_3D'
    bl_region_type = 'TOOLS'
    bl_category = "Entity"

    def draw(self, context):
        gm = context.blend_data.objects["GameManager"]
        layout = self.layout
        # Selected entities names, location, rotation, scale
        row = layout.row(align=True)
        row.label(text="Entities selected:")
        row = layout.row(align=True)
        box = row.box()
        for ob in context.selected_objects:
            # One group of rows per selected object: name plus the
            # active/prefab toggles.
            boxrow = box.row(align=True)
            boxrow.label(text=ob.name)
            boxrow.prop(ob.entityProps, "active")
            boxrow = box.row(align=True)
            boxrow.prop(ob.entityProps, "prefab")
            boxrow = box.row(align=True)
            if (len(context.selected_objects) > 1):
                # Check tag equals
                pass
            else:
                boxrow.label("Tag:")
                boxrow.prop(context.active_object, "bgeeTag")
            # Layer list of the ACTIVE object, with a remove button per
            # real (non-"None") layer.
            boxrow = box.row(align=True)
            boxrow.label("Layers:")
            boxcolumn = boxrow.column(align=True)
            for layId,lay in enumerate(context.active_object.entityProps.layers):
                boxrowcolumn = boxcolumn.row(align=True)
                boxrowcolumn.label(lay.first)
                if (lay.first != "None"):
                    remParam = boxrowcolumn.operator("bgee.remove_entity_layer", icon="X")
                    remParam.selectedLayer = layId
                    remParam.selectedEntity = context.active_object.name
            # Selector + button to add the GameManager's current layer to
            # this object.
            boxcolumn.separator()
            boxrowcolumn = boxcolumn.row(align=True)
            boxrowcolumn.prop(gm, "bgeeLayer")
            addOp = boxrowcolumn.operator("bgee.add_entity_layer", "Add")
            addOp.selectedEntity = ob.name
            addOp.selectedLayer = gm.bgeeLayer
        # Shared transform widgets; editing them pushes the values to all
        # selected objects through the update callbacks below.
        row = layout.row(align=True)
        row.prop(gm.entityTransform, "location")
        row = layout.row(align=True)
        row.prop(gm.entityTransform, "rotation")
        row = layout.row(align=True)
        row.prop(gm.entityTransform, "scale")


# START Multiselection transform methods
def update_location(self, context):
    # Propagate the shared location to every selected object.
    gm = context.blend_data.objects["GameManager"]
    obs = context.selected_objects
    for ob in obs:
        ob.location = gm.entityTransform.location


def update_rotation(self, context):
    # Propagate the shared rotation (Euler) to every selected object.
    gm = context.blend_data.objects["GameManager"]
    obs = context.selected_objects
    for ob in obs:
        ob.rotation_euler = gm.entityTransform.rotation


def update_scale(self, context):
    # Propagate the shared scale to every selected object.
    gm = context.blend_data.objects["GameManager"]
    obs = context.selected_objects
    for ob in obs:
        ob.scale = gm.entityTransform.scale


def update_transform(context):
    # Sync the shared transform widget FROM the selection (inverse of the
    # update_* callbacks above).  Only the X location is implemented.
    # TODO: NOT WORKING WELL
    gm = context.blend_data.objects["GameManager"]
    obs = context.selected_objects
    if (len(obs) > 0):
        locationX, locationY, locationZ, rotationX, rotationY, rotationZ, scaleX, scaleY, scaleZ = True, True, True, True, True, True, True, True, True
        # Location
        for ob in obs:
            if (obs[0].location.x != ob.location.x):
                locationX = False
        if (locationX):
            gm.entityTransform.location[0] = obs[0].location[0]
    else:
        print("No object selected")
# END Multiselection transform methods


class MultiEntityTransform(bpy.types.PropertyGroup):
    # Shared transform edited in the panel; each property pushes its value
    # to all selected objects through its update callback.
    location = bpy.props.FloatVectorProperty(update=update_location)
    rotation = bpy.props.FloatVectorProperty(subtype="EULER", update=update_rotation)
    scale = bpy.props.FloatVectorProperty(update=update_scale)


class BGEE_OT_multiselection(bpy.types.Operator):
    """Modal operator polling the selection every 0.2 s; when the number
    of selected objects changes it refreshes the shared transform widget.

    NOTE(review): _calcs_done is never set to True (the assignment below
    is commented out and is unreachable anyway), so the timer runs until
    Blender closes; also cancel() returning {'CANCELLED'} is unusual for
    a cancel() method — confirm both are intended.
    """
    bl_idname = "bgee.multiselection"
    bl_label = "Entity multiselection catcher"

    _updating = False
    _calcs_done = False
    _timer = None
    nObjects = None  # last seen selection count

    def selected_objects_changed(self, context):
        # Return True only when the selection count differs from the last
        # poll; the very first poll just records the count.
        currentNObjects = len(context.selected_objects)
        if (self.nObjects is not None):
            if (self.nObjects != currentNObjects):
                self.nObjects = currentNObjects
                return True
            else:
                return False
        else:
            self.nObjects = currentNObjects
            return False
        #self._calcs_done = True

    def modal(self, context, event):
        if event.type == 'TIMER' and not self._updating:
            # _updating guards against re-entrant timer events.
            self._updating = True
            if (self.selected_objects_changed(context)):
                update_transform(context)
            self._updating = False
        if self._calcs_done:
            self.cancel(context)
        return {'PASS_THROUGH'}

    def execute(self, context):
        # Register as a modal handler and start the 0.2 s polling timer.
        context.window_manager.modal_handler_add(self)
        self._updating = False
        self._timer = context.window_manager.event_timer_add(0.2, context.window)
        return {'RUNNING_MODAL'}

    def cancel(self, context):
        context.window_manager.event_timer_remove(self._timer)
        self._timer = None
        return {'CANCELLED'}


# ENTITY PROPERTIES
class EntityProperties(bpy.types.PropertyGroup):
    # Per-object game-entity data attached to every Blender object.
    active = bpy.props.BoolProperty(default=True, name="Active")
    prefab = bpy.props.BoolProperty(default=False, name="Prefab")
    #tag = bpy.props.EnumProperty(items=bgee_config.bgeeCurrentTags)
    layers = bpy.props.CollectionProperty(type=bgee_tagslayers.LayerItem)
    components = bpy.props.CollectionProperty(type=bgee_component.ObjectComponent)


class BGEE_OT_add_entity_layer(bpy.types.Operator):
    """Add the given layer to an entity's layer collection.

    Adding a real layer removes the "None" placeholder; selecting "None"
    clears every layer so "None" ends up as the only entry.
    """
    bl_idname = "bgee.add_entity_layer"
    bl_label = "Add Entity Layer"

    selectedLayer = bpy.props.StringProperty()
    selectedEntity = bpy.props.StringProperty()

    def execute(self, context):
        entity = bpy.data.objects[self.selectedEntity]
        # If same layer in dont add
        same = False
        for lay in entity.entityProps.layers:
            if (self.selectedLayer == lay.first):
                same = True
        if (not same):
            # If None is present
            if (len(entity.entityProps.layers) > 0):
                if (entity.entityProps.layers[0].first == "None"):
                    entity.entityProps.layers.remove(0)
            # If None is selected
            if (self.selectedLayer == "None"):
                while (len(entity.entityProps.layers) > 0):
                    entity.entityProps.layers.remove(0)
            addedLayer = entity.entityProps.layers.add()
            addedLayer.first, addedLayer.second, addedLayer.third = self.selectedLayer, self.selectedLayer, self.selectedLayer
        return {'FINISHED'}


class BGEE_OT_remove_entity_layer(bpy.types.Operator):
    """Remove the layer at the given index from an entity; re-insert the
    "None" placeholder when the collection becomes empty."""
    bl_idname = "bgee.remove_entity_layer"
    bl_label = ""

    selectedLayer = bpy.props.IntProperty()
    selectedEntity = bpy.props.StringProperty()

    def execute(self, context):
        entity = bpy.data.objects[self.selectedEntity]
        entity.entityProps.layers.remove(self.selectedLayer)
        if (len(entity.entityProps.layers) < 1):
            noneLayer = entity.entityProps.layers.add()
            noneLayer.first, noneLayer.second, noneLayer.third = "None", "None", "None"
        return {'FINISHED'}


''' COMING SOON
class EntityList(bpy.types.UIList):
    def draw_item(self, context, layout, data, item, icon, active_data, active_propname, index):
        ob = data
        slot = item
        ma = slot.material
        if self.layout_type in {'DEFAULT', 'COMPACT'}:
            layout.label(ob.name)
        elif self.layout_type in {'GRID'}:
            layout.alignment = 'CENTER'
            layout.label("", icon_value=icon)
'''
TAURANGA, NEW ZEALAND – Karen Hay rewrote history on Sunday 6th April as she became New Zealand’s quickest and fastest female in her purpose built 1927 Ford Model T roadster powered by a twin turbocharged 482 cubic inch big block Chev engine. The record was previously held by Faye Grant from 1990 with a longer wheelbase rear engine dragster running a 6.629 second pass over the quarter mile at 205.52 miles per hour. Having already run 208 miles per hour in 2012, Karen ran 6.61 seconds at 206 miles per hour over the quarter mile to become BOTH NZ’s fastest & quickest female in drag racing history. It’s a goal which Hay has been chasing since 2010 when the Hay Family Racing team purchased the car named “Evil II” from the late Clive Davis. The mother of two started racing in 2002 after crewing for her father Lindsay for a couple of years and in 2010 she ran her first 6 second pass. The car was built by the late Warren Brogie in California, USA and has now become the world’s quickest and fastest Brogie-built roadster which adds to the excitement of the NZ record. Last weekend’s event at Fram Autolite Dragway, Meremere was the Bay Rodders hosted Nostalgia Drags and Hay describes the day she wrote history as the best day of her life. “I was hoping we were going to do it (the record) for many reasons this weekend. This meet is where my racing all started. Fram Autolite Dragway deserved to have me run the record on their track because their support has been unwavering for 12 years! I have lost two crew members along the way of this journey. I asked them in my cockpit just after my final burn out if you can help me out in any way boys can we do it now just for Dad?”. Hay credits her success in drag racing to her parents, Lindsay & Shona Hay and their involvement in hot rodding, jetboat racing and drag racing. “Hugging Dad on the return road with us both crying with tears of joy and relief is a moment I will never forget. I had always envisaged us doing it one day. 
What a man my father is? He is the hardest working person I know, who has the utmost integrity, he'd do anything for anyone, he never gives up, he shows kindness to many and he and Mum have given me the opportunity that people can only dream of. The life I have been given by Mum and Dad and the support and love I have from my family and friends truly blows me away”. This is a story of one goal, a close knit family and crew blended with the dedication, determination, commitment and a Kiwi can-do attitude to achieve the dream. This past season was challenging with the new set-up of turbo chargers with electronic fuel injection but Hay always had faith in her Dad and both father & daughter worked away towards their dream and achieved it. With the assistance of Robert Ward of RIPS Racing and Jason Cutelli of Infomotive (both based in Rotorua) the car consistently ran in the six second at 200 mph hour zone all weekend getting faster and quicker on each run. It was a dream weekend which also included running the rain dated NZ Nationals final on theSaturday and winning the Competition class as well as resetting the record for the car classification of AA/Altered. Not one to rest on their laurel’s Hay and the team are already talking about getting the car to run even faster and quicker next year. Hay Family Racing would like to thank their competitors, track workers, race fans and the following who have helped directly with the car; RIPS Racing, Infomotive, Spec Rite Torque Convertors, Segedins of Dominion Road, Moran Motorsports, Chuck Mann, Bill Brinsden & Link Engine Management.
#!/usr/bin/python

## Printing troubleshooter

## Copyright (C) 2008, 2009 Red Hat, Inc.
## Copyright (C) 2008, 2009 Tim Waugh <twaugh@redhat.com>

## This program is free software; you can redistribute it and/or modify
## it under the terms of the GNU General Public License as published by
## the Free Software Foundation; either version 2 of the License, or
## (at your option) any later version.

## This program is distributed in the hope that it will be useful,
## but WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
## GNU General Public License for more details.

## You should have received a copy of the GNU General Public License
## along with this program; if not, write to the Free Software
## Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.

import cups
from base import *


class QueueNotEnabled(Question):
    """Troubleshooter page shown when the CUPS queue exists but is stopped."""

    def __init__(self, troubleshooter):
        Question.__init__(self, troubleshooter, "Queue not enabled?")
        # A single wrapped label inside a padded vbox is the whole page.
        self.label = gtk.Label()
        self.label.set_line_wrap(True)
        self.label.set_alignment(0, 0)
        solution = gtk.VBox()
        solution.pack_start(self.label, False, False, 0)
        solution.set_border_width(12)
        troubleshooter.new_page(solution, self)

    def display(self):
        """Build the page text; return False to skip when the queue is enabled."""
        answers = self.troubleshooter.answers
        if not answers['cups_queue_listed']:
            return False

        if answers['is_cups_class']:
            queue = answers['cups_class_dict']
        else:
            queue = answers['cups_printer_dict']

        if queue['printer-state'] != cups.IPP_PRINTER_STOPPED:
            # Queue is enabled; nothing to report on this page.
            return False

        # Prefer the state message fetched from the remote server when the
        # printer is not local.
        if answers['cups_printer_remote']:
            reason = answers['remote_cups_queue_attributes']['printer-state-message']
        else:
            reason = queue['printer-state-message']

        if reason:
            reason = _("The reason given is: '%s'.") % reason
        else:
            reason = _("This may be due to the printer being disconnected or "
                       "switched off.")

        text = ('<span weight="bold" size="larger">' +
                _("Queue Not Enabled") + '</span>\n\n' +
                _("The queue '%s' is not enabled.") % answers['cups_queue'])
        if reason:
            text += ' ' + reason

        # Only suggest local administration steps for local printers.
        if not answers['cups_printer_remote']:
            text += '\n\n'
            text += _("To enable it, select the 'Enabled' checkbox in the "
                      "'Policies' tab for the printer in the printer "
                      "administration tool.")
            text += ' ' + _(TEXT_start_print_admin_tool)

        self.label.set_markup(text)
        return True

    def can_click_forward(self):
        # This page is informational; the user cannot proceed past it.
        return False
You are visiting the placeholder page for Jessica Skinner. This page is here because someone used our placeholder utility to look for Jessica Skinner. We created this page automatically in hopes Jessica Skinner would find it. If you are not Jessica Skinner, but are an alumni of Thorndale High School, register on this site for free now.
#!/usr/bin/env python
# vim: set fileencoding=utf-8 :
# @author: Tiago de Freitas Pereira <tiago.pereira@idiap.ch>
# @date: Wed 11 May 2016 09:39:36 CEST

import numpy


def scale_mean_norm(data, scale=0.00390625):
    """Center *data* on its global mean and multiply by *scale* (default 1/256).

    Returns the normalised array and the mean that was subtracted, so the
    same shift can be applied to other splits.
    """
    mean = numpy.mean(data)
    data = (data - mean) * scale
    return data, mean


# Data
class DataShuffler(object):
    """Shuffle a dataset, split it into train/validation, and serve random
    batches, siamese pairs, and triplets for neural-network training."""

    def __init__(self, data, labels, perc_train=0.9, scale=True):
        """
        Some base functions for neural networks

        **Parameters**
          data: flat samples of shape (n, 784); reshaped to (n, 28, 28, 1).
                Assumes MNIST-like 28x28 single-channel inputs — TODO confirm
                with callers.
          labels: integer label per sample (10 classes are assumed below).
          perc_train: fraction of samples assigned to the training split.
          scale: when True, mean-center the data and scale by 1/256; the
                 validation split reuses the training mean.
        """
        scale_value = 0.00390625
        total_samples = data.shape[0]
        # Idiom fix: numpy.arange instead of numpy.array(range(...)).
        indexes = numpy.arange(total_samples)
        numpy.random.shuffle(indexes)

        # Splitting train and validation
        train_samples = int(round(total_samples * perc_train))
        validation_samples = total_samples - train_samples

        data = numpy.reshape(data, (data.shape[0], 28, 28, 1))
        self.train_data = data[indexes[0:train_samples], :, :, :]
        self.train_labels = labels[indexes[0:train_samples]]

        self.validation_data = data[indexes[train_samples:train_samples + validation_samples], :, :, :]
        self.validation_labels = labels[indexes[train_samples:train_samples + validation_samples]]
        self.total_labels = 10

        if scale:
            self.train_data, self.mean = scale_mean_norm(self.train_data)
            # Apply the *training* mean to validation to avoid leakage.
            self.validation_data = (self.validation_data - self.mean) * scale_value

    def get_batch(self, n_samples, train_dataset=True):
        """Return *n_samples* randomly chosen samples (float32) and their labels.

        **Parameters**
          n_samples: number of samples to draw (without replacement).
          train_dataset: draw from the training split when True, else validation.
        """
        if train_dataset:
            data = self.train_data
            label = self.train_labels
        else:
            data = self.validation_data
            label = self.validation_labels

        # Shuffling samples
        indexes = numpy.arange(data.shape[0])
        numpy.random.shuffle(indexes)

        selected_data = data[indexes[0:n_samples], :, :, :]
        selected_labels = label[indexes[0:n_samples]]
        return selected_data.astype("float32"), selected_labels

    def get_pair(self, n_pair=1, is_target_set_train=True, zero_one_labels=True):
        """
        Get a random pair of samples

        **Parameters**
          n_pair: 2 * n_pair pairs are produced, alternating genuine/impostor.
          is_target_set_train: Defining the target set to get the batch
          zero_one_labels: genuine=0 / impostor=1 when True, else -1 / +1.

        **Return**
          (data, data_p, labels_siamese)
        """

        def get_genuine_or_not(input_data, input_labels, genuine=True):
            # Pick two samples of one client (genuine) or one sample each
            # from two different clients (impostor).
            if genuine:
                # TODO: THIS KEY SELECTION NEEDS TO BE MORE EFFICIENT
                # Getting a client
                index = numpy.random.randint(self.total_labels)
                # Getting the indexes of the data from a particular client
                indexes = numpy.where(input_labels == index)[0]
                numpy.random.shuffle(indexes)
                # Picking a pair
                data = input_data[indexes[0], :, :, :]
                data_p = input_data[indexes[1], :, :, :]
            else:
                # Picking a pair from different clients
                index = numpy.random.choice(self.total_labels, 2, replace=False)
                # Getting the indexes of the two clients
                indexes = numpy.where(input_labels == index[0])[0]
                indexes_p = numpy.where(input_labels == index[1])[0]
                numpy.random.shuffle(indexes)
                numpy.random.shuffle(indexes_p)
                # Picking a pair
                data = input_data[indexes[0], :, :, :]
                data_p = input_data[indexes_p[0], :, :, :]
            return data, data_p

        if is_target_set_train:
            target_data = self.train_data
            target_labels = self.train_labels
        else:
            target_data = self.validation_data
            target_labels = self.validation_labels

        total_data = n_pair * 2
        c = target_data.shape[3]
        w = target_data.shape[1]
        h = target_data.shape[2]

        data = numpy.zeros(shape=(total_data, w, h, c), dtype='float32')
        data_p = numpy.zeros(shape=(total_data, w, h, c), dtype='float32')
        labels_siamese = numpy.zeros(shape=total_data, dtype='float32')

        genuine = True
        for i in range(total_data):
            data[i, :, :, :], data_p[i, :, :, :] = get_genuine_or_not(
                target_data, target_labels, genuine=genuine)
            if zero_one_labels:
                labels_siamese[i] = not genuine
            else:
                labels_siamese[i] = -1 if genuine else +1
            genuine = not genuine
        return data, data_p, labels_siamese

    def get_triplet(self, n_labels, n_triplets=1, is_target_set_train=True):
        """
        Get a triplet

        **Parameters**
          n_labels: number of distinct labels to sample clients from.
          n_triplets: number of (anchor, positive, negative) triplets.
          is_target_set_train: Defining the target set to get the batch

        **Return**
          (data_a, data_p, data_n, labels_a, labels_p, labels_n)
        """

        def get_one_triplet(input_data, input_labels):
            # Getting a pair of clients
            index = numpy.random.choice(n_labels, 2, replace=False)
            label_positive = index[0]
            label_negative = index[1]

            # Anchor and positive come from the same client.
            indexes = numpy.where(input_labels == index[0])[0]
            numpy.random.shuffle(indexes)
            data_anchor = input_data[indexes[0], :, :, :]
            data_positive = input_data[indexes[1], :, :, :]

            # Negative comes from the other client.
            indexes = numpy.where(input_labels == index[1])[0]
            numpy.random.shuffle(indexes)
            data_negative = input_data[indexes[0], :, :, :]

            # Anchor and positive share the same label by construction.
            return (data_anchor, data_positive, data_negative,
                    label_positive, label_positive, label_negative)

        if is_target_set_train:
            target_data = self.train_data
            target_labels = self.train_labels
        else:
            target_data = self.validation_data
            target_labels = self.validation_labels

        c = target_data.shape[3]
        w = target_data.shape[1]
        h = target_data.shape[2]

        data_a = numpy.zeros(shape=(n_triplets, w, h, c), dtype='float32')
        data_p = numpy.zeros(shape=(n_triplets, w, h, c), dtype='float32')
        data_n = numpy.zeros(shape=(n_triplets, w, h, c), dtype='float32')
        labels_a = numpy.zeros(shape=n_triplets, dtype='float32')
        labels_p = numpy.zeros(shape=n_triplets, dtype='float32')
        labels_n = numpy.zeros(shape=n_triplets, dtype='float32')

        for i in range(n_triplets):
            data_a[i, :, :, :], data_p[i, :, :, :], data_n[i, :, :, :], \
                labels_a[i], labels_p[i], labels_n[i] = \
                get_one_triplet(target_data, target_labels)

        return data_a, data_p, data_n, labels_a, labels_p, labels_n
There are different reasons a community may flood: storm surge, river flooding, or heavy rainfall.
import numpy as np
import math
import scipy.special as sp
from scipy.interpolate import lagrange
from numpy.polynomial.chebyshev import chebgauss
from utilities import *
from interfaces import *
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D  # registers the '3d' projection

# Two stacked helix-like test curves sharing the same x/y trace but with
# different pitches in z.
R = 1
P = 1
intervals = 9
vs_order = 2
n = (intervals * (vs_order) + 1 - 1)
print(n)

ii = np.linspace(0, 2, n + 1)
control_points_3d = np.asarray(np.zeros([n + 1, 2, 3]))
control_points_3d[:, 0, 0] = np.array([R * np.cos(5 * i * np.pi / (n + 1)) for i in ii])
control_points_3d[:, 0, 1] = np.array([R * np.sin(5 * i * np.pi / (n + 1)) for i in ii])
control_points_3d[:, 0, 2] = np.array([P * i for i in range(n + 1)])
control_points_3d[:, 1, 0] = np.array([R * np.cos(5 * i * np.pi / (n + 1)) for i in ii])
control_points_3d[:, 1, 1] = np.array([R * np.sin(5 * i * np.pi / (n + 1)) for i in ii])
control_points_3d[:, 1, 2] = np.array([2 * P * i for i in range(n + 1)])

vsl = IteratedVectorSpace(UniformLagrangeVectorSpace(vs_order + 1),
                          np.linspace(0, 1, intervals + 1))
print(vsl.n_dofs)

# BSpline parameters (kept for the commented-out BsplineVectorSpace variant)
n = 17
p = 3
n_ls = 140  # number of least-square points
# Open knot vector
knots = np.zeros(n + 2 * p)
knots[p:-p] = np.linspace(0, 1, n)
knots[0:p] = 0
knots[-p::] = 1
#vsl = BsplineVectorSpace(p, knots)

# Reparametrize by arc length, then reparametrize again to verify idempotence.
arky = ArcLengthParametrizer(vsl, control_points_3d)
new_control_points_3d = arky.reparametrize()
new_arky = ArcLengthParametrizer(vsl, new_control_points_3d)
new_arky.reparametrize()

plt.plot(arky.points_s[:, 0], arky.points_s[:, 1], label='original')
plt.plot(new_arky.points_s[:, 0], new_arky.points_s[:, 1], label='reparametrized')
plt.legend()
plt.savefig('new_arclength.png')
plt.close()
plt.close()

print(np.amax(np.abs(control_points_3d - new_control_points_3d)))

tt = np.linspace(0, 1, 128)
tt4 = 4 * tt + 1

vals_1 = vsl.element(np.squeeze(control_points_3d[:, 0, :]))(tt)
vals_2 = vsl.element(np.squeeze(control_points_3d[:, 1, :]))(tt)
new_vals_1 = vsl.element(np.squeeze(new_control_points_3d[:, 0, :]))(tt)
new_vals_2 = vsl.element(np.squeeze(new_control_points_3d[:, 1, :]))(tt)


def _save_comparison(vals, new_vals, curve_idx, filename):
    """Plot one curve before/after reparametrization with both control polygons."""
    fig = plt.figure()
    ax = fig.gca(projection='3d')
    ax.plot(np.squeeze(np.array(vals[0, :])),
            np.squeeze(np.array(vals[1, :])),
            np.squeeze(np.array(vals[2, :])), 'r', label='test_curve')
    ax.plot(np.squeeze(np.array(control_points_3d[:, curve_idx, 0])),
            np.squeeze(np.array(control_points_3d[:, curve_idx, 1])),
            np.squeeze(np.array(control_points_3d[:, curve_idx, 2])),
            'r*-', label='orig_cp')
    ax.plot(np.squeeze(np.array(new_vals[0, :])),
            np.squeeze(np.array(new_vals[1, :])),
            np.squeeze(np.array(new_vals[2, :])), 'g', label='new_test_curve')
    ax.plot(np.squeeze(np.array(new_control_points_3d[:, curve_idx, 0])),
            np.squeeze(np.array(new_control_points_3d[:, curve_idx, 1])),
            np.squeeze(np.array(new_control_points_3d[:, curve_idx, 2])),
            'g*-', label='new_cp')
    ax.legend()
    plt.savefig(filename)
    plt.close()
    plt.close()


_save_comparison(vals_1, new_vals_1, 0, 'test_curve_1.png')
_save_comparison(vals_2, new_vals_2, 1, 'test_curve_2.png')
Choologic - Puzzle game Choologic puts you through a full working day as a train company engineer. See if you are smart and talented enough to lead the trains to the places where they belong. So if you agree that puzzle games are fun, play MonsterMover. Have fun!
# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack import *


class Blaze(CMakePackage):
    """Blaze is an open-source, high-performance C++ math library for dense and
    sparse arithmetic. With its state-of-the-art Smart Expression Template
    implementation Blaze combines the elegance and ease of use of a
    domain-specific language with HPC-grade performance, making it one of the
    most intuitive and fastest C++ math libraries available.
    """

    homepage = "https://bitbucket.org/blaze-lib/blaze/overview"
    url = "https://bitbucket.org/blaze-lib/blaze/downloads/blaze-3.8.tar.gz"
    git = "https://bitbucket.org/blaze-lib/blaze.git"

    version('master', branch='master')

    # 3.x series
    version('3.8', sha256='dfaae1a3a9fea0b3cc92e78c9858dcc6c93301d59f67de5d388a3a41c8a629ae')
    version('3.7', sha256='ef3cbc5db7d62dcdde0af88d3c951051254afd750d26773406fddb6afc5ad890')
    version('3.6', sha256='2ebbadacaf3f066e27352c1e413ead127b7ced8a3b202ae45f39c8f5f12324cc')
    version('3.5', sha256='f50d4a57796b8012d3e6d416667d9abe6f4d95994eb9deb86cd4491381dec624')
    version('3.4', sha256='fd474ab479e81d31edf27d4a529706b418f874caa7b046c67489128c20dda66f')
    version('3.3', sha256='138cbb7b95775c10bf56a5ab3596a32205751299b19699984b6ed55b1bf989d0')
    version('3.2', sha256='fb7e83d3a8c1ba04d3a51234708092b75a1abf3b7c4d0db5e6cf3cbed771b869')
    version('3.1', sha256='a122d6758d9ada7ab516417f7b5ad186a4a9b390bba682f009df6585f5550716')
    version('3.0', sha256='d66abaf4633d60b6e6472f6ecd7db7b4fb5f74a4afcfdf00c92e1ea61f2e0870')

    # 2.x series
    version('2.6', sha256='a6b927db14b43fad483670dfa2acd7ecc94fd53085cdf18f262d2dc613857fb6')
    version('2.5', sha256='5faeca8a26e04f70a5b3f94e88ef1fbe96a89e3722cd89e5f9d4bc8267b33d41')
    version('2.4', sha256='34af70c8bb4da5fd0017b7c47e5efbfef9aadbabc5aae416582901a4059d1fa3')
    version('2.3', sha256='785089db7f15684c24018b931f9f564954a79389166ac1f3e256a56c667d49f2')
    version('2.2', sha256='448e70a440d71afa6325bae254ca7367b10e61431084adbf2ac679dbd5da78d2')
    version('2.1', sha256='b982c03236c6a7ae396850eba0ef8fb1642ddf6448531063bf7239d9ff3290fd')
    version('2.0', sha256='7bdf555e97455a2f42f40396b32caa9cf3e52bdd1877e0289115825113f4dcb2')

    # 1.x series
    version('1.5', sha256='5c69b605b712616dcd29fa25abecb20b977ef318207ef96176ab67b2ad891e1e')
    version('1.4', sha256='2e48d2e5a3a06abb23716829501bb0b825c58ad156faab6df0cfeef1bcdfbc82')
    version('1.3', sha256='361bfbf2d2bf8557d123da3af8abc70e4c3b13d9c94a8227aeb751e06acdb8cf')
    version('1.2', sha256='16f56d4f61dca229fa7e17a0d1e348a1f3246c65cded2df5db33babebf8f9b9d')
    version('1.1', sha256='6add20eb9c176ea9f8091c49b101f46d1a1a6bd9c31553a6eff5e53603f0527f')
    version('1.0', sha256='ee13cfd467c1a4b0fe7cc58b61b846eae862167a90dd2e60559626a30418b5a3')

    # Blaze requires at least cmake 3.8.0 for C++14 features.
    depends_on('cmake@3.8.0:', type='build')
    depends_on('blas')
Center for Rural Affairs Policy Associate Anna Johnson says the recently passed 2018 federal spending bill provides support for rural America in several important programs. The bill funds the government through Sept. 30, 2018. NRCS funding for technical assistance increased to $874 million from 2017, Johnson explained, noting that this funding supports local NRCS offices, where farmers and ranchers access technical assistance for conservation practices. “The bill funds the Value-Added Producer Grant Program, which allows farmers and ranchers to diversify their income by processing farm and ranch products. This program was funded at the same level as 2017, $15 million,” the release said. The Rural Microentrepreneur Assistance Program (RMAP), which provides loan funds and technical assistance to rural entrepreneurs, remains at the funding level provided in the previous farm bill, $2.8 million, without additional support. The Sustainable Agriculture Research and Education Program received its highest funding level in 30 years, at $35 million. “We are very encouraged that Congress has shown this support for publicly funded research in sustainable agriculture,” Johnson said. The Outreach and Assistance for Socially Disadvantaged and Veteran Farmers and Ranchers Program, also known as the 2501 Program, received $3 million in funding for 2018, which is in addition to the $10 million in funding provided by the farm bill. This program has allowed many farmers and ranchers from around the country, who are new to farm programs, to access U.S. Department of Agriculture support.