# Copyright (c) 2019 J. Alvarez-Jarreta and C.J. Brasher # # This file is part of the LipidFinder software tool and governed by the # 'MIT License'. Please see the LICENSE file that should have been # included as part of this software. """Graphical User Interface (GUI) to manage the parameters' collection. """ from collections import OrderedDict import os from IPython.display import display from ipywidgets import widgets, Layout import pandas from LipidFinder.Configuration import LFParameters from LipidFinder._utils import normalise_path class _TaggedToggleButton(widgets.ToggleButton): """Add "tag" attribute to widgets.ToggleButton class.""" def __init__(self, tag, **kwargs): widgets.ToggleButton.__init__(self, **kwargs) self.tag = tag class _TaggedCheckbox(widgets.Checkbox): """Add "tag" attribute to widgets.Checkbox class.""" def __init__(self, tag, **kwargs): widgets.Checkbox.__init__(self, **kwargs) self.tag = tag class _TaggedButton(widgets.Button): """Add "tag" attribute to widgets.Button class.""" def __init__(self, tag, **kwargs): widgets.Button.__init__(self, **kwargs) self.tag = tag class LFParametersGUI(LFParameters): """A LFParametersGUI object stores a set of LipidFinder parameters to be used in the specified module. This subclass of LFParameters implements a graphical interface using jupyter notebook's widgets, executed during the object creation. It allows the user to check, change and save each active parameter's value interactively. Attributes: _parameters (Private[collections.OrderedDict]) Dictionary where the parameters and their associated information are stored. _floatPointPrecision (Private[int]) Number of digits after the radix point in floats. _floatStep (Private[float]) Minimum difference between two consecutive float numbers. _style (Private[dict]) Dictionary with the default style settings for widgets. _inputWidth (Private[str]) String representation of the default width of input widgets. _widgets (Private[collections.OrderedDict]) Dictionary where the widgets for each parameter are stored. Examples: LFParametersGUI objects can be created as follows: >>> from Configuration.LFParametersGUI import ... LFParametersGUI >>> LFParametersGUI() >>> LFParametersGUI(src='/home/user/my_parameters.json') The former will load the default PeakFilter parameters and will load and display the interface afterwards. The latter will load the default PeakFilter parameters, override them with the values found in the JSON file provided, and finally it will load and display the interface. Alternatively, a specific module can be introduced as argument: >>> from Configuration.LFParametersGUI import ... LFParametersGUI >>> LFParametersGUI(module='mssearch') """ def __init__(self, precision=4, **kwargs): # type: (int, ...) -> LFParametersGUI """Constructor of the class LFParametersGUI. First, the module's parameters template file is loaded. Next, if a source JSON parameters file path is provided, the default values are overwritten by the corresponding new (valid) values. Finally, the graphical user interface is displayed. Keyword Arguments: precision -- number of decimal digits to use with floats (e.g. 
a precision of 2 forces a difference of 0.01 between any two consecutive float numbers) [default: 4] """ # Minimum difference between two consecutive float numbers self._floatPointPrecision = precision self._floatStep = 10 ** -(precision) # Load the parameters dictionary using parent class' constructor LFParameters.__init__(self, **kwargs) # Default style self._style = {'description_width': '0px'} # Default width of input widgets self._inputWidth = '26%' # Generate an ordered dict to store each parameter's set of # widgets in the same order as in the parameters' dict self._widgets = OrderedDict() # Create every widget of the GUI for key, data in self._parameters.items(): disabled = not self._is_active(key) # Load the information of each parameter self._widgets[key] = [self._create_label(key, disabled), self._create_help_icon(key, disabled)] # Create the input widget or container of input widgets for # each parameter type if (data['type'] == 'bool'): self._widgets[key].append( self._create_bool_widget(key, disabled)) elif (data['type'] == 'int'): self._widgets[key].append( self._create_int_widget(key, disabled)) elif (data['type'] == 'float'): self._widgets[key].append( self._create_float_widget(key, disabled)) elif (data['type'] == 'selection'): self._widgets[key].append( self._create_selection_widget(key, disabled)) elif (data['type'] == 'path'): self._widgets[key].append( self._create_path_widget(key, disabled)) elif (data['type'] == 'int range'): self._widgets[key].append( self._create_int_range_widget(key, disabled)) elif (data['type'] == 'float range'): self._widgets[key].append( self._create_float_range_widget(key, disabled)) elif (data['type'] == 'multiselection'): self._widgets[key].append( self._create_multiselection_widget(key, disabled)) elif (data['type'] == 'pairs'): self._widgets[key].append( self._create_pairs_widget(key, disabled)) else: # data['type'] == 'str' self._widgets[key].append( self._create_str_widget(key, disabled)) # Display the GUI hboxLayout = Layout(align_items='center') for key, widgetList in self._widgets.items(): display(widgets.HBox(widgetList, layout=hboxLayout)) # Finally, create the save interface to allow the user to save # the current parameters values in a JSON file display(widgets.HBox([], layout=Layout(height='15px'))) display(widgets.HBox([], layout=Layout(height='0px', border='2px solid lightgray'))) display(widgets.HBox([], layout=Layout(height='2px'))) self._widgets['save'] = self._create_save_widget() hboxLayout = Layout(justify_content='space-between', align_items='center') display(widgets.HBox(self._widgets['save'], layout=hboxLayout)) def _create_label(self, key, disabled): # type: (str, bool) -> widgets.HTML """Return an HTML widget with the parameter's description. If 'disabled' is False, the text will be in black, otherwise it will be in gray. Keyword Arguments: key -- name of the parameter disabled -- is the parameter/widget disabled? """ text = self._parameters[key]['description'] label = ("<p style=\"font-size:110%; line-height:19px; color:{0};\">{1}" "</p>").format('Gray' if disabled else 'Black', text) return widgets.HTML(value=label, style=self._style, layout=Layout(width='50%')) def _create_help_icon(self, key, disabled): # type: (str, bool) -> widgets.HTML """Return an HTML widget with the parameter's help as tooltip of a help icon. Keyword Arguments: key -- name of the parameter disabled -- is the parameter/widget disabled? 
""" if ('help' in self._parameters[key]): code = ("<link rel=\"stylesheet\" href=\"https://fonts.googleapis.c" "om/icon?family=Material+Icons\"><i class=\"material-icons" "\" style=\"color:{0}; font-size:18px; display:inline" "-flex; vertical-align:middle;\" title=\"{1}\">help</i>" "").format("SteelBlue", self._parameters[key]['help']) else: code = '' layout = Layout(width='2%', visibility='hidden' if disabled else 'visible') return widgets.HTML(value=code, style=self._style, layout=layout) def _create_str_widget(self, key, disabled): # type: (str, bool) -> widgets.Text """Return a Text widget with the parameter's value. Keyword Arguments: key -- name of the parameter disabled -- is the parameter/widget disabled? """ if ('example' in self._parameters[key]): example = self._parameters[key]['example'] else: example = '' inputWidget = widgets.Text( value=self[key], description=key, placeholder=example, style=self._style, layout=Layout(width=self._inputWidth), continuous_update=False, disabled=disabled) # Add handler for when the "value" trait changes inputWidget.observe(self._default_handler, names='value') return inputWidget def _create_bool_widget(self, key, disabled): # type: (str, bool) -> widgets.HBox """Return an HBox containing a ToggleButton widget to represent the parameter's value. Keyword Arguments: key -- name of the parameter disabled -- is the parameter/widget disabled? """ inputWidget = _TaggedToggleButton( value=self[key], description='Yes' if self[key] else 'No', tag=key, style=self._style, layout=Layout(width='50%'), button_style='primary', disabled=disabled) # Add handler for when the "value" trait changes inputWidget.observe(self._bool_handler, names='value') layout = Layout(width=self._inputWidth, justify_content='center') return widgets.HBox([inputWidget], layout=layout) def _create_int_widget(self, key, disabled): # type: (str, bool) -> widgets.BoundedIntText """Return a BoundedIntText widget with the parameter's value. Keyword Arguments: key -- name of the parameter disabled -- is the parameter/widget disabled? """ inputWidget = widgets.BoundedIntText( value=self[key], description=key, min=self._min(key), max=self._max(key), style=self._style, layout=Layout(width=self._inputWidth), continuous_update=False, disabled=disabled) # Save the widget's value in case its constructor automatically # replaces an empty one given as argument self._parameters[key]['value'] = inputWidget.value # Add handler for when the "value" trait changes inputWidget.observe(self._default_handler, names='value') return inputWidget def _create_float_widget(self, key, disabled): # type: (str, bool) -> widgets.BoundedFloatText """Return a BoundedFloatText widget with the parameter's value. Keyword Arguments: key -- name of the parameter disabled -- is the parameter/widget disabled? """ inputWidget = widgets.BoundedFloatText( value=self[key], description=key, min=self._min(key), max=self._max(key), step=self._floatStep, style=self._style, layout=Layout(width=self._inputWidth), continuous_update=False, disabled=disabled) # Save the widget's value in case its constructor automatically # replaces an empty one given as argument self._parameters[key]['value'] = inputWidget.value # Add handler for when the "value" trait changes inputWidget.observe(self._default_handler, names='value') return inputWidget def _create_selection_widget(self, key, disabled): # type: (str, bool) -> widgets.Dropdown """Return a Dropdown widget with the parameter's options and its current value selected. 
Keyword Arguments: key -- name of the parameter disabled -- is the parameter/widget disabled? """ inputWidget = widgets.Dropdown( options=self._parameters[key]['options'], value=self[key], description=key, style=self._style, layout=Layout(width=self._inputWidth), disabled=disabled) # Add handler for when the "value" trait changes inputWidget.observe(self._default_handler, names='value') return inputWidget def _create_path_widget(self, key, disabled): # type: (str, bool) -> widgets.HBox """Return an HBox containing a Text widget with the parameter's value. If the Text widget is enabled and the file does not exist, a warning icon will be displayed next to it to alert the user. Keyword Arguments: key -- name of the parameter disabled -- is the parameter/widget disabled? """ inputWidget = widgets.Text( value=self[key], description=key, style=self._style, layout=Layout(width='92%'), continuous_update=False, disabled=disabled) # Add handler for when the "value" trait changes inputWidget.observe(self._path_handler, names='value') # Create an HTML widget with a warning icon that will be # displayed if the Text widget is enabled and the file does not # exist code = ("<link rel=\"stylesheet\" href=\"https://fonts.googleapis.com/i" "con?family=Material+Icons\"><i class=\"material-icons\" style=" "\"font-size:18px; color:Red; display:inline-flex; vertical-ali" "gn:middle;\" title=\"File not found!\">warning</i>") warn = not disabled and not os.path.isfile(self[key]) layout = Layout(width='5%', visibility='visible' if warn else 'hidden') warnWidget = widgets.HTML(value=code, style=self._style, layout=layout) layout = Layout(width='46%', justify_content='space-between') return widgets.HBox([inputWidget, warnWidget], layout=layout) def _create_int_range_widget(self, key, disabled): # type: (str, bool) -> widgets.HBox """Return an HBox containing two BoundedIntText widgets with the parameter's range values. The widgets are created to fulfill the "int range" type condition: lower_bound < upper_bound Keyword Arguments: key -- name of the parameter disabled -- is the parameter/widget disabled? """ lowerBound = widgets.BoundedIntText( value=self[key][0], description=key, min=self._min(key), max=self[key][1] - 1, style=self._style, layout=Layout(width='50%'), continuous_update=False, disabled=disabled) # Save the widget's value in case its constructor automatically # replaces an empty one given as argument self._parameters[key]['value'][0] = lowerBound.value # Add handler for when the "value" trait changes lowerBound.observe(self._range_handler, names='value') upperBound = widgets.BoundedIntText( value=self[key][1], description=key, min=self[key][0] + 1, max=self._max(key), style=self._style, layout=Layout(width='50%'), continuous_update=False, disabled=disabled) # Save the widget's value in case its constructor automatically # replaces an empty one given as argument self._parameters[key]['value'][1] = upperBound.value # Add handler for when the "value" trait changes upperBound.observe(self._range_handler, names='value') return widgets.HBox([lowerBound, upperBound], layout=Layout(width=self._inputWidth)) def _create_float_range_widget(self, key, disabled): # type: (str, bool) -> widgets.HBox """Return an HBox containing two BoundedFloatText widgets with the parameter's range values. The widgets are created to fulfill the "float range" type condition: lower_bound < upper_bound Keyword Arguments: key -- name of the parameter disabled -- is the parameter/widget disabled? 
""" lowerBound = widgets.BoundedFloatText( value=self[key][0], description=key, min=self._min(key), max=self[key][1] - self._floatStep, step=self._floatStep, style=self._style, layout=Layout(width='50%'), continuous_update=False, disabled=disabled) # Save the widget's value in case its constructor automatically # replaces an empty one given as argument self._parameters[key]['value'][0] = lowerBound.value # Add handler for when the "value" trait changes lowerBound.observe(self._range_handler, names='value') upperBound = widgets.BoundedFloatText( value=self[key][1], description=key, min=self[key][0] + self._floatStep, max=self._max(key), step=self._floatStep, style=self._style, layout=Layout(width='50%'), continuous_update=False, disabled=disabled) # Save the widget's value in case its constructor automatically # replaces an empty one given as argument self._parameters[key]['value'][1] = upperBound.value # Add handler for when the "value" trait changes upperBound.observe(self._range_handler, names='value') return widgets.HBox([lowerBound, upperBound], layout=Layout(width=self._inputWidth)) def _create_multiselection_widget(self, key, disabled): # type: (str, bool) -> widgets.Box """Return a Box containing as many Checkbox widgets as parameter's options, with those in its "value" field checked. Keyword Arguments: key -- name of the parameter disabled -- is the parameter/widget disabled? """ itemWidgets = [] for item in self._parameters[key]['options']: layoutWidth = '23%' if (len(item) <= 10) else '48%' inputWidget = _TaggedCheckbox( value=item in self[key], description=item, tag=key, style=self._style, layout=Layout(width=layoutWidth), disabled=disabled) # Add handler for when the "value" trait changes inputWidget.observe(self._multiselection_handler, names='value') itemWidgets.append(inputWidget) layout = Layout(width='46%', display='flex', flex_flow='row wrap', justify_content='space-between') return widgets.Box(itemWidgets, layout=layout) def _create_pairs_widget(self, key, disabled): # type: (str, bool) -> widgets.HBox """Return an HBox containing the interface to add and remove pairs of available elements. The term "available elements" refers to those elements in the first column of the CSV file's path stored under the parameter's "file" key. Users will not be able to add existing pairs or pairs formed by the same element twice. Keyword Arguments: key -- name of the parameter disabled -- is the parameter/widget disabled? 
""" # Load the list of available elements from the first column of # the CSV file saved under the parameter's "file" key srcFilePath = self[self._parameters[key]['file']] options = pandas.read_csv(srcFilePath).iloc[:, 0].tolist() # Create two Select widgets with the list of available elements leftSelect = widgets.Select( options=options, rows=4, style=self._style, layout=Layout(width='20%'), disabled=disabled) rightSelect = widgets.Select( options=options, rows=4, style=self._style, layout=Layout(width='20%'), disabled=disabled) # Create the add and remove buttons with the handler to add and # remove pairs, respectively addButton = _TaggedButton( description='Pair >>', tooltip='Add new pair', tag=key, layout=Layout(width='95%'), disabled=disabled) # Add handlerfor when the button is clicked addButton.on_click(self._pairs_add_handler) delButton = _TaggedButton( description='<< Remove', tooltip='Remove selected pair', tag=key, layout=Layout(width='95%'), disabled=disabled) # Add handler for when the button is clicked delButton.on_click(self._pairs_del_handler) layout = Layout(width='21%', justify_content='space-around') # Hold the buttons in a VBox to get the desired layout buttonsBox = widgets.VBox([addButton, delButton], layout=layout) # Create a Select widget with the parameter's list of pairs pairs = [' , '.join(x) for x in self[key]] pairsSelect = widgets.Select( options=pairs, rows=4, style=self._style, layout=Layout(width='28%'), disabled=disabled) layout = Layout(width='46%', justify_content='space-around') return widgets.HBox([leftSelect, rightSelect, buttonsBox, pairsSelect], layout=layout) def _create_save_widget(self): # type: () -> list """Return a list containing the interface to save the current parameters values as a JSON file in an introduced path. 
""" text = ("<p style=\"font-size:110%; line-height:19px; color:Black;\">" "Where do you want to save the new set of parameters?</p>") label = widgets.HTML(value=text, style=self._style, layout=Layout(width='38%')) # Create the path input widget (Text) with a default path and # file name defaultPath = normalise_path("parameters.json") inputWidget = widgets.Text( value=defaultPath, placeholder=defaultPath, style=self._style, layout=Layout(width='40%'), continuous_update=False) # Add handler for when the "value" trait changes inputWidget.observe(self._save_path_handler, names='value') # Create an HTML widget with a warning icon that will be # displayed if the directory path does not exist code = ("<link rel=\"stylesheet\" href=\"https://fonts.googleapis.com/i" "con?family=Material+Icons\"><i class=\"material-icons\" style=" "\"font-size:18px; color:Red; display:inline-flex; vertical-ali" "gn:middle;\" title=\"Path not found!\">warning</i>") dirPath = os.path.split(inputWidget.value)[0] visibility = 'visible' if not os.path.isdir(dirPath) else 'hidden' layout = Layout(width='2%', visibility=visibility) warnWidget = widgets.HTML(value=code, style=self._style, layout=layout) # Create a save button that will be active only if every active # parameter is valid and the destination path exists saveButton = widgets.Button( description='Save', button_style='danger', tooltip='Save parameters in a JSON file', layout=Layout(width='12%', height='35px'), disabled=not self._valid_parameters()) # Add handler for when the button is clicked saveButton.on_click(self._save_button_handler) return [label, inputWidget, warnWidget, saveButton] def _update(self): # type: () -> None """Return an HBox containing the interface to add and remove pairs of available elements. The term "available elements" refers to those elements in the first column of the CSV file's path stored under the parameter's "file" key. Users will not be able to add existing pairs or pairs formed by the same element twice. If the CSV file path changes, the pairs list will be emptied and the set of available elements will be updated. 
""" # Update the status and/or visibility of each parameter's widget for key in self._parameters.keys(): interface = self._widgets[key] disabled = not self._is_active(key) if (disabled): interface[0].value = interface[0].value.replace('Black', 'Gray') else: interface[0].value = interface[0].value.replace('Gray', 'Black') interface[1].layout.visibility = 'hidden' if disabled else 'visible' typeStr = self._parameters[key]['type'] if (typeStr == 'bool'): interface[2].children[0].disabled = disabled elif (typeStr in ['int', 'float']): # Update minimum and maximum bounds too interface[2].min = self._min(key) interface[2].max = self._max(key) interface[2].disabled = disabled elif (typeStr == 'path'): interface[2].children[0].disabled = disabled # Display the warning widget if the parameter is enabled # and the file does not exist if (not disabled and not os.path.isfile(self[key])): interface[2].children[1].layout.visibility = 'visible' else: interface[2].children[1].layout.visibility = 'hidden' elif (typeStr in ['int range', 'float range']): # Update minimum and maximum bounds of the range too interface[2].children[0].min = self._min(key) interface[2].children[0].disabled = disabled interface[2].children[1].max = self._max(key) interface[2].children[1].disabled = disabled elif (typeStr == 'multiselection'): for child in interface[2].children: child.disabled = disabled elif (typeStr == 'pairs'): interface[2].children[0].disabled = disabled interface[2].children[1].disabled = disabled for grandchild in interface[2].children[2].children: grandchild.disabled = disabled interface[2].children[3].disabled = disabled else: interface[2].disabled = disabled # Ensure the save button should be available and ready to save # the new set of parameters self._widgets['save'][3].description = 'Save' self._widgets['save'][3].icon = '' self._widgets['save'][3].disabled = not self._valid_parameters() def _default_handler(self, change): # type: (dict) -> None """Handle the "value" trait change assigning the new value to the corresponding parameter. The update() method is launched at the end to ensure every widget is updated according to the change in this parameter. Keyword Arguments: change -- dict holding the information about the change """ key = change['owner'].description self._parameters[key]['value'] = change['new'] self._update() def _bool_handler(self, change): # type: (dict) -> None """Handle the "value" trait change assigning the new value to the corresponding "bool" type parameter. The update() method is launched at the end to ensure every widget is updated according to the change in this parameter. Keyword Arguments: change -- dict holding the information about the change """ key = change['owner'].tag self._parameters[key]['value'] = change['new'] # Change ToggleButton's description to "Yes" or "No" depending # on whether its new value is True or False, respectively change['owner'].description = 'Yes' if change['new'] else 'No' self._update() def _path_handler(self, change): # type: (dict) -> None """Handle the "value" trait change assigning the new value to the corresponding "path" type parameter. The update() method is launched at the end to ensure every widget is updated according to the change in this parameter. 
Keyword Arguments: change -- dict holding the information about the change """ key = change['owner'].description self._parameters[key]['value'] = normalise_path(change['new']) # Replace the introduced path by its normalised version to # provide the user with more information in case there is # something wrong with the path change['owner'].value = self[key] # Get the "pairs" type parameter that has this parameter in its # "field" key to update the contents of its widgets for param, data in self._parameters.items(): if ((data['type'] == 'pairs') and (data['file'] == key)): pairsWidget = self._widgets[param][2] if (os.path.isfile(self[key])): # Update the information of available elements options = pandas.read_csv(self[key]).iloc[:, 0].tolist() pairsWidget.children[0].options = options pairsWidget.children[1].options = options else: # Since the file does not exist, there are no # available elements pairsWidget.children[0].options = [] pairsWidget.children[1].options = [] # Since the file has changed, empty the list of pairs self._parameters[param]['value'] = [] pairsWidget.children[3].options = [] break self._update() def _range_handler(self, change): # type: (dict) -> None """Handle the "value" trait change assigning the new value to the corresponding "int/float range" type parameter. The update() method is launched at the end to ensure every widget is updated according to the change in this parameter. Keyword Arguments: change -- dict holding the information about the change """ key = change['owner'].description # Both children have the same step step = self._widgets[key][2].children[0].step if (change['owner'].min == self._min(key)): # Trait changed in the widget corresponding to the lower # bound of the range self._parameters[key]['value'][0] = change['new'] self._widgets[key][2].children[1].min = change['new'] + step else: # Trait changed in the widget corresponding to the upper # bound of the range self._parameters[key]['value'][1] = change['new'] self._widgets[key][2].children[0].max = change['new'] - step self._update() def _multiselection_handler(self, change): # type: (dict) -> None """Handle the "value" trait change updating the list of values of the corresponding "multiselection" type parameter. The update() method is launched at the end to ensure every widget is updated according to the change in this parameter. Keyword Arguments: change -- dict holding the information about the change """ key = change['owner'].tag if (change['new']): self._parameters[key]['value'].append(change['owner'].description) else: self._parameters[key]['value'].remove(change['owner'].description) self._update() def _pairs_add_handler(self, button): # type: (_TaggedButton) -> None """Handle when the button is clicked to add a pair to the corresponding "pairs" type parameter. The update() method is launched at the end to ensure every widget is updated according to the change in this parameter. 
Keyword Arguments: button -- clicked button widget instance """ key = button.tag # Add selected elements in both Selection widgets as a new pair leftSel = self._widgets[key][2].children[0].value rightSel = self._widgets[key][2].children[1].value newPair = [leftSel, rightSel] # The pairs are considered sets, that is, the order of the # elements is ignored if ((leftSel != rightSel) and (newPair not in self[key]) and (newPair[::-1] not in self[key])): self._parameters[key]['value'].append(newPair) # Since the "options" field is a tuple, build a new list # with the new pair self._widgets[key][2].children[3].options = \ [' , '.join(x) for x in self[key]] self._update() def _pairs_del_handler(self, button): # type: (_TaggedButton) -> None """Handle when the button is clicked to remove a pair of the corresponding "pairs" type parameter. The update() method is launched at the end to ensure every widget is updated according to the change in this parameter. Keyword Arguments: button -- clicked button widget instance """ key = button.tag pairsWidget = self._widgets[key][2].children[3] # Get the selected pair from the pairs widget pairSel = pairsWidget.value if (pairSel): pair = pairSel.split(' , ') self._parameters[key]['value'].remove(pair) # Since the "options" field is a tuple, build a new list # without the deleted pair pairsWidget.options = [' , '.join(x) for x in self[key]] # Select the first pair to ensure coherence with the change if (pairsWidget.options): pairsWidget.value = pairsWidget.options[0] self._update() def _save_path_handler(self, change): # type: (dict) -> None """Handle the "value" trait change checking if the path where to save the parameters values exists. A warning sign will be displayed if the given directory path does not exist. The update() method is launched at the end to ensure every widget is updated according to the change in this parameter. Keyword Arguments: change -- dict holding the information about the change """ newPath = normalise_path(change['new']) dirPath = os.path.split(newPath)[0] if (not os.path.isdir(dirPath)): self._widgets['save'][2].layout.visibility = 'visible' else: self._widgets['save'][2].layout.visibility = 'hidden' # Replace the introduced path by its normalised version to # provide the user with more information in case there is # something wrong change['owner'].value = newPath self._update() def _save_button_handler(self, button): # type: (widgets.Button) -> None """Handle when the button is clicked to save the parameters values in a JSON file. Keyword Arguments: button -- clicked button widget instance """ self.write(self._widgets['save'][1].value) # Change the button's text to tell the user the JSON parameters # file has been correctly created button.description = 'Saved' button.icon = 'check' def _min(self, key): # type: (str) -> object """Return the largest value in the parameter's "min" list. Applies round() method to the output of LFParameter._max() to get a more comparable result regarding floating point arithmetic issues. Keyword Arguments: key -- name of the parameter """ return round(LFParameters._min(self, key), self._floatPointPrecision) def _max(self, key): # type: (str) -> object """Return the smallest value in the parameter's "max" list. Applies round() method to the output of LFParameter._max() to get a more comparable result regarding floating point arithmetic issues. 
Keyword Arguments: key -- name of the parameter """ return round(LFParameters._max(self, key), self._floatPointPrecision) def _valid_parameters(self): # type: () -> bool """Return True if every active parameter has a valid value, False otherwise. The list of valid parameters also includes "save" destination path, where the JSON parameters file will be saved. """ enabledKeys = (x for x in self._parameters.keys() if self._is_active(x)) for key in enabledKeys: data = self._parameters[key] # Only "multiselection" type parameters can be empty ([]) if ((data['type'] != 'multiselection') and (data['value'] in [None, '', []])): return False # "path" type parameters must be checked manually, whilst # the rest are already controlled by their widget if ((data['type'] == 'path') and not os.path.isfile(data['value'])): return False # This method is also called when the save interface is being # created, so the "save" key will not exist yet if ('save' in self._widgets): # Check if the directory path where to save the JSON # parameters file exists dirPath = os.path.split(self._widgets['save'][1].value)[0] if (not os.path.isdir(dirPath)): return False return True
Find out how far it is to Cobram, Victoria, Australia, and plan your travel there: hotels and local tours, car hire options, and places to eat in and around Cobram.
from django.contrib import messages
from django.shortcuts import HttpResponseRedirect
from django.core.urlresolvers import reverse
from smartmin.views import SmartCRUDL, SmartCreateView, SmartReadView, SmartListView

from phoenix.apps.animals.models import Animal
from phoenix.apps.utils.upload.views import UploadView, UploadListView, UploadDeleteView
from .models import AnimalNote, AnimalDocument


class AnimalDocumentUploadView(UploadView):
    model = AnimalDocument
    delete_url = 'records.animaldocument_delete'

    def get_context_data(self, **kwargs):
        context = super(AnimalDocumentUploadView, self).get_context_data(**kwargs)
        # context['animal'] = self.request.animal
        return context


class AnimalDocumentListView(UploadListView):
    model = AnimalDocument
    delete_url = 'records.animaldocument_delete'

    def get_queryset(self):
        return AnimalDocument.objects.all()  # filter(animal=self.kwargs['animal_id']).filter(deleted=False)


class AnimalDocumentDeleteView(UploadDeleteView):
    model = AnimalDocument


class AnimalNoteCRUDL(SmartCRUDL):
    model = AnimalNote

    class FormMixin(object):
        def __init__(self, **kwargs):
            # Prevent cyclic import errors
            from .forms import AnimalNoteForm
            self.form_class = AnimalNoteForm
            super(AnimalNoteCRUDL.FormMixin, self).__init__(**kwargs)

    class Create(FormMixin, SmartCreateView):
        def get(self, request, *args, **kwargs):
            animal_id = request.GET.get('animal', None)
            if not animal_id:
                messages.warning(request, 'Animal Id is required')
                return HttpResponseRedirect(request.META.get('HTTP_REFERER', '/'))
            return super(AnimalNoteCRUDL.Create, self).get(request, *args, **kwargs)

        def pre_save(self, obj):
            animal_id = self.request.GET.get('animal', None)
            try:
                animal = Animal.objects.get(id=animal_id)
            except Animal.DoesNotExist:
                messages.error(self.request, 'Animal Id is required')
            else:
                obj.animal = animal
            return obj

        def get_success_url(self):
            return reverse('animals.animal_read', args=[self.request.GET.get('animal', None)])

    class Read(SmartReadView):
        fields = ('id', 'date', 'file', 'details', 'created', 'modified')

        def get_file(self, obj):
            return '<a href=' + obj.file.url + '>' + obj.file.name + '</a>'

    class List(SmartListView):
        fields = ('id', 'date', 'file', 'details')

        def get_file(self, obj):
            if obj.file:
                return '<a href=' + obj.file.url + '>' + obj.file.name + '</a>'
            return ''

        def get_queryset(self, **kwargs):
            queryset = super(AnimalNoteCRUDL.List, self).get_queryset(**kwargs)
            queryset = queryset.filter(animal=self.request.animal)
            return queryset


# class AnimalGroupNoteCRUDL(SmartCRUDL):
#     model = AnimalGroupNote
#
#     class FormMixin(object):
#         def __init__(self, **kwargs):
#             # Prevent cyclic import errors
#             from .forms import AnimalGroupNoteForm
#             self.form_class = AnimalGroupNoteForm
#             super(AnimalGroupNoteCRUDL.FormMixin, self).__init__(**kwargs)
#
#     class Create(FormMixin, SmartCreateView):
#         def get(self, request, *args, **kwargs):
#             animalgroup_id = request.GET.get('group', None)
#             if not animalgroup_id:
#                 messages.warning(request, 'Animal Group Id is required')
#                 return HttpResponseRedirect(request.META.get('HTTP_REFERER', '/'))
#             return super(AnimalGroupNoteCRUDL.Create, self).get(request, *args, **kwargs)
#
#         def pre_save(self, obj):
#             animalgroup_id = self.request.GET.get('group', None)
#             try:
#                 animalgroup = AnimalGroup.objects.get(id=animalgroup_id)
#             except AnimalGroup.DoesNotExist:
#                 messages.error(self.request, 'Animal Id is required')
#             else:
#                 obj.animalgroup = animalgroup
#             return obj
#
#         def get_success_url(self):
#             return reverse('groups.animalgroup_read',
#                            args=[self.request.GET.get('group', None)])
#
#     class Read(SmartReadView):
#         fields = ('id', 'date', 'file', 'details', 'created', 'modified')
#
#         def get_file(self, obj):
#             if obj.file:
#                 return '<a href=' + obj.file.url + '>' + obj.file.name + '</a>'
#             return ''
#
#     class List(SmartListView):
#         fields = ('id', 'date', 'file', 'details')
#
#         def get_file(self, obj):
#             if obj.file:
#                 return '<a href=' + obj.file.url + '>' + obj.file.name + '</a>'
#             return ''
#
#         def get_queryset(self, **kwargs):
#             queryset = super(AnimalGroupNoteCRUDL.List, self).get_queryset(**kwargs)
#             queryset = queryset.filter(animalgroup=self.request.animalgroup)
#             return queryset
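For orientation, a typical way to expose a smartmin CRUDL like the one above is to hand its generated URL patterns to Django. The snippet below is a hypothetical urls.py sketch only: it assumes the installed smartmin release provides SmartCRUDL.as_urlpatterns() (check your version) and that this module lives in the phoenix.apps.records app.

from .views import AnimalNoteCRUDL

# Hypothetical wiring: smartmin derives the create/read/list routes from the CRUDL definition.
urlpatterns = AnimalNoteCRUDL().as_urlpatterns()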
Accutane is a “magic” pill used for acne treatment. The drug has unique characteristics and currently has no analogues. Its active ingredient is isotretinoin, a structural analogue of vitamin A, so its therapeutic properties are similar.

One of the main factors in acne formation is excessive activity of the oil glands. Accutane lowers the production of fatty secretion and prevents pore clogging. It softens the sebaceous secretion so that it does not accumulate in the sebaceous channels and is easily removed. This helps clear the skin of closed comedones (blackheads) and prevents new acne from developing. With cleaned pores and reduced sebum production, bacterial activity and inflammation in the upper layers of the epidermis are reduced; sebum is no longer trapped in the pores, and the skin becomes healthier.

Accutane is effective in the severest forms of acne, when neither antibiotics nor cosmetic products clear the skin. It is not used for mild acne, because the risk of side effects is high. The indications for Accutane include skin diseases such as nodulocystic acne, acne conglobata, and closed and open comedones.

Accutane tablets are taken once per day at the same time. The decision to treat should be made by a doctor, who will assess the clinical picture and prescribe the correct dose. The optimal dose of Accutane is usually calculated as 0.5 mg per 1 kg of body weight: a patient weighing 70 kg would take a daily dose of 35 mg. In some cases (severe acne affecting a large area of the face and body), a higher dose of 1 mg per 1 kg of body weight is used.

The first results of treatment usually appear within 2-3 weeks, and complete remission of acne occurs after 12-20 weeks of treatment. Accutane should not be used for more than 5 months in a row; before beginning a new cycle of treatment, a break of 2-3 months is necessary.

The use of Accutane is often accompanied by unpleasant side effects: dry skin, dry mucous membranes, sweating, headache, nausea, lack of energy, and pain in muscles and joints. These side effects may be reduced by lowering the daily dose. Acne symptoms may become stronger in the first 1-2 weeks, and more rash may appear; this is completely normal.
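As a purely illustrative aside, the weight-based arithmetic above can be written as a one-line helper. This is a sketch only: the function name and defaults merely restate the 0.5-1 mg/kg figures quoted in the article, and it is in no way a dosing tool.

def estimated_daily_dose_mg(body_weight_kg, mg_per_kg=0.5):
    # Restates the article's rule of thumb: dose (mg/day) = rate (mg/kg) * body weight (kg).
    return mg_per_kg * body_weight_kg

print(estimated_daily_dose_mg(70))       # 35.0 mg/day at 0.5 mg/kg, as in the article's example
print(estimated_daily_dose_mg(70, 1.0))  # 70.0 mg/day at the higher 1 mg/kg rate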
#!/usr/bin/python
# -*- coding: UTF-8 -*-
import sys
import json

from django.http import JsonResponse
from django.shortcuts import render

from book.models import book
from chapter.models import chapter


def bookChapter(request):
    context = {}
    # get the book id of user input if it is not null
    if 'idBook' not in request.GET:
        context['status'] = "fail"
        context['message'] = "The idBook variable is not in request.GET."
        return JsonResponse(context)
    inputIdBook = request.GET['idBook']
    # get the book name of user input if it is not null
    # if 'bookName' not in request.GET:
    #     context['status'] = "fail"
    #     context['message'] = "The bookName variable is not in request.GET."
    #     return JsonResponse(context)
    # bookName = request.GET['bookName']
    bookName = ""
    res, status, mes = book.getValue(inputIdBook, "name")
    if res:
        bookName = mes
    else:
        print "getchapter bookChapter error" + str(status)
    return render(request, 'chapter/bookChapter.html',
                  context={'idBook': inputIdBook, 'bookName': bookName})


def getChapter(request):
    context = {}
    reload(sys)
    sys.setdefaultencoding('utf8')
    # get the new book name of user input if it is not null
    if 'idBook' not in request.GET:
        context['status'] = "fail"
        context['message'] = "The idBook variable is not in request.GET."
        return JsonResponse(context)
    inputIdBook = request.GET['idBook']
    res, statusNumber, mes = chapter.getAll(inputIdBook)
    if not res:
        context['status'] = "fail"
        context['message'] = "錯誤: " + mes
        return JsonResponse(context)
    context['status'] = "success"
    response_data = []
    for m in mes:
        response_record = {}
        response_record['id'] = m.id
        response_record['name'] = m.name
        response_record['chapterOrder'] = m.chapterOrder
        response_record['book_name'] = book.getValue(m.idBook_id, "name")[2]
        response_data.append(response_record)
    context["message"] = response_data
    return JsonResponse(context)
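A hypothetical urls.py entry for wiring the two views above, assuming a Django 1.8-1.11 project (consistent with the django.core.urlresolvers import used elsewhere in this dump) and that this module is chapter/views.py; route names and patterns are illustrative only.

from django.conf.urls import url

from chapter import views

urlpatterns = [
    url(r'^bookChapter/$', views.bookChapter),  # renders the chapter/bookChapter.html page
    url(r'^getChapter/$', views.getChapter),    # returns the chapter list as JSON
]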
One of our key fire safety suppliers is Briton, a renowned manufacturer of quality door closers. The Briton overhead door closer range provides door closing solutions to suit all projects, door applications and budgets. From the simplicity of a mechanical non-fire door closer for basic functionality, to a microprocessor-controlled low-energy operator for special applications (e.g. doors required to satisfy Part M of the Building Regulations), there are Briton products to meet your needs.

The Briton 121CE compact overhead door closer is suitable for the most popular door sizes. It has all the characteristics of performance, durability, functionality and quality that specifiers and users have come to expect from a Briton door control. The Briton 1100 series trimplate closers are designed to provide a mid-range product with a choice of the most popular options in a concise, cost-effective package. The 2000 series offers adjustable and fixed power sizes, providing a comprehensive package of solutions to suit the most popular door sizes.

The Briton 2003V was designed to help you meet the requirements of BS 8300:2001 for DDA use. It has variable power, allowing easy on-site adjustment to meet both access and fire requirements. As well as making access easier for all users, it has all the benefits you have come to expect from the Briton 2003: reliability, ease of installation and full certification. Further enhancements have been made for this upgrade, and the Briton 2003V features both adjustable back check and the award-winning Accufit installation system as standard. A closer will only perform correctly if it is fitted correctly, so ensuring accurate installation is essential to meeting the requirements of any legislation.

A recent addition to the range is the Briton 2700 series, a precision-manufactured cam-action slide channel closer in a compact, overhead, surface-fixed unit. By reducing the resistance encountered when opening the door, the Briton 2700 series bridges the gap between the requirements for fire and smoke control and the ease of operation required for accessibility. As with the 2003V, the 2700 door closer comes with the Accufit installation system to ensure correct fitting.

Linked to the building's alarm system, electromagnetic door controls eliminate the illegal practice of wedging fire doors open, a dangerous practice that prevents a fire door from stopping the spread of flames and smoke in the event of a fire. The Briton 996 series is a range of fixed power closers with an integrated electromagnetic hold-open mechanism. When connected to the building fire alarm or detection system, each unit can be set to either 'hold-open' or 'swing-free' operation. In either case the power of the closer can be temporarily disabled to allow free passage. When de-activated, the electromagnet disengages and the door closer closes the door in the normal manner to maintain fire safety.

The new Briton 1130B.TE has been designed to hold the door open during normal use while connected to the building fire alarm or smoke detection system via an electromagnet in the slide track. On sounding of the fire alarm, or in the event of a power failure, the electromagnet is deactivated and the door closes in the normal controlled manner.
# RandTalkBot Bot matching you with a random person on Telegram.
# Copyright (C) 2016 quasiyoke
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

import datetime
import json
import logging

from peewee import DateTimeField, Model, Proxy, TextField

LOGGER = logging.getLogger('randtalkbot.stats')


def _(string):
    return string


DATABASE_PROXY = Proxy()
RATIO_MAX = 10


class Stats(Model):
    data_json = TextField()
    created = DateTimeField(default=datetime.datetime.utcnow, index=True)

    class Meta:
        database = DATABASE_PROXY

    def __init__(self, *args, **kwargs):
        super(Stats, self).__init__(*args, **kwargs)
        self._data_cache = None

    def get_data(self):
        if self._data_cache is None:
            self._data_cache = json.loads(self.data_json)
        return self._data_cache

    def set_data(self, data):
        self._data_cache = data
        self.data_json = json.dumps(data)

    def get_sex_ratio(self):
        """https://en.wikipedia.org/wiki/Human_sex_ratio

        Returns:
            float: Ratio of males over the females.
        """
        try:
            sex_data = self.get_data()['sex_distribution']
        except (KeyError, TypeError):
            return 1
        males_count = sex_data.get('male', 0)
        females_count = sex_data.get('female', 0)
        if males_count > 0 and females_count > 0:
            return males_count / females_count
        elif males_count > 0:
            return RATIO_MAX
        elif females_count > 0:
            return 1 / RATIO_MAX
        return 1
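A minimal usage sketch for the model above, illustrative only: it assumes DATABASE_PROXY is initialized elsewhere in the application, and no database query is issued here since the instances are only built in memory.

# Build a Stats row in memory and read back the derived ratio.
stats = Stats()
stats.set_data({'sex_distribution': {'male': 30, 'female': 20}})
print(stats.get_sex_ratio())                  # 1.5 males per female
print(Stats(data_json='{}').get_sex_ratio())  # 1 when no distribution is recorded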
# -*- coding: utf-8 -*- ''' Created on 15 Φεβ 2013 @author: tedlaz ''' sqlco = u"INSERT INTO m12_co VALUES (1,'{0}','{1}','{2}',{3},'{4}','{5}','{6}','{7}','{8}','{9}','{10}','{11}','{12}','{13}')" from PyQt4 import QtCore, QtGui,Qt import utils_db,widgets import osyk from utils_qt import fFindFromList import datetime class NewDbWizard(QtGui.QWizard): def __init__(self, parent=None): super(NewDbWizard, self).__init__(parent) #self.setAttribute(Qt.Qt.WA_DeleteOnClose) Οχι γιατί δημιουργείται πρόβλημα ... #self.addPage(IntroPage()) self.addPage(coDataPage()) self.addPage(coDataPage2()) self.addPage(filePage()) self.addPage(finalPage()) self.setWizardStyle(QtGui.QWizard.ModernStyle) self.setOption(QtGui.QWizard.IndependentPages,True) #self.setPixmap(QtGui.QWizard.BannerPixmap,QtGui.QPixmap(':/banner')) #self.setPixmap(QtGui.QWizard.BackgroundPixmap, QtGui.QPixmap(':/background')) self.setWindowTitle(u"Οδηγός Δημιουργίας Νέου Αρχείου Μισθοδοσίας") def accept(self): #print '%s %s %s' % (self.field('epon'),self.field('cotyp_id'),self.field('fname')) fileSql = open(osyk.newDbFile) script = u'' for lines in fileSql: script += u'%s' % lines.decode('utf-8') utils_db.executeScript(script, self.field('fname')) sqlCo = sqlco.format(self.field('epon'),self.field('onom'),self.field('patr'),self.field('cotyp_id'), self.field('ame'),self.field('afm'),self.field('doy'),self.field('dra'), self.field('pol'),self.field('odo'),self.field('num'),self.field('tk'), self.field('ikac'),self.field('ikap')) print sqlCo utils_db.commitToDb(sqlCo, self.field('fname')) sqlCoy = u"INSERT INTO m12_coy VALUES (1,1,'Κεντρικό','%s')" % self.field('kad') utils_db.commitToDb(sqlCoy, self.field('fname')) etos = datetime.datetime.now().year utils_db.commitToDb(u"INSERT INTO m12_xrisi (xrisi,xrisip) VALUES ('{0}','Χρήση {0}')".format(etos), self.field('fname')) eidList = osyk.eid_cad_listFilteredDouble(self.field('kad')) #print eidList sqleid_ = u"INSERT INTO m12_eid (eidp,keid) VALUES ('{0}','{1}');\n" sqleid = u'' for el in eidList: sqleid += sqleid_.format(el[1],el[0]) utils_db.executeScript(sqleid,self.field('fname')) super(NewDbWizard, self).accept() class IntroPage(QtGui.QWizardPage): def __init__(self, parent=None): super(IntroPage, self).__init__(parent) self.setTitle(u"Οδηγίες") #self.setPixmap(QtGui.QWizard.WatermarkPixmap, QtGui.QPixmap(':/watermark1')) label = QtGui.QLabel(u"Αυτός ο οδηγός θα δημιουργήσει νέο Αρχείο Μισθοδοσίας.\n\n " u"Εσείς θα πρέπει απλά να εισάγετε τις απαραίτητες παραμέτρους " u"καθώς και το όνομα του αρχείου και το σημείο αποθήκευσης.\n\n" u"Μπορείτε σε κάθε βήμα να αναθεωρήσετε και να επιστρέψετε.\n\n" u"Πατήστε δημιουργία στην τελευταία οθόνη για να ολοκληρώσετε.") label.setWordWrap(True) layout = QtGui.QVBoxLayout() layout.addWidget(label) self.setLayout(layout) class coDataPage(QtGui.QWizardPage): def __init__(self, parent=None): super(coDataPage, self).__init__(parent) #parent.button(QtGui.QWizard.BackButton).setVisible(False) #self.buttonText(QtGui.QWizard.NextButton) self.setButtonText(QtGui.QWizard.BackButton,u'< Πίσω') self.setButtonText(QtGui.QWizard.NextButton,u'Επόμενο >') self.setButtonText(QtGui.QWizard.CancelButton,u'Ακύρωση') self.setTitle(u"Πληροφορίες εταιρίας") self.setSubTitle(u"Συμπληρώστε τα βασικά στοιχεία της εταιρίας") #self.setPixmap(QtGui.QWizard.LogoPixmap, QtGui.QPixmap(':/logo1')) cotypLabel = QtGui.QLabel(u"Τύπος επιχείρησης:") cotyp = widgets.DbComboBox([[1,u'Νομικό Πρόσωπο'],[2,u'Φυσικό Πρόσωπο']]) cotypLabel.setBuddy(cotyp) eponNameLabel = 
QtGui.QLabel(u"Επωνυμία:") eponNameLineEdit = QtGui.QLineEdit() eponNameLabel.setBuddy(eponNameLineEdit) onomLabel = QtGui.QLabel(u"Όνομα (Για φυσικά πρόσωπα):") onomLineEdit = QtGui.QLineEdit() onomLineEdit.setDisabled(True) onomLabel.setBuddy(onomLineEdit) patrLabel = QtGui.QLabel(u"Πατρώνυμο (Για φυσικά πρόσωπα):") patrLineEdit = QtGui.QLineEdit() patrLineEdit.setDisabled(True) patrLabel.setBuddy(patrLineEdit) cotypValue = QtGui.QLineEdit() cotypValue.setText('1') def onCotypActivated(): if cotyp.currentIndex() ==1: onomLineEdit.setDisabled(False) patrLineEdit.setDisabled(False) cotypValue.setText('2') else: onomLineEdit.setText('') patrLineEdit.setText('') onomLineEdit.setDisabled(True) patrLineEdit.setDisabled(True) cotypValue.setText('1') cotyp.activated.connect(onCotypActivated) kadLabel = QtGui.QLabel(u"Κωδικός αρ.Δραστηριότητας:") kadLineEdit = QtGui.QLineEdit() kadLabel.setBuddy(kadLineEdit) kadLineEdit.setReadOnly(True) kadFindButton = QtGui.QPushButton(u'Εύρεση ΚΑΔ') kadLayout = QtGui.QHBoxLayout() kadLayout.addWidget(kadLineEdit) kadLayout.addWidget(kadFindButton) kadpLabel = QtGui.QLabel(u"Περιγραφή αρ.Δραστηριότητας:") kadpTextEdit = QtGui.QTextEdit() kadpLabel.setBuddy(kadpTextEdit) kadpTextEdit.setReadOnly(True) draLabel = QtGui.QLabel(u"Συντομογραφία Δραστηριότητας:") draLineEdit = QtGui.QLineEdit() draLabel.setBuddy(draLineEdit) def openFindDlg(): kadList = osyk.cad_list() head = [u'ΚΑΔ',u'Περιγραφή'] cw = [35,300] form = fFindFromList(kadList,head,cw) if form.exec_() == QtGui.QDialog.Accepted: kadLineEdit.setText(form.array[0]) kadpTextEdit.setText(form.array[1]) kadFindButton.clicked.connect(openFindDlg) self.registerField('cotyp_id',cotypValue) self.registerField('epon*', eponNameLineEdit) self.registerField('onom', onomLineEdit) self.registerField('patr', patrLineEdit) self.registerField('kad*', kadLineEdit) self.registerField('dra*', draLineEdit) #self.registerField('kadt*', kadpTextEdit) layout = QtGui.QGridLayout() layout.addWidget(cotypLabel, 0, 0) layout.addWidget(cotyp, 0, 1) layout.addWidget(eponNameLabel, 1, 0) layout.addWidget(eponNameLineEdit, 1, 1) layout.addWidget(onomLabel, 2, 0) layout.addWidget(onomLineEdit, 2, 1) layout.addWidget(patrLabel, 3, 0) layout.addWidget(patrLineEdit, 3, 1) layout.addWidget(kadLabel, 4, 0) layout.addLayout(kadLayout, 4, 1) layout.addWidget(kadpLabel,5, 0) layout.addWidget(kadpTextEdit, 5, 1,2,1) layout.addWidget(draLabel,7, 0) layout.addWidget(draLineEdit,7, 1) self.setLayout(layout) class coDataPage2(QtGui.QWizardPage): def __init__(self, parent=None): super(coDataPage2, self).__init__(parent) self.setButtonText(QtGui.QWizard.BackButton,u'< Πίσω') self.setButtonText(QtGui.QWizard.NextButton,u'Επόμενο >') self.setButtonText(QtGui.QWizard.CancelButton,u'Ακύρωση') self.setTitle(u"Πληροφορίες εταιρίας") self.setSubTitle(u"Συμπληρώστε τα υπόλοιπα στοιχεία της εταιρίας") afmLabel = QtGui.QLabel(u"ΑΦΜ:") afmLineEdit = QtGui.QLineEdit() afmLabel.setBuddy(afmLineEdit) doyLabel = QtGui.QLabel(u"ΔΟΥ:") doyLineEdit = QtGui.QLineEdit() doyLabel.setBuddy(doyLineEdit) doyLineEdit.setReadOnly(True) doyFindButton = QtGui.QPushButton(u'...') doyFindButton.setMaximumSize(QtCore.QSize(20, 50)) doyLayout = QtGui.QHBoxLayout() doyLayout.addWidget(doyLineEdit) doyLayout.addWidget(doyFindButton) def openFindDlg(): head = [u'Κωδ',u'ΔΟΥ'] cw = [35,300] form = fFindFromList(osyk.doy_list(),head,cw) if form.exec_() == QtGui.QDialog.Accepted: doyLineEdit.setText(form.array[1]) doyFindButton.clicked.connect(openFindDlg) poliLabel = QtGui.QLabel(u"Πόλη:") 
poliLineEdit = QtGui.QLineEdit() poliLabel.setBuddy(poliLineEdit) tkLabel = QtGui.QLabel(u"Ταχ.Κωδικός:") tkLineEdit = QtGui.QLineEdit() tkLabel.setBuddy(tkLineEdit) odosLabel = QtGui.QLabel(u"Οδός:") odosLineEdit = QtGui.QLineEdit() odosLabel.setBuddy(odosLineEdit) numLabel = QtGui.QLabel(u"Αριθμός:") numLineEdit = QtGui.QLineEdit() numLabel.setBuddy(numLineEdit) ameLabel = QtGui.QLabel(u"Αρ.Μητρ.ΙΚΑ:") ameLineEdit = QtGui.QLineEdit() ameLabel.setBuddy(ameLineEdit) ikacLabel = QtGui.QLabel(u"Κωδ.ΙΚΑ:") ikacLineEdit = QtGui.QLineEdit() ikacLabel.setBuddy(ikacLineEdit) ikacLineEdit.setReadOnly(True) ikaLabel = QtGui.QLabel(u"Υπ/μα.ΙΚΑ:") ikaLineEdit = QtGui.QLineEdit() ikaLabel.setBuddy(ikaLineEdit) ikaLineEdit.setReadOnly(True) ikaFindButton = QtGui.QPushButton(u'...') ikaFindButton.setMaximumSize(QtCore.QSize(20, 50)) ikaLayout = QtGui.QHBoxLayout() ikaLayout.addWidget(ikaLineEdit) ikaLayout.addWidget(ikaFindButton) def openFindDlgIKA(): head = [u'Κωδ',u'Υποκατάστημα ΙΚΑ'] cw = [35,300] form = fFindFromList(osyk.ika_list(),head,cw) if form.exec_() == QtGui.QDialog.Accepted: ikacLineEdit.setText(form.array[0]) ikaLineEdit.setText(form.array[1]) ikaFindButton.clicked.connect(openFindDlgIKA) self.registerField('afm*',afmLineEdit) self.registerField('doy*',doyLineEdit) self.registerField('pol*',poliLineEdit) self.registerField('odo',odosLineEdit) self.registerField('num',numLineEdit) self.registerField('tk',tkLineEdit) self.registerField('ikac*',ikacLineEdit) self.registerField('ikap*',ikaLineEdit) self.registerField('ame*',ameLineEdit) layout = QtGui.QGridLayout() layout.addWidget(afmLabel, 0, 0) layout.addWidget(afmLineEdit, 0, 1) layout.addWidget(doyLabel, 0, 2) layout.addLayout(doyLayout, 0, 3) layout.addWidget(poliLabel, 1, 0) layout.addWidget(poliLineEdit, 1, 1) layout.addWidget(tkLabel, 1, 2) layout.addWidget(tkLineEdit, 1, 3) layout.addWidget(odosLabel, 2, 0) layout.addWidget(odosLineEdit, 2, 1) layout.addWidget(numLabel, 2, 2) layout.addWidget(numLineEdit, 2, 3) layout.addWidget(ameLabel, 3, 0) layout.addWidget(ameLineEdit, 3, 1) layout.addWidget(ikacLabel, 4, 0) layout.addWidget(ikacLineEdit, 4, 1) layout.addWidget(ikaLabel, 4, 2) layout.addLayout(ikaLayout, 4, 3) self.setLayout(layout) class filePage(QtGui.QWizardPage): def __init__(self, parent=None): super(filePage, self).__init__(parent) self.setButtonText(QtGui.QWizard.BackButton,u'< Πίσω') self.setButtonText(QtGui.QWizard.NextButton,u'Επόμενο >') self.setButtonText(QtGui.QWizard.CancelButton,u'Ακύρωση') self.setTitle(u"Όνομα αρχείου") self.setSubTitle(u"Δώστε όνομα και περιοχή αποθήκευσης") #self.setPixmap(QtGui.QWizard.LogoPixmap, QtGui.QPixmap(':/logo1')) fileNameLabel = QtGui.QLabel(u"Όνομα αρχείου:") self.fileNameLineEdit = QtGui.QLineEdit() self.fileNameLineEdit.setReadOnly(True) fileNameLabel.setBuddy(self.fileNameLineEdit) butFile = QtGui.QPushButton(u'...') butFile.clicked.connect(self.fSave) fileLayout = QtGui.QHBoxLayout() fileLayout.addWidget(self.fileNameLineEdit) fileLayout.addWidget(butFile) patrLabel = QtGui.QLabel(u"Πατρώνυμο (Για φυσικά πρόσωπα):") patrLineEdit = QtGui.QLineEdit() patrLabel.setBuddy(patrLineEdit) cotypLabel = QtGui.QLabel(u"Τύπος επιχείρησης:") cotyp = QtGui.QComboBox() cotypLabel.setBuddy(cotyp) cotyp.addItems([u'1.Νομικό Πρόσωπο',u'2.Φυσικό Πρόσωπο']) self.registerField('fname*', self.fileNameLineEdit) layout = QtGui.QGridLayout() layout.addWidget(fileNameLabel, 0, 0) layout.addLayout(fileLayout, 0, 1) self.setLayout(layout) def fSave(self): fileName = QtGui.QFileDialog.getSaveFileName(self, 
"QFileDialog.getSaveFileName()", self.field('fname'), "payroll m13 (*.m13)", QtGui.QFileDialog.Options()) if fileName: self.fileNameLineEdit.setText(fileName) class finalPage(QtGui.QWizardPage): def __init__(self, parent=None): super(finalPage, self).__init__(parent) self.setButtonText(QtGui.QWizard.BackButton,u'< Πίσω') self.setButtonText(QtGui.QWizard.FinishButton,u'Ολοκλήρωση') self.setButtonText(QtGui.QWizard.CancelButton,u'Ακύρωση') self.setTitle(u"Δημιουργία αρχείου ") #self.setPixmap(QtGui.QWizard.WatermarkPixmap, QtGui.QPixmap(':/watermark2')) self.label = QtGui.QLabel() self.label.setWordWrap(True) layout = QtGui.QVBoxLayout() layout.addWidget(self.label) self.setLayout(layout) def initializePage(self): finishText = self.wizard().buttonText(QtGui.QWizard.FinishButton) finishText.replace('&', '') txt = u'Προσοχή , θα δημιουργηθεί αρχείο μισθοδοσίας με τις παρακάτω παραμέτρους :\n\n' txt += u'Στοιχεία Επιχείρησης : %s \n\n' % self.field('epon') txt += u'Όνομα Αρχείου : %s \n\n' % self.field('fname') txt += u"\nΠατήστε %s για να ολοκληρωθεί η διαδικασία." % finishText txt += u"\n\nΜε την ολοκλήρωση της διαδικασίας το νέο αρχείο είναι έτοιμο για εισαγωγή δεδομένων!!!" self.label.setText(txt) if __name__ == '__main__': import sys app = QtGui.QApplication(sys.argv) wizard = NewDbWizard() wizard.show() sys.exit(app.exec_())
Planning meals when pregnant can take a bit more thought, as your body requires extra energy and nutrients. With a little simple meal planning, getting the nutrition you need every day is easy. Full of vegetables, fruits, whole grains, and lean proteins, this plan provides the nutrition you need in a tasty and varied way. The recipes meet healthy-pregnancy nutrition guidelines, meaning they deliver the healthful nutrients moms-to-be need more of and contain no off-limits foods such as alcohol or unpasteurized cheese. This meal plan is based on a 2,900-calorie diet, but you may need more or fewer calories depending on your individual nutrition needs. – Toast the bread and top with mashed avocado. Season with a pinch of pepper and, if desired, a dash of hot sauce. – Split the English muffin in half. – Toast the waffles, then top with yogurt and drizzle with honey. – Serve the casserole with a side of spinach, dressed with 2 Tbsp. olive oil and 1 Tbsp. balsamic vinegar. Please note: this meal plan is controlled for calories, protein, folic acid, fiber, iron, calcium, and sodium.
def _create_pins(): """ :rtype: Iterable[int] """ middle = 5000 for i in range(0, 2 * middle): if i % 2 == 0: yield middle - i // 2 - 1 else: yield middle + i // 2 PINS = tuple(_create_pins()) assert len(PINS) == 10000, "Len = %d" % len(PINS) assert min(PINS) == 0000 assert max(PINS) == 9999 def get_pin_index(pin): return 0 if pin is None else PINS.index(pin) def pin_generator(last_pin=None): """ :type last_pin: int or None :rtype: Iterable[int] """ start_pos = get_pin_index(last_pin) + 1 if last_pin is not None else 0 for i in range(start_pos, len(PINS)): yield PINS[i] def test_selector(): print(get_pin_index(6000)) l1 = list(pin_generator(last_pin=9997)) assert len(frozenset(l1)) == 4 l2 = list(pin_generator(last_pin=4999)) assert len(frozenset(l2)) == 9999 l3 = list(pin_generator(last_pin=5000)) assert len(frozenset(l3)) == 9998 if __name__ == '__main__': test_selector()
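A brief usage sketch of the generator above: brute_force and try_pin are illustrative placeholders, not part of the original module.

def brute_force(try_pin, last_pin=None):
    """Walk the centre-out PIN order, optionally resuming after last_pin."""
    for pin in pin_generator(last_pin=last_pin):
        if try_pin("%04d" % pin):   # PINS holds ints; pad to four digits for display
            return pin
    return None

# Resume after 4999 (the first PIN in the sequence) and stop when "0042" matches.
assert brute_force(lambda p: p == "0042", last_pin=4999) == 42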
Worrying about your chimney is stressful, but we’re here to help solve your needs with quality customer care! Scheduling an appointment is the first step in addressing your concerns, whether you have a leaky chimney, need masonry repair, or simply want a general chimney inspection. The first step in scheduling an appointment is filling out basic information through our online form. This gives us a reference point for when we call you to discuss the specifics of your needs. From there the process is straightforward: once we get in touch with you, we can move closer to solving any chimney problems you have. With that said, an online form can only provide so much information. In order to provide the best quality customer service and experience in the region, we need your help. Here are five appointment tips that you can use to ensure we provide you with the maximum return on investment. By taking initiative as a fireplace or chimney owner, you’re taking a shortcut to faster, more efficient service. If you are scheduling an annual inspection or sweeping, keep in mind that the best time for maintenance is in the spring and summer. During the colder months, we are often booked out far in advance and working to be there for our customers during emergencies. Scheduling routine maintenance in the “off-season” allows us to offer more prompt, flexible service. Speaking of maintenance, it is a wise decision to let us know immediately if you suspect that there may be damage to your chimney or feel there might be a problem with it. Sometimes animals or pests take up residence in it if you don’t have a chimney cap to protect it. Or maybe you have a leaky chimney that drips down into your living room, causing water damage. Whatever the case, let us know as soon as you suspect a problem. After all, you deserve to have peace of mind. Save yourself headache and financial heartbreak by contacting us ASAP. To attain that peace of mind, it helps our professional team and technicians when you provide specific details about your problem. There is nothing wrong with being unsure about the situation – after all, we’re the professionals – but it goes a long way when you describe the characteristics of your concern. For example, you can say: “I’ve noticed that there are drafts coming in from my fireplace; I think the hot air may be escaping” or “I suspect that there are raccoons or birds in my chimney”. These descriptions are specific and concise, and writing down your concerns ahead of time helps us troubleshoot and prepare a solution to your chimney problem. Savvy chimney and fireplace owners will likely have a few dates set in advance for a chimney inspection and have specific concerns at hand. To fully implement a successful chimney inspection strategy, though, chimney and fireplace owners are encouraged to prepare their home accordingly. If we are to service your fireplace, please discontinue use at least 24 hours before our arrival and remove the ash from the ashbox. We also ask that you move any precious items or breakables from the 5-6 foot area surrounding the appliance being serviced. This will help guarantee that our technicians can get right to work safely and quickly. You are our key partner in addressing your chimney or fireplace needs. We attribute our 30-year track record of success to collaboration with homeowners like you. Are you ready to take the next step in solving your chimney needs? Schedule an appointment today!
### # # WEIO Web Of Things Platform # Copyright (C) 2013 Nodesign.net, Uros PETREVSKI, Drasko DRASKOVIC # All rights reserved # # ## ## ######## #### ####### # ## ## ## ## ## ## ## # ## ## ## ## ## ## ## # ## ## ## ###### ## ## ## # ## ## ## ## ## ## ## # ## ## ## ## ## ## ## # ### ### ######## #### ####### # # Web Of Things Platform # # This file is part of WEIO and is published under BSD license. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # 1. Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # 2. Redistributions in binary form must reproduce the above copyright # notice, this list of conditions and the following disclaimer in the # documentation and/or other materials provided with the distribution. # 3. All advertising materials mentioning features or use of this software # must display the following acknowledgement: # This product includes software developed by the WeIO project. # 4. Neither the name of the WeIO nor the # names of its contributors may be used to endorse or promote products # derived from this software without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY WEIO PROJECT AUTHORS AND CONTRIBUTORS ''AS IS'' AND ANY # EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED # WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE # DISCLAIMED. IN NO EVENT SHALL WEIO PROJECT AUTHORS AND CONTRIBUTORS BE LIABLE FOR ANY # DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES # (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; # LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND # ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS # SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
# # Authors : # Uros PETREVSKI <uros@nodesign.net> # Drasko DRASKOVIC <drasko.draskovic@gmail.com> # ### from weioLib.weioIO import * from weioUserApi import serverPush from weioLib import weioRunnerGlobals import platform, sys # WeIO API bindings from websocket to lower levels # Each data argument is array of data # Return value is dictionary def callPinMode(data) : if (weioRunnerGlobals.WEIO_SERIAL_LINKED is True): pinMode(data[0],data[1]) else : print "pinMode ON PC", data return None def callPortMode(data) : if (weioRunnerGlobals.WEIO_SERIAL_LINKED is True): portMode(data[0],data[1]) else : print "pinMode ON PC", data return None def callDigitalWrite(data) : if (weioRunnerGlobals.WEIO_SERIAL_LINKED is True): digitalWrite(data[0], data[1]) else : print "digitalWrite ON PC", data return None def callDigitalRead(data) : bck = {} if (weioRunnerGlobals.WEIO_SERIAL_LINKED is True): value = digitalRead(data[0]) bck["data"] = value bck["pin"] = data[0] else : print "digitalRead ON PC", data bck["data"] = 1 # faked value bck["pin"] = data[0] # pin return bck def callPulseIn(data) : bck = {} if (weioRunnerGlobals.WEIO_SERIAL_LINKED is True): value = pulseIn(data[0], data[1], data[2]) bck["data"] = value bck["pin"] = data[0] bck["level"] = data[1] bck["timeout"] = data[1] else : print "pulseIn ON PC", data bck["data"] = 1 # faked value bck["pin"] = data[0] # pin bck["level"] = data[1] # level bck["timeout"] = data[2] # timeout return bck def callPortWrite(data) : if (weioRunnerGlobals.WEIO_SERIAL_LINKED is True): portWrite(data[0], data[1]) else : print "portWrite ON PC", data return None def callPortRead(data) : bck = {} if (weioRunnerGlobals.WEIO_SERIAL_LINKED is True): value = portRead(data[0]) bck["data"] = value bck["port"] = data[0] else : print "digitalRead ON PC", data bck["data"] = 1 # faked value bck["port"] = data[0] # pin return bck def callDHTRead(data) : if (weioRunnerGlobals.WEIO_SERIAL_LINKED is True): dhtRead(data[0]) else : print "dhtRead ON PC", data return None def callAnalogRead(data) : bck = {} if (weioRunnerGlobals.WEIO_SERIAL_LINKED is True): #print "From browser ", data value = analogRead(data[0]) # this is pin number bck["data"] = value bck["pin"] = data[0] else : print "analogRead ON PC", data bck["data"] = 1023 # faked value bck["pin"] = data[0] return bck def callSetPwmPeriod(data) : if (weioRunnerGlobals.WEIO_SERIAL_LINKED is True): setPwmPeriod(data[0],data[1]) else: print "setPwmPeriod ON PC", data return None # def callSetPwmLimit(data) : # if (weioRunnerGlobals.WEIO_SERIAL_LINKED is True): # setPwmLimit(data[0]) # else: # print "setPwmLimit ON PC", data # return None def callPwmWrite(data) : if (weioRunnerGlobals.WEIO_SERIAL_LINKED is True): pwmWrite(data[0], data[1]) else : print "pwmWrite ON PC", data return None def callProportion(data) : bck = {} if (weioRunnerGlobals.WEIO_SERIAL_LINKED is True): #print "From browser ", data value = proportion(data[0],data[1],data[2],data[3],data[4]) bck["data"] = value else : print "proportion ON PC", data bck["data"] = data return bck def callAttachInterrupt(data) : if (weioRunnerGlobals.WEIO_SERIAL_LINKED is True): iObj = {"pin" : data[0], "jsCallbackString" : data[2]} attachInterrupt(data[0], data[1], genericInterrupt, iObj) else: print "attachInterrupt ON PC", data return None def callDetachInterrupt(data) : if (weioRunnerGlobals.WEIO_SERIAL_LINKED is True): detachInterrupt(data[0]) else: print "detachInterrupt ON PC", data return None def genericInterrupt(event, obj): bck = {} bck["data"] = obj["pin"] bck["eventType"] = 
getInterruptType(event["type"]) serverPush(obj["jsCallbackString"], bck) def callDelay(data) : if (weioRunnerGlobals.WEIO_SERIAL_LINKED is True): delay(data[0]) else : print "delay ON PC", data return None def callTone(data) : if (weioRunnerGlobals.WEIO_SERIAL_LINKED is True): print "TONE VALS", len(data) if (len(data)==2): tone(data[0], data[1]) elif (len(data)==3): tone(data[0], data[1], data[2]) else : print "tone ON PC", data return None def callNotone(data) : if (weioRunnerGlobals.WEIO_SERIAL_LINKED is True): noTone(data[0]) else : print "notone ON PC", data return None def callConstrain(data) : if (weioRunnerGlobals.WEIO_SERIAL_LINKED is True): constrain(data[0], data[1], data[2],) bck["data"] = value else : print "contrain ON PC", data bck["data"] = 1 # faked value bck["pin"] = data[0] # pin return bck def callMillis(data) : bck = {} if (weioRunnerGlobals.WEIO_SERIAL_LINKED is True): value = millis() bck["data"] = value else : print "millis ON PC", data bck["data"] = 0 # faked value return bck def callGetTemperature(data): bck = {} if (weioRunnerGlobals.WEIO_SERIAL_LINKED is True): value = getTemperature() bck["data"] = value else : print "getTemperature ON PC", data bck["data"] = 0 # faked value return bck def callUserMesage(data): print "USER TALKS", data #weioRunnerGlobals.userMain def pinsInfo(data) : bck = {} bck["data"] = weioRunnerGlobals.DECLARED_PINS #print("GET PIN INFO ASKED!", bck["data"]) return bck def callListSerials(data): bck = {} bck["data"] = listSerials() return bck # UART SECTION clientSerial = None def callInitSerial(data): global clientSerial if (clientSerial is None) : clientSerial = initSerial(data[0], data[1]) def callSerialWrite(data): global clientSerial if not(clientSerial is None) : clientSerial.write(data) else : sys.stderr.write("Serial port is not initialized. Use initSerial function first") def callSerialRead(data): global clientSerial bck = {} if not(clientSerial is None) : bck["data"] = clientSerial.read() else : sys.stderr.write("Serial port is not initialized. Use initSerial function first") return bck # SPI SECTION SPI = None def callInitSPI(data): global SPI if (SPI is None) : SPI = initSPI(data[0]) def callWriteSPI(data): global SPI if not(SPI is None) : SPI.write(data[0]) else : sys.stderr.write("SPI port is not initialized. Use initSerial function first") def callReadSPI(data): global SPI bck = {} if not(SPI is None) : bck["data"] = SPI.read(data[0]) else : sys.stderr.write("SPI port is not initialized. 
Use initSerial function first") return bck ### # WeIO native spells ### weioSpells = { "digitalWrite" :callDigitalWrite, "digitalRead" :callDigitalRead, "pulseIn" :callPulseIn, "portWrite" :callPortWrite, "portRead" :callPortRead, "dhtRead" :callDHTRead, "analogRead" :callAnalogRead, "pinMode" :callPinMode, "portMode" :callPortMode, "setPwmPeriod" :callSetPwmPeriod, "pwmWrite" :callPwmWrite, "proportion" :callProportion, "attachInterrupt" :callAttachInterrupt, "detachInterrupt" :callDetachInterrupt, "tone" :callTone, "noTone" :callNotone, "constrain" :callConstrain, "millis" :callMillis, "getTemperature" :callGetTemperature, "delay" :callDelay, "pinsInfo" :pinsInfo, "listSerials" :callListSerials, "initSerial" :callInitSerial, "serialWrite" :callSerialWrite, "initSPI" :callInitSPI, "readSPI" :callReadSPI, "writeSPI" :callWriteSPI # "message":callUserMesage } ### # User added spells (handlers) ### weioUserSpells = {} def addUserEvent(event, handler): global weioUserSpells #print "Adding event ", event #print "and handler ", handler weioUserSpells[event] = handler def removeUserEvents(): global weioUserSpells weioUserSpells.clear()
Do you know the feeling of finally getting your hands on a book that you’ve wanted for a long time? About six months ago I wandered into my local, enormous, largely impersonal book vendor (who for free-marketing sake will remain nameless) and asked if they carried the graphic novel The Pride of Baghdad. Of course they didn’t, but they probably could order it in. I didn’t want to go through the trouble of handing out all of my personal information one more time, so I said no thanks. Today, after sauntering in to spend a gift card, I noticed it on a wall of other graphic novels. I snatched it up and can’t wait to read it.
#!/usr/bin/env python # -*- coding: UTF-8 -*- from django.template import Library import re DEBUG = False register = Library() @register.filter def highlight_format(value): p_sub = re.compile('__codestart__ (\w+)') value = p_sub.sub(r'<pre name="code" class="\g<1>">', value) p_sub = re.compile(r'__codeend__', re.VERBOSE) value = p_sub.sub(r'</pre>', value) if DEBUG: print value print '+' * 80 p_highlight = re.compile(r'(<pre name="code" class="\w+">)(?P<codeblock>.*)(</pre>)', re.S) f_list = p_highlight.findall(value) if f_list: s_list = p_highlight.split(value) if DEBUG: for i in s_list: print i print '=' * 80 for code_block in p_highlight.finditer(value): code = code_block.group('codeblock') index = s_list.index(code) code = code.replace('&lt;', '<') code = code.replace('&gt;', '>') code = code.replace('&amp;', '&') code = code.replace('<p>', '') code = code.replace('</p>', '') s_list[index] = code value = ''.join(s_list) return value
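A quick way to check what the filter produces, assuming the module can be imported and the function called directly (the sample markup below is invented):

sample = ('<p>__codestart__ python</p>'
          '<p>for i in range(3): print i</p>'
          '<p>__codeend__</p>')
result = highlight_format(sample)
# result == '<p><pre name="code" class="python">for i in range(3): print i</pre></p>'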
You may list more or less than ten, but please share your favorite manga with us. If you would like to update your list, then just quote your previous one for a quicker and easier comparison. I'll list my top five to start off. I could only manage to fit five titles into my list, and even right now I'm not that confident in the ordering of these titles. The more I read, the more I'll be able to fill in here, so look out for titles like Kiichi!! showing up soon. Alright....I too, like Lydia, have far too many to list. I'll list the first favorites that come to mind. The first 3 are my true #'s 1-3, but after that there is no particular order. I don't know if I've read enough to have a top 10 list. I only read six manga! So I will list them. In order of preference. Well since ya wanna say you read so much manga I expect you to assist Aya in making the Manga section flourish. So get cracking missy. Psycho 101 wrote: Well since ya wanna say you read so much manga I expect you to assist Aya in making the Manga section flourish. So get cracking missy. Long ago there was much talk amongst us of making the actual ANN manga section grow like mad, but that was also when I ended up "quitting the internet". But, I'll do what I can to make it boom on both forums! I just need to finish my application to Japan first, so give me a couple weeks, and then it's ON! Not a Jellyfish wrote: But, I'll do what I can to make it boom on both forums! I just need to finish my application to Japan first, so give me a couple weeks, and then it's ON! Oh it's ON eh? Well BRING IT! Why do I have to be the only one who reads so much manga on here?? Well, I used to read a lot more till I got to college. Then I started watching more anime. You've reminded me of how far behind I probably am with some of the releases I've been keeping up with. Let's see... What do I have still lying around my apartment: Gunm - Last Order, Ah! Megami-sami, NHK ni Yokoso!, Akira, Nausicaa of the Valley of the Wind, Gunsmith Cats (which will likely be unloaded at Book Off in the near future), Ghost in the Shell (English and Japanese), Dominion and a pile of flotsam. The titles I've listed in italics are series I've purchased the Japanese tankoubon of. Glad I could remind you of manga. Hehehe. When I started college it was actually the anime that went and the manga that increased. Opposite for me. Uh... that was way more than 10. Oh well. I have a lot of manga.
# Caesar's Cipher is a very famous encryption technique used in cryptography. It is a type of substitution # cipher in which each letter in the plaintext is replaced by a letter some fixed number of positions down # the alphabet. For example, with a shift of 3, D would be replaced by G, E would become H, X would become A # and so on. # # Encryption of a letter X by a shift K can be described mathematically as # EK(X)=(X+K) % 26. # # Given a plaintext and it's corresponding ciphertext, output the minimum non-negative value of shift that was # used to encrypt the plaintext or else output −1 if it is not possible to obtain the given ciphertext from # the given plaintext using Caesar's Cipher technique. # # Input: # # The first line of the input contains Q, denoting the number of queries. # # The next Q lines contain two strings S and T consisting of only upper-case letters. # # Output: # # For each test-case, output a single non-negative integer denoting the minimum value of shift that was used # to encrypt the plaintext or else print −1 if the answer doesn't exist. # # Constraints: # 1≤Q≤5 # 1≤|S|≤10^5 # 1≤|T|≤10^5 # |S| = |T| # # SAMPLE INPUT # 2 # ABC # DEF # AAA # PQR # # SAMPLE OUTPUT # 3 # -1 # My Solution for _ in range(int(input())): string_one = input() string_two= input() check_one = 'ABCDEFGHIJKLMNOPQRSTUVWXYZ' # ZYXWVUTSRQPONMLKJIHGFEDCBA check_two = check_one[::-1] result = [] for i in range(len(string_one)): if(check_one.find(string_one[i]) > check_one.find(string_two[i])): result.append(check_two.find(string_one[i]) + check_one.find(string_two[i]) + 1) else: result.append(check_one.find(string_two[i]) - check_one.find(string_one[i])) if result.count(result[0]) == len(string_one): print(result[0]) else: print(-1) # More Efficient Solution: tests = int(input().strip()) for i in range(tests): plain = input().strip() cipher = input().strip() shift = (ord(cipher[0])-ord(plain[0])+26)%26 valid = True for j in range(len(plain)): if (ord(cipher[j])-ord(plain[j])+26)%26 != shift: valid = False break print(shift) if valid else print("-1")
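# The same check can be wrapped in a reusable function, separate from the judge I/O
# above (a sketch only; caesar_shift is not part of the original solutions):
def caesar_shift(plain, cipher):
    """Return the non-negative shift mapping plain to cipher, or -1 if none exists."""
    if len(plain) != len(cipher) or not plain:
        return -1
    shift = (ord(cipher[0]) - ord(plain[0])) % 26
    for p, c in zip(plain, cipher):
        if (ord(c) - ord(p)) % 26 != shift:
            return -1
    return shift

assert caesar_shift("ABC", "DEF") == 3
assert caesar_shift("AAA", "PQR") == -1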
Caratini will undergo surgery on his fractured left hamate bone Monday, Bruce Levine of 670TheScore.com reports. Caratini suffered the injury in his final at-bat Thursday against the Pirates. He's expected to be out 4-to-6 weeks. He'll head to the injured list, with Taylor Davis called up from Triple-A Iowa to back up Willson Contreras.
# (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com> # # This file is part of Ansible # # Ansible is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Ansible is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Ansible. If not, see <http://www.gnu.org/licenses/>. # Make coding more python3-ish from __future__ import (absolute_import, division, print_function) __metaclass__ = type import collections import os from ansible.errors import AnsibleError, AnsibleParserError, AnsibleAssertionError from ansible.module_utils.six import iteritems, binary_type, text_type from ansible.playbook.attribute import FieldAttribute from ansible.playbook.base import Base from ansible.playbook.become import Become from ansible.playbook.conditional import Conditional from ansible.playbook.helpers import load_list_of_blocks from ansible.playbook.role.metadata import RoleMetadata from ansible.playbook.taggable import Taggable from ansible.plugins.loader import get_all_plugin_loaders from ansible.utils.vars import combine_vars __all__ = ['Role', 'hash_params'] # TODO: this should be a utility function, but can't be a member of # the role due to the fact that it would require the use of self # in a static method. This is also used in the base class for # strategies (ansible/plugins/strategy/__init__.py) def hash_params(params): """ Construct a data structure of parameters that is hashable. This requires changing any mutable data structures into immutable ones. We chose a frozenset because role parameters have to be unique. .. warning:: this does not handle unhashable scalars. Two things mitigate that limitation: 1) There shouldn't be any unhashable scalars specified in the yaml 2) Our only choice would be to return an error anyway. """ # Any container is unhashable if it contains unhashable items (for # instance, tuple() is a Hashable subclass but if it contains a dict, it # cannot be hashed) if isinstance(params, collections.Container) and not isinstance(params, (text_type, binary_type)): if isinstance(params, collections.Mapping): try: # Optimistically hope the contents are all hashable new_params = frozenset(params.items()) except TypeError: new_params = set() for k, v in params.items(): # Hash each entry individually new_params.update((k, hash_params(v))) new_params = frozenset(new_params) elif isinstance(params, (collections.Set, collections.Sequence)): try: # Optimistically hope the contents are all hashable new_params = frozenset(params) except TypeError: new_params = set() for v in params: # Hash each entry individually new_params.update(hash_params(v)) new_params = frozenset(new_params) else: # This is just a guess. new_params = frozenset(params) return new_params # Note: We do not handle unhashable scalars but our only choice would be # to raise an error there anyway. 
return frozenset((params,)) class Role(Base, Become, Conditional, Taggable): _delegate_to = FieldAttribute(isa='string') _delegate_facts = FieldAttribute(isa='bool', default=False) def __init__(self, play=None, from_files=None): self._role_name = None self._role_path = None self._role_params = dict() self._loader = None self._metadata = None self._play = play self._parents = [] self._dependencies = [] self._task_blocks = [] self._handler_blocks = [] self._default_vars = dict() self._role_vars = dict() self._had_task_run = dict() self._completed = dict() if from_files is None: from_files = {} self._from_files = from_files super(Role, self).__init__() def __repr__(self): return self.get_name() def get_name(self): return self._role_name @staticmethod def load(role_include, play, parent_role=None, from_files=None): if from_files is None: from_files = {} try: # The ROLE_CACHE is a dictionary of role names, with each entry # containing another dictionary corresponding to a set of parameters # specified for a role as the key and the Role() object itself. # We use frozenset to make the dictionary hashable. params = role_include.get_role_params() if role_include.when is not None: params['when'] = role_include.when if role_include.tags is not None: params['tags'] = role_include.tags if from_files is not None: params['from_files'] = from_files if role_include.vars: params['vars'] = role_include.vars hashed_params = hash_params(params) if role_include.role in play.ROLE_CACHE: for (entry, role_obj) in iteritems(play.ROLE_CACHE[role_include.role]): if hashed_params == entry: if parent_role: role_obj.add_parent(parent_role) return role_obj r = Role(play=play, from_files=from_files) r._load_role_data(role_include, parent_role=parent_role) if role_include.role not in play.ROLE_CACHE: play.ROLE_CACHE[role_include.role] = dict() play.ROLE_CACHE[role_include.role][hashed_params] = r return r except RuntimeError: raise AnsibleError("A recursion loop was detected with the roles specified. 
Make sure child roles do not have dependencies on parent roles", obj=role_include._ds) def _load_role_data(self, role_include, parent_role=None): self._role_name = role_include.role self._role_path = role_include.get_role_path() self._role_params = role_include.get_role_params() self._variable_manager = role_include.get_variable_manager() self._loader = role_include.get_loader() if parent_role: self.add_parent(parent_role) # copy over all field attributes, except for when and tags, which # are special cases and need to preserve pre-existing values for (attr_name, _) in iteritems(self._valid_attrs): if attr_name not in ('when', 'tags'): setattr(self, attr_name, getattr(role_include, attr_name)) current_when = getattr(self, 'when')[:] current_when.extend(role_include.when) setattr(self, 'when', current_when) current_tags = getattr(self, 'tags')[:] current_tags.extend(role_include.tags) setattr(self, 'tags', current_tags) # dynamically load any plugins from the role directory for name, obj in get_all_plugin_loaders(): if obj.subdir: plugin_path = os.path.join(self._role_path, obj.subdir) if os.path.isdir(plugin_path): obj.add_directory(plugin_path) # load the role's other files, if they exist metadata = self._load_role_yaml('meta') if metadata: self._metadata = RoleMetadata.load(metadata, owner=self, variable_manager=self._variable_manager, loader=self._loader) self._dependencies = self._load_dependencies() else: self._metadata = RoleMetadata() task_data = self._load_role_yaml('tasks', main=self._from_files.get('tasks')) if task_data: try: self._task_blocks = load_list_of_blocks(task_data, play=self._play, role=self, loader=self._loader, variable_manager=self._variable_manager) except AssertionError as e: raise AnsibleParserError("The tasks/main.yml file for role '%s' must contain a list of tasks" % self._role_name, obj=task_data, orig_exc=e) handler_data = self._load_role_yaml('handlers') if handler_data: try: self._handler_blocks = load_list_of_blocks(handler_data, play=self._play, role=self, use_handlers=True, loader=self._loader, variable_manager=self._variable_manager) except AssertionError as e: raise AnsibleParserError("The handlers/main.yml file for role '%s' must contain a list of tasks" % self._role_name, obj=handler_data, orig_exc=e) # vars and default vars are regular dictionaries self._role_vars = self._load_role_yaml('vars', main=self._from_files.get('vars'), allow_dir=True) if self._role_vars is None: self._role_vars = dict() elif not isinstance(self._role_vars, dict): raise AnsibleParserError("The vars/main.yml file for role '%s' must contain a dictionary of variables" % self._role_name) self._default_vars = self._load_role_yaml('defaults', main=self._from_files.get('defaults'), allow_dir=True) if self._default_vars is None: self._default_vars = dict() elif not isinstance(self._default_vars, dict): raise AnsibleParserError("The defaults/main.yml file for role '%s' must contain a dictionary of variables" % self._role_name) def _load_role_yaml(self, subdir, main=None, allow_dir=False): file_path = os.path.join(self._role_path, subdir) if self._loader.path_exists(file_path) and self._loader.is_directory(file_path): # Valid extensions and ordering for roles is hard-coded to maintain # role portability extensions = ['.yml', '.yaml', '.json'] # If no <main> is specified by the user, look for files with # extensions before bare name. Otherwise, look for bare name first. 
if main is None: _main = 'main' extensions.append('') else: _main = main extensions.insert(0, '') found_files = self._loader.find_vars_files(file_path, _main, extensions, allow_dir) if found_files: data = {} for found in found_files: new_data = self._loader.load_from_file(found) if new_data and allow_dir: data = combine_vars(data, new_data) else: data = new_data return data elif main is not None: raise AnsibleParserError("Could not find specified file in role: %s/%s" % (subdir, main)) return None def _load_dependencies(self): ''' Recursively loads role dependencies from the metadata list of dependencies, if it exists ''' deps = [] if self._metadata: for role_include in self._metadata.dependencies: r = Role.load(role_include, play=self._play, parent_role=self) deps.append(r) return deps # other functions def add_parent(self, parent_role): ''' adds a role to the list of this roles parents ''' if not isinstance(parent_role, Role): raise AnsibleAssertionError() if parent_role not in self._parents: self._parents.append(parent_role) def get_parents(self): return self._parents def get_default_vars(self, dep_chain=None): dep_chain = [] if dep_chain is None else dep_chain default_vars = dict() for dep in self.get_all_dependencies(): default_vars = combine_vars(default_vars, dep.get_default_vars()) if dep_chain: for parent in dep_chain: default_vars = combine_vars(default_vars, parent._default_vars) default_vars = combine_vars(default_vars, self._default_vars) return default_vars def get_inherited_vars(self, dep_chain=None): dep_chain = [] if dep_chain is None else dep_chain inherited_vars = dict() if dep_chain: for parent in dep_chain: inherited_vars = combine_vars(inherited_vars, parent._role_vars) return inherited_vars def get_role_params(self, dep_chain=None): dep_chain = [] if dep_chain is None else dep_chain params = {} if dep_chain: for parent in dep_chain: params = combine_vars(params, parent._role_params) params = combine_vars(params, self._role_params) return params def get_vars(self, dep_chain=None, include_params=True): dep_chain = [] if dep_chain is None else dep_chain all_vars = self.get_inherited_vars(dep_chain) for dep in self.get_all_dependencies(): all_vars = combine_vars(all_vars, dep.get_vars(include_params=include_params)) all_vars = combine_vars(all_vars, self.vars) all_vars = combine_vars(all_vars, self._role_vars) if include_params: all_vars = combine_vars(all_vars, self.get_role_params(dep_chain=dep_chain)) return all_vars def get_direct_dependencies(self): return self._dependencies[:] def get_all_dependencies(self): ''' Returns a list of all deps, built recursively from all child dependencies, in the proper order in which they should be executed or evaluated. 
''' child_deps = [] for dep in self.get_direct_dependencies(): for child_dep in dep.get_all_dependencies(): child_deps.append(child_dep) child_deps.append(dep) return child_deps def get_task_blocks(self): return self._task_blocks[:] def get_handler_blocks(self, play, dep_chain=None): block_list = [] # update the dependency chain here if dep_chain is None: dep_chain = [] new_dep_chain = dep_chain + [self] for dep in self.get_direct_dependencies(): dep_blocks = dep.get_handler_blocks(play=play, dep_chain=new_dep_chain) block_list.extend(dep_blocks) for task_block in self._handler_blocks: new_task_block = task_block.copy() new_task_block._dep_chain = new_dep_chain new_task_block._play = play block_list.append(new_task_block) return block_list def has_run(self, host): ''' Returns true if this role has been iterated over completely and at least one task was run ''' return host.name in self._completed and not self._metadata.allow_duplicates def compile(self, play, dep_chain=None): ''' Returns the task list for this role, which is created by first recursively compiling the tasks for all direct dependencies, and then adding on the tasks for this role. The role compile() also remembers and saves the dependency chain with each task, so tasks know by which route they were found, and can correctly take their parent's tags/conditionals into account. ''' block_list = [] # update the dependency chain here if dep_chain is None: dep_chain = [] new_dep_chain = dep_chain + [self] deps = self.get_direct_dependencies() for dep in deps: dep_blocks = dep.compile(play=play, dep_chain=new_dep_chain) block_list.extend(dep_blocks) for idx, task_block in enumerate(self._task_blocks): new_task_block = task_block.copy() new_task_block._dep_chain = new_dep_chain new_task_block._play = play if idx == len(self._task_blocks) - 1: new_task_block._eor = True block_list.append(new_task_block) return block_list def serialize(self, include_deps=True): res = super(Role, self).serialize() res['_role_name'] = self._role_name res['_role_path'] = self._role_path res['_role_vars'] = self._role_vars res['_role_params'] = self._role_params res['_default_vars'] = self._default_vars res['_had_task_run'] = self._had_task_run.copy() res['_completed'] = self._completed.copy() if self._metadata: res['_metadata'] = self._metadata.serialize() if include_deps: deps = [] for role in self.get_direct_dependencies(): deps.append(role.serialize()) res['_dependencies'] = deps parents = [] for parent in self._parents: parents.append(parent.serialize(include_deps=False)) res['_parents'] = parents return res def deserialize(self, data, include_deps=True): self._role_name = data.get('_role_name', '') self._role_path = data.get('_role_path', '') self._role_vars = data.get('_role_vars', dict()) self._role_params = data.get('_role_params', dict()) self._default_vars = data.get('_default_vars', dict()) self._had_task_run = data.get('_had_task_run', dict()) self._completed = data.get('_completed', dict()) if include_deps: deps = [] for dep in data.get('_dependencies', []): r = Role() r.deserialize(dep) deps.append(r) setattr(self, '_dependencies', deps) parent_data = data.get('_parents', []) parents = [] for parent in parent_data: r = Role() r.deserialize(parent, include_deps=False) parents.append(r) setattr(self, '_parents', parents) metadata_data = data.get('_metadata') if metadata_data: m = RoleMetadata() m.deserialize(metadata_data) self._metadata = m super(Role, self).deserialize(data) def set_loader(self, loader): self._loader = loader for parent in 
self._parents: parent.set_loader(loader) for dep in self.get_direct_dependencies(): dep.set_loader(loader)
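A small illustration of hash_params, defined at the top of this file: nested, mutable role parameters are reduced to (nested) frozensets so they can key the role cache. The parameter names and values below are invented.

example_params = {'nginx_port': 8080,
                  'modules': ['ssl', 'gzip'],
                  'limits': {'workers': 4}}
cache_key = hash_params(example_params)
role_cache = {cache_key: 'Role(...)'}             # hashable, so usable as a dict key
assert hash_params(example_params) == cache_key   # identical params -> identical key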
#!/usr/bin/env python3 # Copyright (C) 2020 IBM Corp. # This program is Licensed under the Apache License, Version 2.0 # (the "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # http://www.apache.org/licenses/LICENSE-2.0 # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. See accompanying LICENSE file. import argparse import sys import ast import math def diff_float(na, nb, threshold): for a, b in zip(na, nb): if not math.isclose(a, b, abs_tol=threshold): raise ValueError(f"Difference {a - b} between {a} and {b} " f"exceeds threshold {threshold}.") def makeSameSize(a, b, max_length): lenA, lenB = len(a), len(b) if lenA > max_length or lenB > max_length: raise ValueError(f"Size of slots for {a}({lenA}) {b}({lenB}) " f"> {max_length}.") if lenA == lenB: return a, b else: maxSz = max(lenA, lenB) a += [0] * (maxSz - lenA) b += [0] * (maxSz - lenB) return (a, b) def parseCorrectly(la, lb, decrypt): error_msg = "Type mismatch. {0}({1}) and {2}({3}) type do not match." if decrypt: for a, b in zip(la, lb): a, b = ast.literal_eval(a), ast.literal_eval(b) if type(a) is not type(b): raise TypeError(error_msg.format(a, type(a), b, type(b))) yield a, b else: for a, b in zip(la, lb): a = [[ float(i) for i in a.split(",") ]] b = [[ float(i) for i in b.split(",") ]] if type(a) is not type(b): raise TypeError(error_msg.format(a, type(a), b, type(b))) yield a, b def main(): parser = argparse.ArgumentParser() parser.add_argument("firstfile", help="first data file", type=str) parser.add_argument("secondfile", help="second data file", type=str) parser.add_argument("--decrypt", help="diff decrypt format (instead of decode)", action='store_true') parser.add_argument("--threshold", help="error threshold [default=0.001]", type=float, default=0.001) args = parser.parse_args() with open(args.firstfile, 'r') as f1, open(args.secondfile, 'r') as f2: l1, l2 = list(f1), list(f2) if len(l1) != len(l2): sys.exit(f"Different number of lines. " f"First contains {len(l1)} second contains {len(l2)}.") if l1[0] != l2[0]: sys.exit(f"File headers differ. {l1[0]} {l2[0]}.") try: for a, b in parseCorrectly(l1[1:], l2[1:], args.decrypt): for sa, sb in zip(a, b): sa, sb = makeSameSize(sa, sb, 2) diff_float(sa, sb, args.threshold) except (TypeError, ValueError) as e: sys.exit(str(e)) if __name__ == "__main__": main()
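A small, self-contained illustration of the padding and comparison helpers above, assuming they are importable; the values are invented:

a, b = makeSameSize([1.0, 0.0005], [1.0], max_length=2)   # pads b to [1.0, 0]
diff_float(a, b, threshold=0.001)                          # within tolerance: no exception
try:
    diff_float([2.0], [1.0], threshold=0.001)
except ValueError as err:
    print(err)   # the difference of 1.0 exceeds the 0.001 threshold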
The Department of Drama in the College of Humanities at the University of Exeter are looking for a new Professor. Drama at Exeter is internationally-renowned for its practice, research and teaching, and will celebrate its 50th anniversary in 2018. It has had notable success in achieving external research income, including several large AHRC awards, Creative Fellow and Academic Fellowship awards, and workshop and practice-based Research Council projects. We foster a strong culture of peer support in relation to staff research, hosting three Research Centres within the Department: the Centre for Contemporary Performance Practices; the Centre for Performance Histories and Cultures; and the Centre for Performance, Science and Community. All eligible Drama staff were submitted to REF2014, in which 95% of our research was graded at internationally recognised levels. Exeter Drama has performed consistently highly in the NSS, and is currently in the top 3 in The Times and Sunday Times Good University Guide 2017. We are one of the largest and best equipped Drama departments in the UK with over twenty permanent staff, eleven practice studios, and over a dozen other seminar, workshop and multimedia spaces. We offer a supportive and stimulating environment, and are well connected internationally, through our split-site programme with NIAS, Bangalore and staff and student exchanges with Austin, Texas. There is a strong tradition of Practice-as-Research. We collaborate with organisations such as the Globe Theatre, the National Theatre, the Drottningholm Theatre, regional and national theatre companies, and public sector bodies such as hospitals, prisons and schools. Researchers also have strong links with the wider creative economy sector, and graduate companies include Arts Council England National Portfolio Organisations, such as Forced Entertainment, Punchdrunk and Theatre Alibi. The Department of Drama in the College of Humanities at the University of Exeter are looking for a leading international figure with the ability to attract world-class academics and post-graduate research students. Applicants will be innovative researchers with a strong track record of research funding and international quality publications.The Professor in Drama, Theatre or Performance Studies will contribute to extending the research profile of Drama at Exeter, and to its dynamic teaching environment. We are particularly interested in enhancing leadership in areas related or complementary to international performance practice; or applied, community and socially-engaged performance. The University of Exeter is a Russell Group University in the top one percent of institutions globally. In the last few years we have invested strategically to deliver more than GBP 350 million worth of new facilities across our campuses with plans for significant investment in the future. This full-time post is available from 1 September 2017 on a permanent basis. Applicants are encouraged to contact the Head of Discipline, Professor Stephen Hodge (tel: 01392 724524, email: S.Hodge@exeter.ac.uk) to discuss the post further. You may also wish to consult our web site at http://humanities.exeter.ac.uk for further details of the College. The closing date for completed applications is Sunday 15 January 2017. Shortlisting is expected to take place on Tuesday 24 January 2017 with interviews likely to take place in mid-February 2017.
#!/usr/bin/env python3 # -*- coding: UTF-8 -*- ############################################################################ # # keyboard_list_generator.py # ############################################################################ # # Author: Videonauth <videonauth@googlemail.com> # Date: 09.07.2016 # Purpose: # Generate a word-list for the keyboard sequence check. # Written for: Python 3.5.1 # ############################################################################ de_lowercase = "qwertzuiopü+asdfghjkllöä#yxcvbnm,.-" de_uppercase = "°!§$%&/()=?WERTZUIOPÜ*ASDFGHJKLÖÄ'YXCVBNM;:_" en_lowercase = "-=qwertyuiop[]asdfghjkl;'zxcvbnm,./" en_uppercase = "~!@#$%^&*()_QWERTYUIOP{}ASDFGHJKL:ZXCVBNM<>?" # next line might error out if destination file does not exist with open('../lists/keyboard.wl', 'r+') as file: for a in range(3, len(de_lowercase) + 1): for b in range(len(de_lowercase)): if len(de_lowercase[b: b + a]) == a: file.write(de_lowercase[b: b + a] + '\n') for a in range(3, len(de_uppercase) + 1): for b in range(len(de_uppercase)): if len(de_uppercase[b: b + a]) == a: file.write(de_uppercase[b: b + a] + '\n') for a in range(3, len(en_lowercase) + 1): for b in range(len(en_lowercase)): if len(en_lowercase[b: b + a]) == a: file.write(en_lowercase[b: b + a] + '\n') for a in range(3, len(en_uppercase) + 1): for b in range(len(en_uppercase)): if len(en_uppercase[b: b + a]) == a: file.write(en_uppercase[b: b + a] + '\n') de_lowercasere = de_lowercase[:: -1] de_uppercasere = de_uppercase[:: -1] en_lowercasere = en_lowercase[:: -1] en_uppercasere = en_uppercase[:: -1] for a in range(3, len(de_lowercasere) + 1): for b in range(len(de_lowercasere)): if len(de_lowercasere[b: b + a]) == a: file.write(de_lowercasere[b: b + a] + '\n') for a in range(3, len(de_uppercasere) + 1): for b in range(len(de_uppercasere)): if len(de_uppercasere[b: b + a]) == a: file.write(de_uppercasere[b: b + a] + '\n') for a in range(3, len(en_lowercasere) + 1): for b in range(len(en_lowercasere)): if len(en_lowercasere[b: b + a]) == a: file.write(en_lowercasere[b: b + a] + '\n') for a in range(3, len(en_uppercasere) + 1): for b in range(len(en_uppercasere)): if len(en_uppercasere[b: b + a]) == a: file.write(en_uppercasere[b: b + a] + '\n') file.close()
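The eight copy-pasted loops above could be collapsed into a single helper (a sketch only; write_sequences is not in the original script, and the line order of the output file would differ slightly):

def write_sequences(handle, layout, min_len=3):
    """Write every contiguous substring of layout with length >= min_len, one per line."""
    for length in range(min_len, len(layout) + 1):
        for start in range(len(layout) - length + 1):
            handle.write(layout[start:start + length] + '\n')

# e.g. inside the existing "with open(...)" block:
# for layout in (de_lowercase, de_uppercase, en_lowercase, en_uppercase):
#     write_sequences(file, layout)
#     write_sequences(file, layout[::-1])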
A new study says sleep trackers might be making you sleep poorly. Here’s what that really means. Long gone are the days of having to calculate how many hours of sleep you got last night. Between fitness trackers, downloadable apps, and the health kits that come on our smartphones, pretty much everyone in the modern world has access to sleep data—how long they slept, and how well—right at their fingertips. But could all that data actually make your sleep worse? That’s the implication from a new study published in the Journal of Clinical Sleep Medicine. Researchers at Rush University highlighted three case studies from their sleep lab, including a 39-year-old man who bought a sleep tracker after having trouble getting a good night’s sleep. As soon as he began checking the tracker against his goal of logging at least eight hours of sleep every night, though, he actually started to feel worse than before he had the facts at his fingertips. Similarly, a 27-year-old woman complained of feeling “unrefreshed” after waking up from what her device said was a poor night’s sleep, but, once in a laboratory setting separated from her tracker, she actually slept soundly, including periods of deep sleep. Baron readily admits trackers are certainly not all bad. And for healthy sleepers, trackers can indeed help raise awareness about sleep, says Jessica Payne, Ph.D., an associate professor of psychology who runs the Sleep, Stress, and Memory Lab at the University of Notre Dame and wasn’t involved in the study. “Just like if you learn that, at the end of the day, you’ve only walked 5,000 steps, finding that you’ve only slept five or six hours can motivate you to do better the next day,” Payne explains. It’s when people start obsessing over the data that trackers become counterproductive. And even though the study was small, highlighting only three case studies, both experts say this anxiety over data is a pattern they see in their clinical practice and research. The problem is this: when armed with the most detailed data on your sleep, you start stressing about not getting enough shuteye, which intensifies what Payne calls the “sleep-stress snowball”—the more stressed you are, the less, or the more poorly, you sleep. In turn, the more sleep deprived you are, the more stress your brain and body are under. “That’s a powerful negative association that can be difficult to unlearn,” Payne adds. What’s more, your sleep is governed by homeostatic sleep pressure, Baron explains, which basically means the longer you’re awake, the sleepier you are. Conversely, the longer someone spends in bed attempting to snooze (which often happens with insomniacs), the more difficulty they have falling asleep, the more frequently they wake up, and the harder a time they have getting back to dreamland. If you love your tracker, we’re certainly not saying toss it aside. After all, we hear countless stories about guys who didn’t realize they were in that health- and hormone-compromising zone of under six hours a night until they started checking a tracker. But take sleep data with a grain of salt, because, in reality, it’s not steadfastly reliable. “I’ve tried most trackers and wear one myself,” Baron says. “I find the data to be motivating for my sleep and steps—they just aren’t very accurate about measuring sleep.” She’s seen trackers err by up to 45 minutes when compared to lab measurements.
It’s really the focus on a specific number that’s most often to blame for sleep trackers screwing with people’s sleep, Baron says. “Don’t fixate on making it perfect each day—instead, look for the overall trend toward a consistent sleep schedule,” she says.
from matplotlib.colors import LightSource # Adapted from https://github.com/jobar8/graphics def alpha_blend(rgb, intensity, alpha=0.7): return alpha * rgb + (1 - alpha) * intensity def get_hs(data, cmap, norm=None, zf=10, azdeg=315, altdeg=45, dx=1, dy=1, fraction=1.5, blend_mode='alpha', alpha=0.7, **kwargs_norm): ls = LightSource(azdeg, altdeg) if blend_mode == 'alpha': # transparency blending rgb = ls.shade(data, cmap=cmap, norm=norm, blend_mode=alpha_blend, vert_exag=zf, dx=dx, dy=dy, fraction=fraction, alpha=alpha, **kwargs_norm) else: rgb = ls.shade(data, cmap=cmap, norm=norm, blend_mode=blend_mode, vert_exag=zf, dx=dx, dy=dy, fraction=fraction, **kwargs_norm) return rgb
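A minimal usage sketch, assuming get_hs expects a 2-D elevation array; the synthetic surface and the colormap choice are illustrative:

import numpy as np
import matplotlib.pyplot as plt

y, x = np.mgrid[0:200, 0:200]
dem = 50 * np.sin(x / 20.0) * np.cos(y / 20.0)      # toy elevation grid

rgb = get_hs(dem, cmap=plt.cm.terrain, zf=5, blend_mode='alpha', alpha=0.6)
plt.imshow(rgb)
plt.axis('off')
plt.show()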
With such a vast architectural, historical and cultural heritage, India has much to offer travellers, however the process of obtaining an Indian tourist visa has, until now, been considered a formidable task. Martin Randall Travel, along with many others in the travel industry, has welcomed the news of the extension of the e-visa to British citizens, which will make obtaining an Indian visa simpler, quicker and less expensive. Visitors from countries all over the globe including Australia, Canada, the USA and, since 15th August 2015, the UK can now apply for the e-visa which costs £39 plus a small administration fee, and apply, pay and upload documents online. The visa can be applied for 30 days in advance of entry and is valid for a stay of up to 30 days, but cannot be extended. James Palmer, Operations Executive for our India tours, says “This is a very encouraging development for British nationals travelling to India. It is no longer necessary to surrender one's passport before departure or go through the tedious process of queuing at a visa centre”.
# -*- coding: utf-8 -*- ''' ____ ___ ____________ ___ ___ ____ _________________ / __ \/ _ | / __/ _/ __/ / _ \/ _ \/ __ \__ / / __/ ___/_ __/ / /_/ / __ |_\ \_/ /_\ \ / ___/ , _/ /_/ / // / _// /__ / / \____/_/ |_/___/___/___/ /_/ /_/|_|\____/\___/___/\___/ /_/ Operational Aid Source for Infra-Structure Created on 2020. 3. 18.. @author: Hye-Churn Jang, CMBU Specialist in Korea, VMware [jangh@vmware.com] ''' #=============================================================================== # Prepare PostgreSQL Server #=============================================================================== # docker run --name postgres -p 5432:5432 -e POSTGRES_PASSWORD=password -e POSTGRES_USER=pygics -e POSTGRES_DB=pygicsdb -d postgres from pygics import load, logInfo load('modules.postgres') # Login Database SDK.PygicsDB.system('localhost:5432', 'pygics', 'password') # "User" Table at "PygicsDB" Database User = SDK.PygicsDB.User logInfo('Create Users') with SDK.PygicsDB: # Open Transaction for Create Records User('Tony', 'Tony Stark', 'IronMan') User('Peter', 'Peter Parker', 'SpiderMan') User('Peter', 'Peter Pan', 'Elf') logInfo('Get All Users\n{}'.format(User.list())) # query form based SQLAlchemy logInfo('Find All Peters\n{}'.format(User.list(User.name == 'Peter', order='id'))) with SDK.PygicsDB: # Open Transaction tony = User.list(User.name == 'Tony')[0] tony.nickname = 'Avengers Leader' # Update Data tony.update() logInfo('Check Tony Changed\n{}'.format(User.list(User.name == 'Tony'))) logInfo('Delete All Users') with SDK.PygicsDB: # Open Transaction for Delete for user in User.list(): user.delete() logInfo('Check Users Empty\n{}'.format(User.list()))
Feel at home at our free ringtones website, where you can acquire the most popular ringtones for your phone. Today we have a ringtone of “Patrick ryan” by Symphony - Patrick Ryan, as well as 2 other ringtones by Symphony - Patrick Ryan. You can listen to all 2 Symphony - Patrick Ryan – “Patrick ryan” ringtone variations available here and download them for free to your PC. You can also browse other albums produced by Symphony - Patrick Ryan. Audiko offers a huge number of songs absolutely for free. And finally, we have collected the lyrics for this track, and you can view them here as well. So feel free to browse, listen to, and download ringtones at Audiko.net. Make your phone sound cool! Download “Patrick ryan” by Symphony - Patrick Ryan at Audiko fast and FREE!
''' A bunch of support functions used for SCG optimisation. They depend on the parallel implementation framework, but may change for other optimisers. ''' import glob import time import numpy from os.path import splitext from local_MapReduce import load, save time_acc = { 'embeddings_set_grads' : [], 'embeddings_get_grads_mu' : [], 'embeddings_get_grads_kappa' : [], 'embeddings_get_grads_theta' : [], 'embeddings_get_grads_current_grad' : [], 'embeddings_get_grads_gamma' : [], 'embeddings_get_grads_max_d' : [], 'embeddings_set_grads_reset_d' : [], 'embeddings_set_grads_update_d' : [], 'embeddings_set_grads_update_X' : [], 'embeddings_set_grads_update_grad_old' : [], 'embeddings_set_grads_update_grad_new' : [], } ''' Initialisation for local statistics ''' def embeddings_set_grads(folder): ''' Sets the grads and other local statistics often needed for optimisation locally for each node. This is currently only implemented locally, but could easly be adapted to the MapReduce framework to be done on remote nodes in parallel. There's no real need to do this in parallel though, as the computaions taking place are not that time consuming. ''' global time_acc start = time.time() input_files = sorted(glob.glob(folder + '/*.grad_latest.npy')) for file_name in input_files: grads = load(file_name) #print 'grads' #print grads # Save grad new as the latest grad evaluated new_file = splitext(splitext(file_name)[0])[0] + '.grad_new.npy' save(new_file, grads) # Init the old grad to be grad new new_file = splitext(splitext(file_name)[0])[0] + '.grad_old.npy' save(new_file, grads) # Save the direction as the negative grad new_file = splitext(splitext(file_name)[0])[0] + '.grad_d.npy' save(new_file, -1 * grads) end = time.time() time_acc['embeddings_set_grads'] += [end - start] ''' Getters for local statistics ''' def embeddings_get_grads_mu(folder): ''' Get the sum over the inputs of the inner product of the direction and grad_new ''' global time_acc start = time.time() mu = 0 grad_new_files = sorted(glob.glob(folder + '/*.grad_new.npy')) grad_d_files = sorted(glob.glob(folder + '/*.grad_d.npy')) for grad_new_file, grad_d_file in zip(grad_new_files, grad_d_files): grad_new = load(grad_new_file) grad_d = load(grad_d_file) mu += (grad_new * grad_d).sum() end = time.time() time_acc['embeddings_get_grads_mu'] += [end - start] return mu def embeddings_get_grads_kappa(folder): ''' Get the sum over the inputs of the inner product of the direction with itself ''' global time_acc start = time.time() kappa = 0 grad_d_files = sorted(glob.glob(folder + '/*.grad_d.npy')) for grad_d_file in grad_d_files: grad_d = load(grad_d_file) kappa += (grad_d * grad_d).sum() end = time.time() time_acc['embeddings_get_grads_kappa'] += [end - start] return kappa def embeddings_get_grads_theta(folder): ''' Get the sum over the inputs of the inner product of the direction and grad_latest ''' global time_acc start = time.time() theta = 0 grad_new_files = sorted(glob.glob(folder + '/*.grad_new.npy')) grad_latest_files = sorted(glob.glob(folder + '/*.grad_latest.npy')) grad_d_files = sorted(glob.glob(folder + '/*.grad_d.npy')) for grad_latest_file, grad_d_file, grad_new_file in zip(grad_latest_files, grad_d_files, grad_new_files): grad_latest = load(grad_latest_file) grad_new = load(grad_new_file) grad_d = load(grad_d_file) theta += (grad_d * (grad_latest - grad_new)).sum() end = time.time() time_acc['embeddings_get_grads_theta'] += [end - start] return theta def embeddings_get_grads_current_grad(folder): ''' Get the sum over the inputs of 
the inner product of grad_new with itself ''' global time_acc start = time.time() current_grad = 0 grad_new_files = sorted(glob.glob(folder + '/*.grad_new.npy')) for grad_new_file in grad_new_files: grad_new = load(grad_new_file) current_grad += (grad_new * grad_new).sum() end = time.time() time_acc['embeddings_get_grads_current_grad'] += [end - start] return current_grad def embeddings_get_grads_gamma(folder): ''' Get the sum over the inputs of the inner product of grad_old and grad_new ''' global time_acc start = time.time() gamma = 0 grad_new_files = sorted(glob.glob(folder + '/*.grad_new.npy')) grad_old_files = sorted(glob.glob(folder + '/*.grad_old.npy')) for grad_new_file, grad_old_file in zip(grad_new_files, grad_old_files): grad_new = load(grad_new_file) grad_old = load(grad_old_file) gamma += (grad_new * grad_old).sum() end = time.time() time_acc['embeddings_get_grads_gamma'] += [end - start] return gamma def embeddings_get_grads_max_d(folder, alpha): ''' Get the max abs element of the direction over all input files ''' global time_acc start = time.time() max_d = 0 grad_d_files = sorted(glob.glob(folder + '/*.grad_d.npy')) for grad_d_file in grad_d_files: grad_d = load(grad_d_file) max_d = max(max_d, numpy.max(numpy.abs(alpha * grad_d))) end = time.time() time_acc['embeddings_get_grads_max_d'] += [end - start] return max_d ''' Setters for local statistics ''' def embeddings_set_grads_reset_d(folder): ''' Reset the direction to be the negative of grad_new ''' global time_acc start = time.time() input_files = sorted(glob.glob(folder + '/*.grad_new.npy')) for file_name in input_files: grads = load(file_name) # Save the direction as the negative grad new_file = splitext(splitext(file_name)[0])[0] + '.grad_d.npy' save(new_file, -1 * grads) end = time.time() time_acc['embeddings_set_grads_reset_d'] += [end - start] def embeddings_set_grads_update_d(folder, gamma): ''' Update the value of the direction for each input to be gamma (given) times the old direction minus grad_new ''' global time_acc start = time.time() grad_new_files = sorted(glob.glob(folder + '/*.grad_new.npy')) grad_d_files = sorted(glob.glob(folder + '/*.grad_d.npy')) for grad_new_file, grad_d_file in zip(grad_new_files, grad_d_files): grad_new = load(grad_new_file) grad_d = load(grad_d_file) save(grad_d_file, gamma * grad_d - grad_new) end = time.time() time_acc['embeddings_set_grads_update_d'] += [end - start] def embeddings_set_grads_update_X(folder, alpha): ''' Update the value of the local embeddings and variances themselves to be X + alpha * direction ''' global time_acc start = time.time() grad_d_files = sorted(glob.glob(folder + '/*.grad_d.npy')) X_mu_files = sorted(glob.glob(folder + '/*.embedding.npy')) X_S_files = sorted(glob.glob(folder + '/*.variance.npy')) for grad_d_file, X_mu_file, X_S_file in zip(grad_d_files, X_mu_files, X_S_files): grad_d = load(grad_d_file) grad_d_X_mu = grad_d[0] grad_d_X_S = grad_d[1] X_mu = load(X_mu_file) X_S = load(X_S_file) #print 'X_mu' #print X_mu #print 'X_S' #print X_S save(X_mu_file, X_mu + alpha * grad_d_X_mu) save(X_S_file, X_S + alpha * grad_d_X_S) end = time.time() time_acc['embeddings_set_grads_update_X'] += [end - start] def embeddings_set_grads_update_grad_old(folder): ''' Set grad_old to be grad_new ''' global time_acc start = time.time() input_files = sorted(glob.glob(folder + '/*.grad_new.npy')) for file_name in input_files: grads = load(file_name) # Save grad old as latest grad new new_file = splitext(splitext(file_name)[0])[0] + '.grad_old.npy' save(new_file, 
grads) end = time.time() time_acc['embeddings_set_grads_update_grad_old'] += [end - start] def embeddings_set_grads_update_grad_new(folder): ''' Set grad_new to be grad_latest (a temp grad that keeps changing every evaluation) ''' global time_acc start = time.time() input_files = sorted(glob.glob(folder + '/*.grad_latest.npy')) for file_name in input_files: grads = load(file_name) # Save grad old as latest grad new new_file = splitext(splitext(file_name)[0])[0] + '.grad_new.npy' save(new_file, grads) end = time.time() time_acc['embeddings_set_grads_update_grad_new'] += [end - start]
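A sketch of how these helpers might be strung together for a single descent step over a folder of per-node gradient files; the folder path and step size below are placeholders, and the update rule is only illustrative rather than the full SCG line search:

folder = './embeddings'                          # placeholder: holds *.grad_latest.npy, *.embedding.npy, *.variance.npy
embeddings_set_grads(folder)                     # initialise grad_new, grad_old and the direction d = -grad
mu = embeddings_get_grads_mu(folder)             # sum over nodes of <d, grad_new>
kappa = embeddings_get_grads_kappa(folder)       # sum over nodes of <d, d>
alpha = -mu / kappa                              # toy step size, not the SCG sigma/lambda machinery
if embeddings_get_grads_max_d(folder, alpha) > 1e-6:
    embeddings_set_grads_update_X(folder, alpha)       # X <- X + alpha * d, for both means and variances
    embeddings_set_grads_update_grad_old(folder)       # grad_old <- grad_new
    # ...recompute the *.grad_latest.npy files externally, then:
    embeddings_set_grads_update_grad_new(folder)       # grad_new <- grad_latest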
For years, Sumiko has provided the market with wonderful sounding cartridges. With moving magnet designs, as well as low and high output moving coils, Sumiko has a solution for just about anyone's needs. At the top of their line lies the flagship Palo Santos Presentation. To say that it's a departure for them is an understatement. Sure, the body looks similar to the Celebration II, but that's where the similarities end. The Palo Santos has by far the smoothest sound of any Sumiko cartridge I've heard. The details in the music are present, but never overbearing. Bass is round, and highs are gentle. The midrange blooms like a rose, and vocals are immediate. Until now, Sumiko cartridges have been known for being bright and lively, if a little tipped up. The Palo Santos is the exact opposite, offering one of the more romantic sounds on the market today. This cartridge is truly for music lovers.
#!usr/bin/python # -*- coding: utf-8 -*- __plugins__ = ('BwLuminosity', 'BwGreen', 'BwOrange', 'BwRed', 'BwYellow', 'BwInfrared') __version__ = '2011-03-20' __author__ = 'Karol Będkowski' __copyright__ = "Copyright (c) Karol Będkowski, 2011" import ImageOps from photomagick.common import colors from photomagick.common.base_filter import BaseFilter from photomagick.common.const import CATEGORY_BASE class BwLuminosity(BaseFilter): STEPS = 3 NAME = _("BW Luminosity") CATEGORY = CATEGORY_BASE def process(self, image): yield 'Start...', image image = colors.convert_to_luminosity(image) yield 'Contrast...', image image = ImageOps.autocontrast(image) yield 'Done', image class _BwFilter(BaseFilter): STEPS = 3 NAME = 'BW Filter' CATEGORY = CATEGORY_BASE _COLOR = (1, 1, 1) def process(self, image): yield 'Start...', image image = colors.color_mixer_monochrome(image, *self._COLOR) yield 'Contrast...', image image = ImageOps.autocontrast(image) yield 'Done', image class BwGreen(_BwFilter): NAME = _('BW Green Filter') _COLOR = 0.04, 0.27, 0.08 class BwOrange(_BwFilter): NAME = _('BW Orange Filter') _COLOR = (0.31, 0.09, 0) class BwRed(_BwFilter): NAME = _('BW Red Filter') _COLOR = (0.35, 0.04, 0) class BwYellow(_BwFilter): NAME = _('BW Yellow Filter') _COLOR = (0.24, 0.11, 0.05) class BwInfrared(_BwFilter): NAME = _('BW Infrared') _COLOR = (0.15, 1.15, -0.30)
Consultant – The European Consultancy Hub. Control over your destiny and increased earning potential. Isn’t that why you decided to be a consultant? You have the freedom to travel the globe and seek the most fulfilling and financially rewarding role. You may just be starting out or have honed a skill-set over the years to pitch for high-profile boardroom roles. Your network is everything to you, to ensure that you remain ahead of the curve.
# Copyright (C) 2016 Reed Anderson. # From: https://github.com/ReedAnders/deepmap # License: MIT BY https://opensource.org/licenses/MIT import pickle, os, binascii from collections import deque import numpy as np from math import exp from random import random class NodeMap: def __init__(self, input_node_population=12, output_node_population=1, latent_node_population=400): self.coordinate_map = [] self.input_nodes = [InputNode() for node in range(input_node_population)] self.output_nodes = [OutputNode() for node in range(output_node_population)] self.latent_nodes = [LatentNode() for node in range(latent_node_population)] self.all_nodes = self.input_nodes + self.output_nodes + self.latent_nodes def construct_map(self): for node in self.all_nodes: self.coordinate_map.append((node.name, node.coordinates)) for node in self.all_nodes: node.find_neighbors(self.coordinate_map) self.update_input_values() # pickle.dump( self.coordinate_map, open( "pickles/coordinate_map.p", "wb" ) ) # pickle.dump( self.input_nodes, open( "pickles/input_nodes.p", "wb" ) ) # pickle.dump( self.output_nodes, open( "pickles/output_nodes.p", "wb" ) ) # pickle.dump( self.latent_nodes, open( "pickles/latent_nodes.p", "wb" ) ) def calculate_dimensions(self): n_params = 0 for node in self.all_nodes: n_params += 2 n_params += len(node.true_neighbor_index) return n_params def error(self, correct_labels, predicted_labels): error = None pattern_error = [] n_training_patterns = len(correct_labels) for i in range(n_training_patterns): _sum = sum([(y-o)**2 for y,o in zip(correct_labels, predicted_labels)]) pattern_error.append(_sum) error = 1.0/n_training_patterns * sum(pattern_error) return error def train(self, training_patterns, param): n_training_patterns = len(training_patterns) for i in training_patterns: n_labels = len(self.output_nodes) inputs = i[:-n_labels] c_labels = i[-n_labels:] p_labels = self.evaluate_topology(inputs, param) error = self.error(c_labels, p_labels) fitness = 1 - error print 'ERROR: %r' % (error) return error, fitness def evaluate_topology(self, data, param): p_labels = [] for index, node in enumerate(self.input_nodes): node.value = float(data[index]) # Trim parameters p_len = len(param) t_len = len(self.latent_nodes + self.output_nodes) * 2 w_len = p_len - t_len w_para = param[:w_len] # t_para = deque(param[w_len-2:]) # Evaluate function for node in self.latent_nodes + self.output_nodes: self.evaluate_weights(w_para) t_para = deque(param[w_len-2:]) # for node in self.latent_nodes + self.output_nodes: # node_topo_params = [t_para.popleft() for _i in range(2)] # node.eval_neighbors(node_topo_params[0],node_topo_params[1]) # Return predicted labels p_labels = [node.value for node in self.output_nodes] return p_labels def evaluate_weights(self, param): w_para = deque(param) for node in self.latent_nodes + self.output_nodes: neighbors = len(node.true_neighbor_index) node_weight_params = [w_para.popleft() for _i in range(neighbors)] node.eval_sigmoid(node_weight_params) self.update_input_values() def update_input_values(self): for node in self.output_nodes + self.latent_nodes: for index in node.true_neighbor_index: node.input_values.append(self.all_nodes[index].value) class Node: def __init__(self, dimensions=3): self.name = binascii.b2a_hex(os.urandom(8)) self.coordinates = np.array([random() for i in range(dimensions)]) self.neighbors = [] self.true_neighbor_index = [] self.optimal_neighbor_set = set() self.value = 0.0 def find_neighbors(self, coordinate_map): for index, node in enumerate(coordinate_map): 
if np.linalg.norm(self.coordinates-node[1]) < 0.3: self.true_neighbor_index.append(index) self.neighbors.append((node,True)) else: self.neighbors.append((node,False)) # Two parameters between -1, 1 def eval_neighbors(self, lower_bound, upper_bound): for index in self.true_neighbor_index: dist = np.linalg.norm(self.coordinates-self.neighbors[index][0][1]) if dist > lower_bound and dist < upper_bound: self.optimal_neighbor_set.add(index) class InputNode(Node): def __init__(self): Node.__init__(self) class LatentNode(Node): def __init__(self): Node.__init__(self) self.value = random() self.input_values = [] # Multiple parameters for n weights -1, 1 def eval_sigmoid(self, weights): x = sum([w*v for w,v in zip(weights, self.input_values)]) self.value = 1 / (1 + exp(-x)) class OutputNode(LatentNode): def __init__(self): LatentNode.__init__(self)
We have worked with different business owners in the Camp Meeker area, and 99% have been satisfied with our service for 10 or even 20 years. We are always on time in delivering your construction dumpster in Camp Meeker, CA since we know that you follow a strict schedule. You can be sure that your dumpster will arrive at the desired location before you start your project. Call us now at 888-609-4426 to request a free quote. We not only provide you with a quality dumpster for rent in Camp Meeker, CA, but we also offer the most competitive rates. You don’t need to look any further; we have the best and most cost-effective dumpsters in Camp Meeker. Most other dumpster rental companies offer very attractive prices, but this can be a trick – in the final bill you may see additional taxes and fees. We don’t work that way – you pay exactly the amount you see in the initial quote. If you get in touch with us, you will get fast and reliable service as well as on-time pickup of your construction dumpster in Camp Meeker, CA. You can call us and our friendly staff will gladly assist you in choosing the most suitable dumpster for your project. You can choose several dumpsters of different sizes for different waste types; e.g. a 10 cubic yard container for metal scrap and a 40 cubic yard container for lightweight garbage. We have serviced the Camp Meeker area for 20 years, and we know exactly what every contractor, homeowner and property manager needs. We also provide easy payment methods which include check, cash and major credit cards. We work closely with any client and we always base our price quote on the amount and type of material you will be putting in the containers. Our construction dumpsters in Camp Meeker, CA can be delivered to your location, residential driveway or anything in between (street, parking lots, etc.). We want to help you grow your business!
#!/usr/bin/env python """ Copyright 2016 Aaron Stephens <aaronjst93@gmail.com> Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """ # CPU Types, CPU Subtypes, Filetypes, Load Commands, Flags as defined in the # following official Apple, inc. header files: # /usr/include/mach/machine.h # /usr/include/mach-o/loader.h cert_slots = { -1: 'root', 0: 'leaf' } hashes = { 0: 'No Hash', 1: 'SHA-1', 2: 'SHA-256' } segment_flags = { 1: 'HIGHVM', 2: 'FVMLIB', 4: 'NORELOC', 8: 'PROTECTED_VERSION_1' } n_types = { 0x0: 'UNDF', 0x2: 'ABS', 0xe: 'SECT', 0xc: 'PBUD', 0xa: 'INDR' } machos = { 4277009102: (False, False), # 32 bit, big endian 4277009103: (True, False), # 64 bit, big endian 3472551422: (False, True), # 32 bit, little endian 3489328638: (True, True) # 64 bit, little endian } requirements = { 1: 'HostRequirementType', 2: 'GuestRequirementType', 3: 'DesignatedRequirementType', 4: 'LibraryRequirementType', 5: 'PluginRequirementType', } indeces = { 0: 'CodeDirectorySlot', 1: 'InfoSlot', 2: 'RequirementsSlot', 3: 'ResourceDirSlot', 4: 'ApplicationSlot', 5: 'EntitlementSlot', 0x10000: 'SignatureSlot' } matches = { 0: 'matchExists', 1: 'matchEqual', 2: 'matchContains', 3: 'matchBeginsWith', 4: 'matchEndsWith', 5: 'matchLessThan', 6: 'matchGreaterThan', 7: 'matchLessEqual', 8: 'matchGreaterEqual' } protections = { 0b000: '---', 0b001: 'r--', 0b010: '-w-', 0b011: 'rw-', 0b100: '--x', 0b101: 'r-x', 0b110: '-wx', 0b111: 'rwx' } signatures = { 'REQUIREMENT': 0xfade0c00, 'REQUIREMENTS': 0xfade0c01, 'CODEDIRECTORY': 0xfade0c02, 'ENTITLEMENT': 0xfade7171, 'BLOBWRAPPER': 0xfade0b01, 'EMBEDDED_SIGNATURE': 0xfade0cc0, 'DETACHED_SIGNATURE': 0xfade0cc1, 'CODE_SIGN_DRS': 0xfade0c05 } section_attrs = { 0x80000000: 'PURE_INSTRUCTIONS', 0x40000000: 'NO_TOC', 0x20000000: 'STRIP_STATIC_SYMS', 0x10000000: 'NO_DEAD_STRIP', 0x08000000: 'LIVE_SUPPORT', 0x04000000: 'SELF_MODIFYING_CODE', 0x02000000: 'DEBUG', 0x00000400: 'SOME_INSTRUCTIONS', 0x00000200: 'EXT_RELOC', 0x00000100: 'LOC_RELOC' } filetypes = { 1: 'OBJECT', 2: 'EXECUTE', 3: 'FVMLIB', 4: 'CORE', 5: 'PRELOAD', 6: 'DYLIB', 7: 'DYLINKER', 8: 'BUNDLE', 9: 'DYLIB_STUB', 10: 'DSYM', 11: 'KEXT_BUNDLE' } section_types = { 0x0: 'REGULAR', 0x1: 'ZEROFILL', 0x2: 'CSTRING_LITERALS', 0x3: '4BYTE_LITERALS', 0x4: '8BYTE_LITERALS', 0x5: 'LITERAL_POINTERS', 0x6: 'NON_LAZY_SYMBOL_POINTERS', 0x7: 'LAZY_SYMBOL_POINTERS', 0x8: 'SYMBOL_STUBS', 0x9: 'MOD_INIT_FUNC_POINTERS', 0xa: 'MOD_TERM_FUNC_POINTERS', 0xb: 'COALESCED', 0xc: 'GB_ZEROFILL', 0xd: 'INTERPOSING', 0xe: '16BYTE_LITERALS', 0xf: 'DTRACE_DOF', 0x10: 'LAZY_DYLIB_SYMBOL_POINTERS', 0x11: 'THREAD_LOCAL_REGULAR', 0x12: 'THREAD_LOCAL_ZEROFILL', 0x13: 'THREAD_LOCAL_VARIABLES', 0x14: 'THREAD_LOCAL_VARIABLE_POINTERS', 0x15: 'THREAD_LOCAL_INIT_FUNCTION_POINTERS' } operators = { 0: 'False', 1: 'True', 2: 'Ident', 3: 'AppleAnchor', 4: 'AnchorHash', 5: 'InfoKeyValue', 6: 'And', 7: 'Or', 8: 'CDHash', 9: 'Not', 10: 'InfoKeyField', 11: 'CertField', 12: 'TrustedCert', 13: 'TrustedCerts', 14: 'CertGeneric', 15: 'AppleGenericAnchor', 16: 'EntitlementField', 17: 'CertPolicy', 18: 
'NamedAnchor', 19: 'NamedCode', 20: 'Platform' } thread_states = { 1: 'x86_THREAD_STATE32', 2: 'x86_FLOAT_STATE32', 3: 'x86_EXCEPTION_STATE32', 4: 'x86_THREAD_STATE64', 5: 'x86_FLOAT_STATE64', 6: 'x86_EXCEPTION_STATE64', 7: 'x86_THREAD_STATE', 8: 'x86_FLOAT_STATE', 9: 'x86_EXCEPTION_STATE', 10: 'x86_DEBUG_STATE32', 11: 'x86_DEBUG_STATE64', 12: 'x86_DEBUG_STATE', 13: 'THREAD_STATE_NONE', 14: 'x86_SAVED_STATE_1 (INTERNAL ONLY)', 15: 'x86_SAVED_STATE_2 (INTERNAL ONLY)', 16: 'x86_AVX_STATE32', 17: 'x86_AVX_STATE64', 18: 'x86_AVX_STATE' } flags = { 1: 'NOUNDEFS', 2: 'INCRLINK', 4: 'DYLDLINK', 8: 'BINDATLOAD', 16: 'PREBOUND', 32: 'SPLIT_SEGS', 64: 'LAZY_INIT', 128: 'TWOLEVEL', 256: 'FORCE_FLAT', 512: 'NOMULTIDEFS', 1024: 'NOFIXPREBINDING', 2048: 'PREBINDABLE', 4096: 'ALLMODSBOUND', 8192: 'SUBSECTIONS_VIA_SYMBOLS', 16384: 'CANONICAL', 32768: 'WEAK_DEFINES', 65536: 'BINDS_TO_WEAK', 131072: 'ALLOW_STACK_EXECUTION', 262144: 'ROOT_SAFE', 524288: 'SETUID_SAFE', 1048576: 'NOREEXPORTED_DYLIBS', 2097152: 'PIE', 4194304: 'DEAD_STRIPPABLE_DYLIB', 8388608: 'HAS_TLV_DESCRIPTORS', 16777216: 'NO_HEAP_EXECUTION', 33554432: 'APP_EXTENSION_SAFE' } stabs = { 0x20: 'GSYM', 0x22: 'FNAME', 0x24: 'FUN', 0x26: 'STSYM', 0x28: 'LCSYM', 0x2a: 'MAIN', 0x2e: 'BNSYM', 0x30: 'PC', 0x32: 'AST', 0x3a: 'MAC_UNDEF', 0x3c: 'OPT', 0x40: 'RSYM', 0x44: 'SLINE', 0x46: 'DSLINE', 0x48: 'BSLINE', 0x4e: 'ENSYM', 0x60: 'SSYM', 0x64: 'SO', 0x66: 'OSO', 0x80: 'LSYM', 0x82: 'BINCL', 0x84: 'SOL', 0x86: 'PARAMS', 0x88: 'VERSION', 0x8a: 'OLEVEL', 0xa0: 'PSYM', 0xa2: 'EINCL', 0xa4: 'ENTRY', 0xc0: 'LBRAC', 0xc2: 'EXCL', 0xe0: 'RBRAC', 0xe2: 'BCOMM', 0xe4: 'ECOMM', 0xe8: 'ECOML', 0xfe: 'LENG' } loadcommands = { 1: 'SEGMENT', 2: 'SYMTAB', 3: 'SYMSEG', 4: 'THREAD', 5: 'UNIXTHREAD', 6: 'LOADFVMLIB', 7: 'IDFVMLIB', 8: 'IDENT', 9: 'FVMFILE', 10: 'PREPAGE', 11: 'DYSYMTAB', 12: 'LOAD_DYLIB', 13: 'ID_DYLIB', 14: 'LOAD_DYLINKER', 15: 'ID_DYLINKER', 16: 'PREBOUND_DYLIB', 17: 'ROUTINES', 18: 'SUB_FRAMEWORK', 19: 'SUB_UMBRELLA', 20: 'SUB_CLIENT', 21: 'SUB_LIBRARY', 22: 'TWOLEVEL_HINTS', 23: 'PREBIND_CKSUM', 25: 'SEGMENT_64', 26: 'ROUTINES_64', 27: 'UUID', 29: 'CODE_SIGNATURE', 30: 'SEGMENT_SPLIT_INFO', 32: 'LAZY_LOAD_DYLIB', 33: 'ENCRYPTION_INFO', 34: 'DYLD_INFO', 36: 'VERSION_MIN_MACOSX', 37: 'VERSION_MIN_IPHONEOS', 38: 'FUNCTION_STARTS', 39: 'DYLD_ENVIRONMENT', 41: 'DATA_IN_CODE', 42: 'SOURCE_VERSION', 43: 'DYLIB_CODE_SIGN_DRS', 44: 'ENCRYPTION_INFO_64', 45: 'LINKER_OPTION', 46: 'LINKER_OPTIMIZATION_HINT', 47: 'VERSION_MIN_TVOS', 48: 'VERSION_MIN_WATCHOS', 49: 'NOTE', 50: 'BUILD_VERSION', 2147483672: 'LOAD_WEAK_DYLIB', 2147483676: 'RPATH', 2147483679: 'REEXPORT_DYLIB', 2147483682: 'DYLD_INFO_ONLY', 2147483683: 'LOAD_UPWARD_DYLIB', 2147483688: 'MAIN', } # CPU Types & Subtypes as defined in # http://opensource.apple.com/source/cctools/cctools-822/include/mach/machine.h cputypes = { -1: { -2: 'ANY', -1: 'MULTIPLE', 0: 'LITTLE_ENDIAN', 1: 'BIG_ENDIAN' }, 1: { -2: 'VAX', -1: 'MULTIPLE', 0: 'VAX_ALL', 1: 'VAX780', 2: 'VAX785', 3: 'VAX750', 4: 'VAX730', 5: 'UVAXI', 6: 'UVAXII', 7: 'VAX8200', 8: 'VAX8500', 9: 'VAX8600', 10: 'VAX8650', 11: 'VAX8800', 12: 'UVAXIII' }, 6: { -2: 'MC680x0', -1: 'MULTIPLE', 1: 'MC680x0_ALL or MC68030', 2: 'MC68040', 3: 'MC68030_ONLY' }, 7: {-2: 'X86 (I386)', -1: 'MULITPLE', 0: 'INTEL_MODEL_ALL', 3: 'X86_ALL, X86_64_ALL, I386_ALL, or 386', 4: 'X86_ARCH1 or 486', 5: '586 or PENT', 8: 'X86_64_H or PENTIUM_3', 9: 'PENTIUM_M', 10: 'PENTIUM_4', 11: 'ITANIUM', 12: 'XEON', 15: 'INTEL_FAMILY_MAX', 22: 'PENTPRO', 24: 'PENTIUM_3_M', 26: 
'PENTIUM_4_M', 27: 'ITANIUM_2', 28: 'XEON_MP', 40: 'PENTIUM_3_XEON', 54: 'PENTII_M3', 86: 'PENTII_M5', 103: 'CELERON', 119: 'CELERON_MOBILE', 132: '486SX' }, 10: { -2: 'MC98000', -1: 'MULTIPLE', 0: 'MC98000_ALL', 1: 'MC98601' }, 11: { -2: 'HPPA', -1: 'MULITPLE', 0: 'HPPA_ALL or HPPA_7100', 1: 'HPPA_7100LC' }, 12: { -2: 'ARM', -1: 'MULTIPLE', 0: 'ARM_ALL', 1: 'ARM_A500_ARCH', 2: 'ARM_A500', 3: 'ARM_A440', 4: 'ARM_M4', 5: 'ARM_V4T', 6: 'ARM_V6', 7: 'ARM_V5TEJ', 8: 'ARM_XSCALE', 9: 'ARM_V7', 10: 'ARM_V7F', 11: 'ARM_V7S', 12: 'ARM_V7K', 13: 'ARM_V8', 14: 'ARM_V6M', 15: 'ARM_V7M', 16: 'ARM_V7EM' }, 13: { -2: 'MC88000', -1: 'MULTIPLE', 0: 'MC88000_ALL', 1: 'MMAX_JPC or MC88100', 2: 'MC88110' }, 14: { -2: 'SPARC', -1: 'MULTIPLE', 0: 'SPARC_ALL or SUN4_ALL', 1: 'SUN4_260', 2: 'SUN4_110' }, 15: { -2: 'I860 (big-endian)', -1: 'MULTIPLE', 0: 'I860_ALL', 1: 'I860_860' }, 18: { -2: 'POWERPC', -1: 'MULTIPLE', 0: 'POWERPC_ALL', 1: 'POWERPC_601', 2: 'POWERPC_602', 3: 'POWERPC_603', 4: 'POWERPC_603e', 5: 'POWERPC_603ev', 6: 'POWERPC_604', 7: 'POWERPC_604e', 8: 'POWERPC_620', 9: 'POWERPC_750', 10: 'POWERPC_7400', 11: 'POWERPC_7450', 100: 'POWERPC_970' }, 16777223: { -2: 'X86_64', -1: 'MULTIPLE', 0: 'INTEL_MODEL_ALL', 3: 'X86_ALL, X86_64_ALL, I386_ALL, or 386', 4: 'X86_ARCH1 or 486', 5: '586 or PENT', 8: 'X86_64_H or PENTIUM_3', 9: 'PENTIUM_M', 10: 'PENTIUM_4', 11: 'ITANIUM', 12: 'XEON', 15: 'INTEL_FAMILY_MAX', 22: 'PENTPRO', 24: 'PENTIUM_3_M', 26: 'PENTIUM_4_M', 27: 'ITANIUM_2', 28: 'XEON_MP', 40: 'PENTIUM_3_XEON', 54: 'PENTII_M3', 86: 'PENTII_M5', 103: 'CELERON', 119: 'CELERON_MOBILE', 132: '486SX', 2147483648 + 0: 'INTEL_MODEL_ALL', 2147483648 + 3: 'X86_ALL, X86_64_ALL, I386_ALL, or 386', 2147483648 + 4: 'X86_ARCH1 or 486', 2147483648 + 5: '586 or PENT', 2147483648 + 8: 'X86_64_H or PENTIUM_3', 2147483648 + 9: 'PENTIUM_M', 2147483648 + 10: 'PENTIUM_4', 2147483648 + 11: 'ITANIUM', 2147483648 + 12: 'XEON', 2147483648 + 15: 'INTEL_FAMILY_MAX', 2147483648 + 22: 'PENTPRO', 2147483648 + 24: 'PENTIUM_3_M', 2147483648 + 26: 'PENTIUM_4_M', 2147483648 + 27: 'ITANIUM_2', 2147483648 + 28: 'XEON_MP', 2147483648 + 40: 'PENTIUM_3_XEON', 2147483648 + 54: 'PENTII_M3', 2147483648 + 86: 'PENTII_M5', 2147483648 + 103: 'CELERON', 2147483648 + 119: 'CELERON_MOBILE', 2147483648 + 132: '486SX' }, 16777228: { -2: 'ARM64', -1: 'MULTIPLE', 0: 'ARM64_ALL', 1: 'ARM64_V8', 2147483648 + 0: 'ARM64_ALL', 2147483648 + 1: 'ARM64_V8' }, 16777234: { -2: 'POWERPC64', -1: 'MULTIPLE', 0: 'POWERPC_ALL', 1: 'POWERPC_601', 2: 'POWERPC_602', 3: 'POWERPC_603', 4: 'POWERPC_603e', 5: 'POWERPC_603ev', 6: 'POWERPC_604', 7: 'POWERPC_604e', 8: 'POWERPC_620', 9: 'POWERPC_750', 10: 'POWERPC_7400', 11: 'POWERPC_7450', 100: 'POWERPC_970', 2147483648 + 0: 'POWERPC_ALL (LIB64)', 2147483648 + 1: 'POWERPC_601 (LIB64)', 2147483648 + 2: 'POWERPC_602 (LIB64)', 2147483648 + 3: 'POWERPC_603 (LIB64)', 2147483648 + 4: 'POWERPC_603e (LIB64)', 2147483648 + 5: 'POWERPC_603ev (LIB64)', 2147483648 + 6: 'POWERPC_604 (LIB64)', 2147483648 + 7: 'POWERPC_604e (LIB64)', 2147483648 + 8: 'POWERPC_620 (LIB64)', 2147483648 + 9: 'POWERPC_750 (LIB64)', 2147483648 + 10: 'POWERPC_7400 (LIB64)', 2147483648 + 11: 'POWERPC_7450 (LIB64)', 2147483648 + 100: 'POWERPC_970 (LIB64)' } }
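Since every table above is a plain dict keyed by the raw integers found in Mach-O headers, decoding a header field is a straightforward lookup; a small illustrative example with made-up header values (an arm64 slice):

cputype, cpusubtype = 16777228, 0                             # 0x0100000C = ARM | ABI64; subtype 0 = ALL (illustrative values)
print(cputypes[cputype][-2], cputypes[cputype][cpusubtype])   # -> ARM64 ARM64_ALL
print(loadcommands[25], filetypes[2], protections[0b101])     # -> SEGMENT_64 EXECUTE r-x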
Hell, I know that I have an abundance of sunglasses too but I am addicted to the new and shiny things of life. Shop with Zenni Optical Coupon, Save with Anycodes. Anycodes.com aims to make your shopping more enjoyable by collecting all active and working coupons and deals for you. That will accelerate the shift to online prescription glasses and eliminate. Learn about the Board of Directors, Executive Committees and CEO compensation in this industry. Zenni Optical Promo Code 50% Off 2019 Free Shipping Code. 50% off Zenni Optical is an online store that sells glasses for men, women and kids. I have a friend who was not satisfied, though, and is sure the prescription is wrong on her Zenni glasses.I was able to buy 3 pair of glasses (one with tinted lenses for sunglasses) for the same cost as a single pair locally.Express all facets of your personality with our low-priced glasses and sunglasses in every style, shape, and color imaginable.Zenni Optical Eyeglasses reviews: Worn out from attempting to buy Zenni glasses for the first time. After having a not so great experience buying glasses from my eye doctor, I heard about Zenni and decided to give it a try the next time I needed glasses. A wide variety of zenni optical options are available to you, such as stainless, alloy.Zenni Optical has all of the popular eyeglass frames for women at affordable prices.Milled has emails from ZenniOptical, including new arrivals, sales, discounts, and coupon codes.Before doing this cancellation procedure, you should check your profile first.The store is the place for customers that are willing to improve their eyesight in a stylish way at very affordable price. Zenni Womens Cat-Eye Prescription Glasses Gray Frame TR 2013412. A wide variety of zenni eyeglasses options are available to you, such as acetate, titanium, and stainless. At Zenni, we believe everyone should have access to high-quality, affordable eyewear. About 2 weeks ago I bought 4 pairs of eyeglasses from ZenniOptical.com and got them about a week. Additionally, you will need to know your PD (pupillary distance). Save on last-minute gifts with holiday sales and deals from Zenni Optical.Zenni Optical is an affordable place to purchase your prescription eyeglasses. Come to Zenni Optical and you will find prescription eyeglasses, sunglasses and eyewear of different styles, colors, shapes and meterial. Survey respondents who bought from Zenni spent a median of just.If you need prescription glasses, it will be necessary to have your eye prescription.By looking at Zenni Optical reviews and Zenni Optical complaints from shoppers around the country, our Zenni glasses review will help you determine whether Zenni prescription glasses are the best idea for your eye health—and your wallet. At Zenni Optical, we believe everyone should have access to high-quality, stylish eyewear.I paid for defective eyeglasses and they will not compensate. Accessories can make or break an outfit, and the most important accessory is the pair of shoes. We are going to keep the Zenni glasses as a back-up, and we may try ordering from them again in the future.Our trendy, not spendy eyewear allows you to express all sides of your personality without breaking the bank. 
Zenni Optical review with 3 Comments: I ordered twice from these folks.This prescription eyewear retailer lets customers order glasses online at the best value with Zenni Optical coupon codes and everyday glasses sales.About 96% of these are eyeglasses frames, 1% are eyeglasses lenses, and 1% are prisms.After I had my eyeglasses for about 40 days I noticed a crack in the lens. The process might be easier when we have a better idea of what styles of glasses work best for his face.
#!/usr/bin/env python # -*- coding: utf-8 -*- """ .. See the NOTICE file distributed with this work for additional information regarding copyright ownership. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """ from __future__ import print_function import os.path import argparse import sys import tarfile import multiprocessing import json import shutil try: from urllib2 import urlopen except ImportError: from urllib.request import urlopen from random import random from string import ascii_letters as letters from basic_modules.workflow import Workflow from basic_modules.metadata import Metadata from utils import logger from utils import remap from tool.common import CommandLineParser from tool.common import format_utils from tool.tb_model import tbModelTool if '/opt/COMPSs/Bindings/python' in sys.path: sys.path.pop(sys.path.index('/opt/COMPSs/Bindings/python')) # ------------------------------------------------------------------------------ class tadbit_model(Workflow): # pylint: disable=invalid-name,too-few-public-methods """ Wrapper for the VRE form TADbit model. It has two main sections: - looks for optimal parameters for modeling a region - models a region for a given optimal parameters . """ configuration = {} def __init__(self, configuration=None): """ Initialise the tool with its configuration. Parameters ---------- configuration : dict a dictionary containing parameters that define how the operation should be carried out, which are specific to each Tool. 
""" tool_extra_config = json.load(open(os.path.dirname( os.path.abspath(__file__))+'/tadbit_wrappers_config.json')) os.environ["PATH"] += os.pathsep + format_utils.convert_from_unicode( tool_extra_config["bin_path"]) if configuration is None: configuration = {} self.configuration.update(format_utils.convert_from_unicode(configuration)) # Number of cores available num_cores = multiprocessing.cpu_count() self.configuration["ncpus"] = num_cores tmp_name = ''.join([letters[int(random()*52)]for _ in range(5)]) if 'execution' in self.configuration: self.configuration['project'] = self.configuration['execution'] self.configuration['workdir'] = self.configuration['project']+'/_tmp_tadbit_'+tmp_name if not os.path.exists(self.configuration['workdir']): os.makedirs(self.configuration['workdir']) self.configuration["optimize_only"] = "generation:num_mod_comp" not in self.configuration if "optimization:max_dist" in self.configuration and \ not self.configuration["optimize_only"]: del self.configuration["optimization:max_dist"] del self.configuration["optimization:upper_bound"] del self.configuration["optimization:lower_bound"] del self.configuration["optimization:cutoff"] self.configuration.update( {(key.split(':'))[-1]: val for key, val in self.configuration.items()} ) if self.configuration["gen_pos_chrom_name"] == 'all': self.configuration["gen_pos_chrom_name"] = "" self.configuration["gen_pos_begin"] = "" self.configuration["gen_pos_end"] = "" if "gen_pos_begin" not in self.configuration: self.configuration["gen_pos_begin"] = "" if "gen_pos_end" not in self.configuration: self.configuration["gen_pos_end"] = "" def run(self, input_files, metadata, output_files): """ Parameters ---------- files_ids : list List of file locations metadata : list Required meta data output_files : list List of output file locations Returns ------- outputfiles : list List of locations for the output bam files """ logger.info( "PROCESS MODEL - FILES PASSED TO TOOLS: {0}".format( str(input_files["hic_contacts_matrix_norm"])) ) m_results_meta = {} m_results_files = {} if "norm" in metadata['hic_contacts_matrix_norm'].meta_data: if metadata['hic_contacts_matrix_norm'].meta_data["norm"] != 'norm': clean_temps(self.configuration['workdir']) logger.fatal("Only normalized matrices can be used to build 3D models.\nExiting") raise ValueError('Missing normalized input matrix.') input_metadata = remap(self.configuration, "optimize_only", "gen_pos_chrom_name", "resolution", "gen_pos_begin", "gen_pos_end", "max_dist", "upper_bound", "lower_bound", "cutoff", "workdir", "project", "ncpus") in_files = [format_utils.convert_from_unicode(input_files['hic_contacts_matrix_norm'])] input_metadata["species"] = "Unknown" input_metadata["assembly"] = "Unknown" if "assembly" in metadata['hic_contacts_matrix_norm'].meta_data: input_metadata["assembly"] = metadata['hic_contacts_matrix_norm'].meta_data["assembly"] if metadata['hic_contacts_matrix_norm'].taxon_id: dt_json = json.load(urlopen( "http://www.ebi.ac.uk/ena/data/taxonomy/v1/taxon/tax-id/" + str(metadata['hic_contacts_matrix_norm'].taxon_id))) input_metadata["species"] = dt_json['scientificName'] input_metadata["num_mod_comp"] = self.configuration["num_mod_comp"] input_metadata["num_mod_keep"] = self.configuration["num_mod_keep"] tm_handler = tbModelTool() tm_files, _ = tm_handler.run(in_files, input_metadata, []) m_results_files["modeling_stats"] = self.configuration['project']+"/model_stats.tar.gz" tar = tarfile.open(m_results_files["modeling_stats"], "w:gz") tar.add(tm_files[0], 
arcname='modeling_files_and_stats') tar.close() if not self.configuration["optimize_only"]: m_results_files["tadkit_models"] = self.configuration['project'] + "/" + \ os.path.basename(tm_files[1]) os.rename(tm_files[1], m_results_files["tadkit_models"]) m_results_meta["tadkit_models"] = Metadata( data_type="chromatin_3dmodel_ensemble", file_type="JSON", file_path=m_results_files["tadkit_models"], sources=in_files, meta_data={ "description": "Ensemble of chromatin 3D structures", "visible": True, "assembly": input_metadata["assembly"] }, taxon_id=metadata['hic_contacts_matrix_norm'].taxon_id) # List of files to get saved logger.info("TADBIT RESULTS: " + ','.join( [str(m_results_files[k]) for k in m_results_files])) m_results_meta["modeling_stats"] = Metadata( data_type="tool_statistics", file_type="TAR", file_path=m_results_files["modeling_stats"], sources=in_files, meta_data={ "description": "TADbit modeling statistics and result files", "visible": True }) clean_temps(self.configuration['workdir']) return m_results_files, m_results_meta # ------------------------------------------------------------------------------ def main(args): """ Main function """ from apps.jsonapp import JSONApp app = JSONApp() result = app.launch(tadbit_model, args.config, args.in_metadata, args.out_metadata) return result def clean_temps(working_path): """Cleans the workspace from temporal folder and scratch files""" for the_file in os.listdir(working_path): file_path = os.path.join(working_path, the_file) try: if os.path.isfile(file_path): os.unlink(file_path) elif os.path.isdir(file_path): shutil.rmtree(file_path) except OSError: pass try: os.rmdir(working_path) except OSError: pass logger.info('[CLEANING] Finished') def make_absolute_path(files, root): """Make paths absolute.""" for role, path in files.items(): files[role] = os.path.join(root, path) return files # ------------------------------------------------------------------------------ if __name__ == "__main__": sys._run_from_cmdl = True # pylint: disable=protected-access # Set up the command line parameters PARSER = argparse.ArgumentParser(description="TADbit map") # Config file PARSER.add_argument("--config", help="Configuration JSON file", type=CommandLineParser.valid_file, metavar="config", required=True) # Metadata PARSER.add_argument("--in_metadata", help="Project metadata", metavar="in_metadata", required=True) # Output metadata PARSER.add_argument("--out_metadata", help="Output metadata", metavar="output_metadata", required=True) # Log file PARSER.add_argument("--log_file", help="Log file", metavar="log_file", required=True) IN_ARGS = PARSER.parse_args() RESULTS = main(IN_ARGS)
Posted on August 30, 2018 by CBS Kenya Ltd. Customer Visit Management System (CVM) is an innovative solution that handles the whole visit of your customers, even before they step into your branch, offering them a unique experience that far exceeds their expectations, in order to turn them into your loyal customers. The system grants you full control over your branches, bringing great value to both you and your customers. The CVM solution consists of a set of integrated sub-systems that work together to help you manage customer visits at a lower cost and in less time, turning each visit into an opportunity to increase revenue. CBS Kenya has implemented the system in NIC Bank's branches across the country.
""" Encoding/decoding of communion messages message must be a dict containing: "mode": "request" or "response" "id": 32-bit identifier, should increase "content": None, bool, bytes, str, int, float, or tuple of str/int/float/bool remaining keys: anything JSON-serializable encoded message is binary, and consists of: header SEAMLESS tip: 0 for request, 1 for response identifier: 32-bit nrem: 32-bit, the length of the remaining keys buffer (after content) content: is_str byte + remainder. For is_str: 0: No remainder, message is None 1: bool. remainder is 0 or 1 2: bytes. remainder is raw content 3: str. remainder is UTF-8 encoded content 4: int/float/tuple. remainder is JSON-encoded content. rem: remaining keys buffer (JSON format) """ import numpy as np import json def communion_encode(msg): assert msg["mode"] in ("request", "response") m = 'SEAMLESS'.encode() tip = b'\x00' if msg["mode"] == "request" else b'\x01' m += tip m += np.uint32(msg["id"]).tobytes() remainder = msg.copy() remainder.pop("mode") remainder.pop("id") remainder.pop("content") if len(remainder.keys()): rem = json.dumps(remainder).encode() nrem = np.uint32(len(rem)).tobytes() m += nrem m += rem else: m += b'\x00\x00\x00\x00' content = msg["content"] if content is None: m += b'\x00' else: assert isinstance(content, (str, int, float, bytes, bool, tuple)), content if isinstance(content, bool): is_str = b'\x01' elif isinstance(content, (int, float, tuple)): is_str = b'\x04' else: is_str = b'\x03' if isinstance(content, str) else b'\x02' m += is_str if isinstance(content, str): content = content.encode() elif isinstance(content, bool): content = b'\x01' if content else b'\x00' elif isinstance(content, (int, float, tuple)): if isinstance(content, tuple): for item in content: assert item is None or isinstance(item, (str, int, float, bool)), type(item) content = json.dumps(content).encode() m += content assert communion_decode(m) == msg, (communion_decode(m), msg) return m def communion_decode(m): assert isinstance(m, bytes) message = {} head = 'SEAMLESS'.encode() assert m[:len(head)] == head m = m[len(head):] tip = m[:1] m = m[1:] assert tip == b'\x01' or tip == b'\x00', tip message["mode"] = "request" if tip == b'\x00' else "response" l1, l2 = m[:4], m[4:8] m = m[8:] message["id"] = np.frombuffer(l1,np.uint32)[0] nrem = np.frombuffer(l2,np.uint32)[0] if nrem: rem = m[:nrem] rem = rem.decode() rem = json.loads(rem) message.update(rem) m = m[nrem:] is_str = m[:1] if is_str == b'\x00': content = None elif is_str == b'\x01': content = True if m[1:] == b'\x01' else False elif is_str == b'\x04': content = json.loads(m[1:]) assert isinstance(content, (int, float, list)) if isinstance(content, list): for item in content: assert item is None or isinstance(item, (str, int, float, bool)), type(item) content = tuple(content) else: assert is_str == b'\x03' or is_str == b'\x02' content = m[1:] if is_str == b'\x03': content = content.decode() message["content"] = content return message
The e-commerce giant’s love affair with brick-and-mortar continues with a new store concept, Amazon 4-Star. The store, which opened its doors Thursday in Soho, is similar to Amazon Books, but with a range of products besides books stocked on its shelves–all of which must have a rating of at least 4 stars by Amazon customers, Forbes reported. Kitchen appliances, home gadgets, devices and electronics, toys and games–the store is stocked with popular items most people are likely to buy.
# coding=utf-8 __author__ = 'walthermaciel' from geopy.geocoders import DataBC from geopy.exc import GeopyError from time import sleep import sys from ssl import SSLError from create_feature_vector import create_vector import os import pandas as pd from sklearn.externals import joblib from sklearn.ensemble import RandomForestClassifier from sklearn.ensemble import RandomForestRegressor crime_id = {0: 'BNE Residential ', 1: 'Theft from Vehicle', 2: 'Other Thefts ', 3: 'Mischief ', 4: 'Theft of Vehicle ', 5: 'BNE Commercial '} def gather_time(): print 'Year:\t', year = sys.stdin.readline().strip() month_ok = False while not month_ok: print 'Month:\t', month = sys.stdin.readline().strip() if 12 >= int(month) > 0: month_ok = True else: print 'Nice try, champ...' return int(year), int(month) def gather_address(): print 'Street Number:\t', st_num = sys.stdin.readline().strip() print 'Street Name:\t', st_name = sys.stdin.readline().strip() address = st_num + ' ' + st_name + ', Vancouver, BC, Canada' return address def gather_lat_long(address): print 'Researching lat long for ' + address + '...' got_it = False delay = 1 while not got_it: if delay > 10: print 'could not find address, exiting...' exit() try: sleep(delay) location = geolocator.geocode(address) got_it = True except (GeopyError, SSLError) as e: delay *= 2 got_it = False print '!!! Are you sure you got the right address? Trying again...' print 'Got it!' latitude = "{:.8f}".format(location.latitude) longitude = "{:.8f}".format(location.longitude) print 'LatLong:\t( ' + latitude + ', ' + longitude + ' )' return location.latitude, location.longitude def run_demo(): os.system('clear') print ''' 888 888 .d8888b. d8b 888 888 d88P Y88b Y8P 888 888 888 888 Y88b d88P 8888b. 88888b. 888 888d888 888 88888b.d88b. .d88b. Y88b d88P "88b 888 "88b 888 888P" 888 888 "888 "88b d8P Y8b Y88o88P .d888888 888 888 888 888 888 888 888 888 888 88888888 Y888P 888 888 888 888 Y88b d88P 888 888 888 888 888 Y8b. Y8P "Y888888 888 888 "Y8888P" 888 888 888 888 888 "Y8888 ------------------ https://github.com/wmaciel/van-crime ----------------- ''' year, month = gather_time() address = gather_address() latitude, longitude = gather_lat_long(address) print 'Generating feature vector...', f_vec = create_vector(int(year), int(month), latitude, longitude) if isinstance(f_vec, int): print 'Failed' else: print 'OK' print 'Loading classification model...', clf = joblib.load('../models/random_forest_model.p') print 'OK' print 'Loading regression model...', reg = joblib.load('../models/RandomForestRegressor.p') print 'OK' print '\n\n----- Results -----' print 'Probability of crime type, given that a crime happened:' prob_list = clf.predict_proba(f_vec.as_matrix())[0] for i, p in enumerate(prob_list): print crime_id[i] + '\t' + "{:.2f}".format(p * 100) + '%' print '--------------------------\n' print 'Expected number of crimes to happen:' expected = reg.predict(f_vec.as_matrix())[0] print expected print '--------------------------\n' print 'Expected number of crimes to happen by type:' for i, p in enumerate(prob_list): print crime_id[i] + '\t' + "{:.2f}".format(p * expected) if __name__ == '__main__': geolocator = DataBC() while True: run_demo() print '\npress enter to reset' sys.stdin.readline()
Jan Schwartz is co-founder of Education and Training Solutions, a web-based e-learning company that produces online courses for those in the health and wellness fields. She has worked in education since 1988. I went to massage school to learn more about anatomy. I was a pretty active person and wanted to know more about how my body worked, but I had had enough of college and wanted a more fun way to learn. As it turns out, I also loved learning about massage and how it helps the body heal in certain ways. My background was in education and training so I did gravitate towards education after I was two years out of school. I was able to combine the two careers and I became an instructor and then an education director. I was fortunate in that the owner of the school I attended was big on volunteering. She steered me that direction. I was a committee chair for my local AMTA chapter, and then I became a Commissioner for the Commission on Massage Therapy Accreditation (COMTA), then the Chair of COMTA. I did some volunteer work for NCBTMB and then became a Trustee for the Massage Therapy Foundation. For two years I was the massage therapy ambassador to the Academy of Integrative Health and Medicine (AIHM) and currently I am on the board of the Academic Collaborative for Integrative Health (ACIH) and also a member of the Board of Trustees for the University of Western States in Oregon. Volunteering has been a big part of my career in massage therapy. I stay engaged through my volunteer work, which is a great learning environment as well. I also go to relevant conferences and am now most interested in inter-professional education. That is, education that brings healthcare practitioners of different fields closer together in how they work with patients/clients. When I was Chair of COMTA we received our initial approval from the U.S. Dept. of Education. That was a long time coming. By the time our renewal came around we had developed the first set of competencies for the field of massage therapy. As a group we did some excellent work, that still exists today. I am proud to have been a part of those processes. More recently, I am proud to have been a co-leader of the team that developed competencies for optimal inter-professional practice through the ACIH. The best part for me now is meeting like minded people in the fields of health and wellness; working on how to make our healthcare system more equitable, more affordable and more patient/client centered. Once you get your practice going, whether you practice solo, in a group or at a spa, and feel comfortable with yourself in your new career, I would strongly advise therapists to get involved in local organizations that promote health and wellness. It could be a massage organization, or any other organization that you think would benefit from the expertise you bring. Don’t think too narrowly—it’s a great way to network! We have to carry the message that massage has many benefits to help people optimize their health.

Dataset Card for "c4-code-10k"

10K elements of C4 and 10K elements of codeparrot-clean (Python code).

Note that these are the source datasets used to train my interpretability-friendly models, but this dataset does not reproduce that training mixture. Those models were trained on roughly 83% C4 and 17% Python code by tokens. This dataset has 10K strings of each, which by tokens is about 22M of code and 5M of C4 (the code strings are longer and tokenize less compactly!).
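A rough way to reproduce those token counts, assuming the dataset is hosted on the Hugging Face Hub under this card's name with a single text column, and using the GPT-2 tokenizer as a stand-in (both of these are assumptions):

from datasets import load_dataset
from transformers import AutoTokenizer

ds = load_dataset('NeelNanda/c4-code-10k', split='train')   # hypothetical repo id; adjust to the real Hub path
tok = AutoTokenizer.from_pretrained('gpt2')                 # assumed tokenizer, good enough for a rough count

n_tokens = sum(len(tok(row['text'])['input_ids']) for row in ds)
print(f'{len(ds)} rows, ~{n_tokens / 1e6:.1f}M tokens')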

More Information needed
