text
stringlengths
89
104k
code_tokens
sequence
avg_line_len
float64
7.91
980
score
float64
0
630
def _detect_eggs_in_folder(folder): """ Detect egg distributions located in the given folder. Only direct folder content is considered and subfolders are not searched recursively. """ eggs = {} for x in os.listdir(folder): zip = x.endswith(_zip_ext) if zip: root = x[:-len(_zip_ext)] egg = _Egg.NONE elif x.endswith(_egg_ext): root = x[:-len(_egg_ext)] if os.path.isdir(os.path.join(folder, x)): egg = _Egg.FOLDER else: egg = _Egg.FILE else: continue try: info = eggs[root] except KeyError: eggs[root] = _Egg(os.path.join(folder, root), egg, zip) else: if egg is not _Egg.NONE: info.set_egg(egg) if zip: info.set_zip() return eggs.values()
[ "def", "_detect_eggs_in_folder", "(", "folder", ")", ":", "eggs", "=", "{", "}", "for", "x", "in", "os", ".", "listdir", "(", "folder", ")", ":", "zip", "=", "x", ".", "endswith", "(", "_zip_ext", ")", "if", "zip", ":", "root", "=", "x", "[", ":", "-", "len", "(", "_zip_ext", ")", "]", "egg", "=", "_Egg", ".", "NONE", "elif", "x", ".", "endswith", "(", "_egg_ext", ")", ":", "root", "=", "x", "[", ":", "-", "len", "(", "_egg_ext", ")", "]", "if", "os", ".", "path", ".", "isdir", "(", "os", ".", "path", ".", "join", "(", "folder", ",", "x", ")", ")", ":", "egg", "=", "_Egg", ".", "FOLDER", "else", ":", "egg", "=", "_Egg", ".", "FILE", "else", ":", "continue", "try", ":", "info", "=", "eggs", "[", "root", "]", "except", "KeyError", ":", "eggs", "[", "root", "]", "=", "_Egg", "(", "os", ".", "path", ".", "join", "(", "folder", ",", "root", ")", ",", "egg", ",", "zip", ")", "else", ":", "if", "egg", "is", "not", "_Egg", ".", "NONE", ":", "info", ".", "set_egg", "(", "egg", ")", "if", "zip", ":", "info", ".", "set_zip", "(", ")", "return", "eggs", ".", "values", "(", ")" ]
28.53125
15.59375
def save_avatar(self, image): """Save an avatar as raw image, return new filename. :param image: The image that needs to be saved. """ path = current_app.config['AVATARS_SAVE_PATH'] filename = uuid4().hex + '_raw.png' image.save(os.path.join(path, filename)) return filename
[ "def", "save_avatar", "(", "self", ",", "image", ")", ":", "path", "=", "current_app", ".", "config", "[", "'AVATARS_SAVE_PATH'", "]", "filename", "=", "uuid4", "(", ")", ".", "hex", "+", "'_raw.png'", "image", ".", "save", "(", "os", ".", "path", ".", "join", "(", "path", ",", "filename", ")", ")", "return", "filename" ]
35.888889
12
def remove_accelerator(control, key): """ Removes an accelerator from control. control: The control to affect. key: The key to remove. """ key = str_to_key(key) t = _tables.get(control, []) for a in t: if a[:2] == key: t.remove(a) if t: _tables[control] = t else: del _tables[control] update_accelerators(control) return True return False
[ "def", "remove_accelerator", "(", "control", ",", "key", ")", ":", "key", "=", "str_to_key", "(", "key", ")", "t", "=", "_tables", ".", "get", "(", "control", ",", "[", "]", ")", "for", "a", "in", "t", ":", "if", "a", "[", ":", "2", "]", "==", "key", ":", "t", ".", "remove", "(", "a", ")", "if", "t", ":", "_tables", "[", "control", "]", "=", "t", "else", ":", "del", "_tables", "[", "control", "]", "update_accelerators", "(", "control", ")", "return", "True", "return", "False" ]
18.736842
17.473684
def printPi(self): """ Prints all states state and their steady state probabilities. Not recommended for large state spaces. """ assert self.pi is not None, "Calculate pi before calling printPi()" assert len(self.mapping)>0, "printPi() can only be used in combination with the direct or indirect method. Use print(mc.pi) if your subclass is called mc." for key,state in self.mapping.items(): print(state,self.pi[key])
[ "def", "printPi", "(", "self", ")", ":", "assert", "self", ".", "pi", "is", "not", "None", ",", "\"Calculate pi before calling printPi()\"", "assert", "len", "(", "self", ".", "mapping", ")", ">", "0", ",", "\"printPi() can only be used in combination with the direct or indirect method. Use print(mc.pi) if your subclass is called mc.\"", "for", "key", ",", "state", "in", "self", ".", "mapping", ".", "items", "(", ")", ":", "print", "(", "state", ",", "self", ".", "pi", "[", "key", "]", ")" ]
53.888889
25.888889
def get(self, name: str) -> Optional[ListEntry]: """Return the named entry in the list tree. Args: name: The entry name. """ parts = name.split(self._delimiter) try: node = self._find(self._root, *parts) except KeyError: return None else: marked = self._marked.get(name) return ListEntry(name, node.exists, marked, bool(node.children))
[ "def", "get", "(", "self", ",", "name", ":", "str", ")", "->", "Optional", "[", "ListEntry", "]", ":", "parts", "=", "name", ".", "split", "(", "self", ".", "_delimiter", ")", "try", ":", "node", "=", "self", ".", "_find", "(", "self", ".", "_root", ",", "*", "parts", ")", "except", "KeyError", ":", "return", "None", "else", ":", "marked", "=", "self", ".", "_marked", ".", "get", "(", "name", ")", "return", "ListEntry", "(", "name", ",", "node", ".", "exists", ",", "marked", ",", "bool", "(", "node", ".", "children", ")", ")" ]
29.266667
17.4
def modules(self): """(:class:`productmd.modules.Modules`) -- Compose Modules metadata""" if self._modules is not None: return self._modules paths = [ "metadata/modules.json", ] self._modules = self._load_metadata(paths, productmd.modules.Modules) return self._modules
[ "def", "modules", "(", "self", ")", ":", "if", "self", ".", "_modules", "is", "not", "None", ":", "return", "self", ".", "_modules", "paths", "=", "[", "\"metadata/modules.json\"", ",", "]", "self", ".", "_modules", "=", "self", ".", "_load_metadata", "(", "paths", ",", "productmd", ".", "modules", ".", "Modules", ")", "return", "self", ".", "_modules" ]
33.2
18
def CrearLiqSecundariaBase(self, pto_emision=1, nro_orden=None, nro_contrato=None, cuit_comprador=None, nro_ing_bruto_comprador=None, cod_puerto=None, des_puerto_localidad=None, cod_grano=None, cantidad_tn=None, cuit_vendedor=None, nro_act_vendedor=None, # nuevo!! nro_ing_bruto_vendedor=None, actua_corredor=None, liquida_corredor=None, cuit_corredor=None, nro_ing_bruto_corredor=None, fecha_precio_operacion=None, precio_ref_tn=None, precio_operacion=None, alic_iva_operacion=None, campania_ppal=None, cod_localidad_procedencia=None, cod_prov_procedencia=None, datos_adicionales=None, **kwargs): "Inicializa los datos de una liquidaci贸n secundaria de granos (base)" # creo el diccionario con los campos generales de la liquidaci贸n: self.liquidacion = dict( ptoEmision=pto_emision, nroOrden=nro_orden, numeroContrato=nro_contrato or None, cuitComprador=cuit_comprador, nroIngBrutoComprador=nro_ing_bruto_comprador, codPuerto=cod_puerto, desPuertoLocalidad=des_puerto_localidad, codGrano=cod_grano, cantidadTn=cantidad_tn, cuitVendedor=cuit_vendedor, nroActVendedor=nro_act_vendedor, nroIngBrutoVendedor=nro_ing_bruto_vendedor, actuaCorredor=actua_corredor, liquidaCorredor=liquida_corredor, cuitCorredor=cuit_corredor or None, nroIngBrutoCorredor=nro_ing_bruto_corredor or None, fechaPrecioOperacion=fecha_precio_operacion, precioRefTn=precio_ref_tn, precioOperacion=precio_operacion, alicIvaOperacion=alic_iva_operacion or None, campaniaPPal=campania_ppal, codLocalidad=cod_localidad_procedencia, codProvincia=cod_prov_procedencia, datosAdicionales=datos_adicionales, ) # inicializo las listas que contentran las retenciones y deducciones: self.deducciones = [] self.percepciones = [] self.opcionales = [] self.factura_papel = None return True
[ "def", "CrearLiqSecundariaBase", "(", "self", ",", "pto_emision", "=", "1", ",", "nro_orden", "=", "None", ",", "nro_contrato", "=", "None", ",", "cuit_comprador", "=", "None", ",", "nro_ing_bruto_comprador", "=", "None", ",", "cod_puerto", "=", "None", ",", "des_puerto_localidad", "=", "None", ",", "cod_grano", "=", "None", ",", "cantidad_tn", "=", "None", ",", "cuit_vendedor", "=", "None", ",", "nro_act_vendedor", "=", "None", ",", "# nuevo!!", "nro_ing_bruto_vendedor", "=", "None", ",", "actua_corredor", "=", "None", ",", "liquida_corredor", "=", "None", ",", "cuit_corredor", "=", "None", ",", "nro_ing_bruto_corredor", "=", "None", ",", "fecha_precio_operacion", "=", "None", ",", "precio_ref_tn", "=", "None", ",", "precio_operacion", "=", "None", ",", "alic_iva_operacion", "=", "None", ",", "campania_ppal", "=", "None", ",", "cod_localidad_procedencia", "=", "None", ",", "cod_prov_procedencia", "=", "None", ",", "datos_adicionales", "=", "None", ",", "*", "*", "kwargs", ")", ":", "# creo el diccionario con los campos generales de la liquidaci贸n:", "self", ".", "liquidacion", "=", "dict", "(", "ptoEmision", "=", "pto_emision", ",", "nroOrden", "=", "nro_orden", ",", "numeroContrato", "=", "nro_contrato", "or", "None", ",", "cuitComprador", "=", "cuit_comprador", ",", "nroIngBrutoComprador", "=", "nro_ing_bruto_comprador", ",", "codPuerto", "=", "cod_puerto", ",", "desPuertoLocalidad", "=", "des_puerto_localidad", ",", "codGrano", "=", "cod_grano", ",", "cantidadTn", "=", "cantidad_tn", ",", "cuitVendedor", "=", "cuit_vendedor", ",", "nroActVendedor", "=", "nro_act_vendedor", ",", "nroIngBrutoVendedor", "=", "nro_ing_bruto_vendedor", ",", "actuaCorredor", "=", "actua_corredor", ",", "liquidaCorredor", "=", "liquida_corredor", ",", "cuitCorredor", "=", "cuit_corredor", "or", "None", ",", "nroIngBrutoCorredor", "=", "nro_ing_bruto_corredor", "or", "None", ",", "fechaPrecioOperacion", "=", "fecha_precio_operacion", ",", "precioRefTn", "=", "precio_ref_tn", ",", 
"precioOperacion", "=", "precio_operacion", ",", "alicIvaOperacion", "=", "alic_iva_operacion", "or", "None", ",", "campaniaPPal", "=", "campania_ppal", ",", "codLocalidad", "=", "cod_localidad_procedencia", ",", "codProvincia", "=", "cod_prov_procedencia", ",", "datosAdicionales", "=", "datos_adicionales", ",", ")", "# inicializo las listas que contentran las retenciones y deducciones:", "self", ".", "deducciones", "=", "[", "]", "self", ".", "percepciones", "=", "[", "]", "self", ".", "opcionales", "=", "[", "]", "self", ".", "factura_papel", "=", "None", "return", "True" ]
51.380952
19.428571
def create_user(self, username, password, tags=""): """ Creates a user. :param string username: The name to give to the new user :param string password: Password for the new user :param string tags: Comma-separated list of tags for the user :returns: boolean """ path = Client.urls['users_by_name'] % username body = json.dumps({'password': password, 'tags': tags}) return self._call(path, 'PUT', body=body, headers=Client.json_headers)
[ "def", "create_user", "(", "self", ",", "username", ",", "password", ",", "tags", "=", "\"\"", ")", ":", "path", "=", "Client", ".", "urls", "[", "'users_by_name'", "]", "%", "username", "body", "=", "json", ".", "dumps", "(", "{", "'password'", ":", "password", ",", "'tags'", ":", "tags", "}", ")", "return", "self", ".", "_call", "(", "path", ",", "'PUT'", ",", "body", "=", "body", ",", "headers", "=", "Client", ".", "json_headers", ")" ]
41.384615
16.923077
def plotcommand(cosmology='WMAP5', plotname=None): """ Example ways to interrogate the dataset and plot the commah output """ # Plot the c-M relation as a functon of redshift xarray = 10**(np.arange(1, 15, 0.2)) yval = 'c' # Specify the redshift range zarray = np.arange(0, 5, 0.5) xtitle = r"Halo Mass (M$_{sol}$)" ytitle = r"Concentration" linelabel = "z=" fig = plt.figure() ax = fig.add_subplot(111) ax.set_xlabel(xtitle) ax.set_ylabel(ytitle) plt.ylim([2, 30]) colors = cm.rainbow(np.linspace(0, 1, len(zarray))) for zind, zval in enumerate(zarray): output = commah.run(cosmology=cosmology, zi=zval, Mi=xarray) # Access the column yval from the data file yarray = output[yval].flatten() # Plot each line in turn with different colour ax.plot(xarray, yarray, label=linelabel+str(zval), color=colors[zind]) # Overplot the D08 predictions in black ax.plot(xarray, commah.commah.cduffy(zval, xarray), color="black") ax.set_xscale('log') ax.set_yscale('log') leg = ax.legend(loc=1) # Make box totally transparent leg.get_frame().set_alpha(0) leg.get_frame().set_edgecolor('white') for label in leg.get_texts(): label.set_fontsize('small') # the font size for label in leg.get_lines(): label.set_linewidth(4) # the legend line width if plotname: fig.tight_layout(pad=0.2) print("Plotting to '%s_CM_relation.png'" % (plotname)) fig.savefig(plotname+"_CM_relation.png", dpi=fig.dpi*5) else: plt.show() # Plot the c-z relation as a function of mass (so always Mz=M0) xarray = 10**(np.arange(0, 1, 0.05)) - 1 yval = 'c' # Specify the mass range zarray = 10**np.arange(6, 14, 2) xtitle = r"Redshift" ytitle = r"NFW Concentration" linelabel = r"log$_{10}$ M$_{z}$(M$_{sol}$)=" fig = plt.figure() ax = fig.add_subplot(111) ax.set_xlabel(xtitle) ax.set_ylabel(ytitle) colors = cm.rainbow(np.linspace(0, 1, len(zarray))) for zind, zval in enumerate(zarray): output = commah.run(cosmology=cosmology, zi=xarray, Mi=zval) # Access the column yval from the data file yarray = output[yval].flatten() # Plot each 
line in turn with different colours ax.plot(xarray, yarray, label=linelabel+"{0:.1f}".format(np.log10(zval)), color=colors[zind],) leg = ax.legend(loc=1) # Make box totally transparent leg.get_frame().set_alpha(0) leg.get_frame().set_edgecolor('white') for label in leg.get_texts(): label.set_fontsize('small') # the font size for label in leg.get_lines(): label.set_linewidth(4) # the legend line width if plotname: fig.tight_layout(pad=0.2) print("Plotting to '%s_Cz_relation.png'" % (plotname)) fig.savefig(plotname+"_Cz_relation.png", dpi=fig.dpi*5) else: plt.show() # Plot the zf-z relation for different masses (so always Mz=M0) xarray = 10**(np.arange(0, 1, 0.05)) - 1 yval = 'zf' # Specify the mass range zarray = 10**np.arange(6, 14, 2) xtitle = r"Redshift" ytitle = r"Formation Redshift" linelabel = r"log$_{10}$ M$_{z}$(M$_{sol}$)=" fig = plt.figure() ax = fig.add_subplot(111) ax.set_xlabel(xtitle) ax.set_ylabel(ytitle) colors = cm.rainbow(np.linspace(0, 1, len(zarray))) for zind, zval in enumerate(zarray): output = commah.run(cosmology=cosmology, zi=xarray, Mi=zval) yarray = output[yval].flatten() # Plot each line in turn with different colour ax.plot(xarray, yarray, label=linelabel+"{0:.1f}".format(np.log10(zval)), color=colors[zind],) leg = ax.legend(loc=2) # Make box totally transparent leg.get_frame().set_alpha(0) leg.get_frame().set_edgecolor('white') for label in leg.get_texts(): label.set_fontsize('small') # the font size for label in leg.get_lines(): label.set_linewidth(4) # the legend line width if plotname: fig.tight_layout(pad=0.2) print("Plotting to '%s_zfz_relation.png'" % (plotname)) fig.savefig(plotname+"_zfz_relation.png", dpi=fig.dpi*5) else: plt.show() # Plot the dM/dt-z relation for different masses (so always Mz=M0) xarray = 10**(np.arange(0, 1, 0.05)) - 1 yval = 'dMdt' # Specify the mass range zarray = 10**np.arange(10, 14, 0.5) xtitle = r"log$_{10}$ (1+z)" ytitle = r"log$_{10}$ Accretion Rate M$_{sol}$ yr$^{-1}$" linelabel = r"log$_{10}$ 
M$_z$(M$_{sol}$)=" fig = plt.figure() ax = fig.add_subplot(111) ax.set_xlabel(xtitle) ax.set_ylabel(ytitle) colors = cm.rainbow(np.linspace(0, 1, len(zarray))) cosmo = commah.getcosmo(cosmology) for zind, zval in enumerate(zarray): output = commah.run(cosmology=cosmology, zi=xarray, Mi=zval, com=False, mah=True) yarray = output[yval].flatten() # Plot each line in turn with different colour ax.plot(np.log10(xarray+1.), np.log10(yarray), label=linelabel+"{0:.1f}".format(np.log10(zval)), color=colors[zind],) # Plot the semi-analytic approximate formula from Correa et al 2015b semianalytic_approx = 71.6 * (zval / 1e12) * (cosmo['h'] / 0.7) *\ (-0.24 + 0.75 * (xarray + 1)) * np.sqrt( cosmo['omega_M_0'] * (xarray + 1)**3 + cosmo['omega_lambda_0']) ax.plot(np.log10(xarray + 1), np.log10(semianalytic_approx), color='black') leg = ax.legend(loc=2) # Make box totally transparent leg.get_frame().set_alpha(0) leg.get_frame().set_edgecolor('white') for label in leg.get_texts(): label.set_fontsize('small') # the font size for label in leg.get_lines(): label.set_linewidth(4) # the legend line width if plotname: fig.tight_layout(pad=0.2) print("Plotting to '%s_dMdtz_relation.png'" % (plotname)) fig.savefig(plotname+"_dMdtz_relation.png", dpi=fig.dpi*5) else: plt.show() # Plot the dMdt-M relation as a function of redshift xarray = 10**(np.arange(10, 14, 0.5)) yval = 'dMdt' # Specify the redshift range zarray = np.arange(0, 5, 0.5) xtitle = r"Halo Mass M$_{sol}$" ytitle = r"Accretion Rate M$_{sol}$ yr$^{-1}$" linelabel = "z=" fig = plt.figure() ax = fig.add_subplot(111) ax.set_xlabel(xtitle) ax.set_ylabel(ytitle) colors = cm.rainbow(np.linspace(0, 1, len(zarray))) for zind, zval in enumerate(zarray): output = commah.run(cosmology=cosmology, zi=zval, Mi=xarray, com=False, mah=True) yarray = output[yval].flatten() # Plot each line in turn with different colour ax.plot(xarray, yarray, label=linelabel+str(zval), color=colors[zind],) ax.set_xscale('log') ax.set_yscale('log') leg = 
ax.legend(loc=2) # Make box totally transparent leg.get_frame().set_alpha(0) leg.get_frame().set_edgecolor('white') for label in leg.get_texts(): label.set_fontsize('small') # the font size for label in leg.get_lines(): label.set_linewidth(4) # the legend line width if plotname: fig.tight_layout(pad=0.2) print("Plotting to '%s_MAH_M_relation.png'" % (plotname)) fig.savefig(plotname+"_MAH_M_relation.png", dpi=fig.dpi*5) else: plt.show() # Plot the (dM/M)dt-M relation as a function of redshift xarray = 10**(np.arange(10, 14, 0.5)) yval = 'dMdt' # Specify the redshift range zarray = np.arange(0, 5, 0.5) xtitle = r"Halo Mass M$_{sol}$" ytitle = r"Specific Accretion Rate yr$^{-1}$" linelabel = "z=" fig = plt.figure() ax = fig.add_subplot(111) ax.set_xlabel(xtitle) ax.set_ylabel(ytitle) colors = cm.rainbow(np.linspace(0, 1, len(zarray))) for zind, zval in enumerate(zarray): output = commah.run(cosmology=cosmology, zi=zval, Mi=xarray, mah=True, com=False) yarray = output[yval].flatten() # Plot each line in turn with different colour ax.plot(xarray, yarray/xarray, label=linelabel+str(zval), color=colors[zind],) ax.set_xscale('log') ax.set_yscale('log') leg = ax.legend(loc=1) # Make box totally transparent leg.get_frame().set_alpha(0) leg.get_frame().set_edgecolor('white') for label in leg.get_texts(): label.set_fontsize('small') # the font size for label in leg.get_lines(): label.set_linewidth(4) # the legend line width if plotname: fig.tight_layout(pad=0.2) print("Plotting to '%s_specificMAH_M_relation.png'" % (plotname)) fig.savefig(plotname+"_specificMAH_M_relation.png", dpi=fig.dpi*5) else: plt.show() # Plot the Mz-z relation as a function of mass # (so mass is decreasing to zero as z-> inf) xarray = 10**(np.arange(0, 1, 0.05)) - 1 yval = 'Mz' # Specify the mass range zarray = 10**np.arange(10, 14, 0.5) xtitle = r"Redshift" ytitle = r"M(z) (M$_{sol}$)" linelabel = r"log$_{10}$ M$_{0}$(M$_{sol}$)=" fig = plt.figure() ax = fig.add_subplot(111) ax.set_xlabel(xtitle) 
ax.set_ylabel(ytitle) colors = cm.rainbow(np.linspace(0, 1, len(zarray))) for zind, zval in enumerate(zarray): output = commah.run(cosmology=cosmology, zi=0, Mi=zval, z=xarray) yarray = output[yval].flatten() # Plot each line in turn with different colour ax.plot(xarray, yarray, label=linelabel+"{0:.1f}".format(np.log10(zval)), color=colors[zind],) ax.set_yscale('log') leg = ax.legend(loc=1) # Make box totally transparent leg.get_frame().set_alpha(0) leg.get_frame().set_edgecolor('white') for label in leg.get_texts(): label.set_fontsize('small') # the font size for label in leg.get_lines(): label.set_linewidth(4) # the legend line width if plotname: fig.tight_layout(pad=0.2) print("Plotting to '%s_Mzz_relation.png'" % (plotname)) fig.savefig(plotname+"_Mzz_relation.png", dpi=fig.dpi*5) else: plt.show() # Plot the Mz/M0-z relation as a function of mass xarray = 10**(np.arange(0, 1, 0.02)) - 1 yval = 'Mz' # Specify the mass range zarray = 10**np.arange(10, 14, 0.5) xtitle = r"Redshift" ytitle = r"log$_{10}$ M(z)/M$_{0}$" linelabel = r"log$_{10}$ M$_{0}$(M$_{sol}$)=" fig = plt.figure() ax = fig.add_subplot(111) ax.set_xlabel(xtitle) ax.set_ylabel(ytitle) colors = cm.rainbow(np.linspace(0, 1, len(zarray))) for zind, zval in enumerate(zarray): output = commah.run(cosmology=cosmology, zi=0, Mi=zval, z=xarray) yarray = output[yval].flatten() # Plot each line in turn with different colour ax.plot(xarray, np.log10(yarray/zval), label=linelabel+"{0:.1f}".format(np.log10(zval)), color=colors[zind],) leg = ax.legend(loc=3) # Make box totally transparent leg.get_frame().set_alpha(0) leg.get_frame().set_edgecolor('white') for label in leg.get_texts(): label.set_fontsize('small') # the font size for label in leg.get_lines(): label.set_linewidth(4) # the legend line width if plotname: fig.tight_layout(pad=0.2) print("Plotting to '%s_MzM0z_relation.png'" % (plotname)) fig.savefig(plotname+"_MzM0z_relation.png", dpi=fig.dpi*5) else: plt.show() return("Done")
[ "def", "plotcommand", "(", "cosmology", "=", "'WMAP5'", ",", "plotname", "=", "None", ")", ":", "# Plot the c-M relation as a functon of redshift", "xarray", "=", "10", "**", "(", "np", ".", "arange", "(", "1", ",", "15", ",", "0.2", ")", ")", "yval", "=", "'c'", "# Specify the redshift range", "zarray", "=", "np", ".", "arange", "(", "0", ",", "5", ",", "0.5", ")", "xtitle", "=", "r\"Halo Mass (M$_{sol}$)\"", "ytitle", "=", "r\"Concentration\"", "linelabel", "=", "\"z=\"", "fig", "=", "plt", ".", "figure", "(", ")", "ax", "=", "fig", ".", "add_subplot", "(", "111", ")", "ax", ".", "set_xlabel", "(", "xtitle", ")", "ax", ".", "set_ylabel", "(", "ytitle", ")", "plt", ".", "ylim", "(", "[", "2", ",", "30", "]", ")", "colors", "=", "cm", ".", "rainbow", "(", "np", ".", "linspace", "(", "0", ",", "1", ",", "len", "(", "zarray", ")", ")", ")", "for", "zind", ",", "zval", "in", "enumerate", "(", "zarray", ")", ":", "output", "=", "commah", ".", "run", "(", "cosmology", "=", "cosmology", ",", "zi", "=", "zval", ",", "Mi", "=", "xarray", ")", "# Access the column yval from the data file", "yarray", "=", "output", "[", "yval", "]", ".", "flatten", "(", ")", "# Plot each line in turn with different colour", "ax", ".", "plot", "(", "xarray", ",", "yarray", ",", "label", "=", "linelabel", "+", "str", "(", "zval", ")", ",", "color", "=", "colors", "[", "zind", "]", ")", "# Overplot the D08 predictions in black", "ax", ".", "plot", "(", "xarray", ",", "commah", ".", "commah", ".", "cduffy", "(", "zval", ",", "xarray", ")", ",", "color", "=", "\"black\"", ")", "ax", ".", "set_xscale", "(", "'log'", ")", "ax", ".", "set_yscale", "(", "'log'", ")", "leg", "=", "ax", ".", "legend", "(", "loc", "=", "1", ")", "# Make box totally transparent", "leg", ".", "get_frame", "(", ")", ".", "set_alpha", "(", "0", ")", "leg", ".", "get_frame", "(", ")", ".", "set_edgecolor", "(", "'white'", ")", "for", "label", "in", "leg", ".", "get_texts", "(", ")", ":", "label", ".", "set_fontsize", "(", 
"'small'", ")", "# the font size", "for", "label", "in", "leg", ".", "get_lines", "(", ")", ":", "label", ".", "set_linewidth", "(", "4", ")", "# the legend line width", "if", "plotname", ":", "fig", ".", "tight_layout", "(", "pad", "=", "0.2", ")", "print", "(", "\"Plotting to '%s_CM_relation.png'\"", "%", "(", "plotname", ")", ")", "fig", ".", "savefig", "(", "plotname", "+", "\"_CM_relation.png\"", ",", "dpi", "=", "fig", ".", "dpi", "*", "5", ")", "else", ":", "plt", ".", "show", "(", ")", "# Plot the c-z relation as a function of mass (so always Mz=M0)", "xarray", "=", "10", "**", "(", "np", ".", "arange", "(", "0", ",", "1", ",", "0.05", ")", ")", "-", "1", "yval", "=", "'c'", "# Specify the mass range", "zarray", "=", "10", "**", "np", ".", "arange", "(", "6", ",", "14", ",", "2", ")", "xtitle", "=", "r\"Redshift\"", "ytitle", "=", "r\"NFW Concentration\"", "linelabel", "=", "r\"log$_{10}$ M$_{z}$(M$_{sol}$)=\"", "fig", "=", "plt", ".", "figure", "(", ")", "ax", "=", "fig", ".", "add_subplot", "(", "111", ")", "ax", ".", "set_xlabel", "(", "xtitle", ")", "ax", ".", "set_ylabel", "(", "ytitle", ")", "colors", "=", "cm", ".", "rainbow", "(", "np", ".", "linspace", "(", "0", ",", "1", ",", "len", "(", "zarray", ")", ")", ")", "for", "zind", ",", "zval", "in", "enumerate", "(", "zarray", ")", ":", "output", "=", "commah", ".", "run", "(", "cosmology", "=", "cosmology", ",", "zi", "=", "xarray", ",", "Mi", "=", "zval", ")", "# Access the column yval from the data file", "yarray", "=", "output", "[", "yval", "]", ".", "flatten", "(", ")", "# Plot each line in turn with different colours", "ax", ".", "plot", "(", "xarray", ",", "yarray", ",", "label", "=", "linelabel", "+", "\"{0:.1f}\"", ".", "format", "(", "np", ".", "log10", "(", "zval", ")", ")", ",", "color", "=", "colors", "[", "zind", "]", ",", ")", "leg", "=", "ax", ".", "legend", "(", "loc", "=", "1", ")", "# Make box totally transparent", "leg", ".", "get_frame", "(", ")", ".", "set_alpha", "(", "0", ")", 
"leg", ".", "get_frame", "(", ")", ".", "set_edgecolor", "(", "'white'", ")", "for", "label", "in", "leg", ".", "get_texts", "(", ")", ":", "label", ".", "set_fontsize", "(", "'small'", ")", "# the font size", "for", "label", "in", "leg", ".", "get_lines", "(", ")", ":", "label", ".", "set_linewidth", "(", "4", ")", "# the legend line width", "if", "plotname", ":", "fig", ".", "tight_layout", "(", "pad", "=", "0.2", ")", "print", "(", "\"Plotting to '%s_Cz_relation.png'\"", "%", "(", "plotname", ")", ")", "fig", ".", "savefig", "(", "plotname", "+", "\"_Cz_relation.png\"", ",", "dpi", "=", "fig", ".", "dpi", "*", "5", ")", "else", ":", "plt", ".", "show", "(", ")", "# Plot the zf-z relation for different masses (so always Mz=M0)", "xarray", "=", "10", "**", "(", "np", ".", "arange", "(", "0", ",", "1", ",", "0.05", ")", ")", "-", "1", "yval", "=", "'zf'", "# Specify the mass range", "zarray", "=", "10", "**", "np", ".", "arange", "(", "6", ",", "14", ",", "2", ")", "xtitle", "=", "r\"Redshift\"", "ytitle", "=", "r\"Formation Redshift\"", "linelabel", "=", "r\"log$_{10}$ M$_{z}$(M$_{sol}$)=\"", "fig", "=", "plt", ".", "figure", "(", ")", "ax", "=", "fig", ".", "add_subplot", "(", "111", ")", "ax", ".", "set_xlabel", "(", "xtitle", ")", "ax", ".", "set_ylabel", "(", "ytitle", ")", "colors", "=", "cm", ".", "rainbow", "(", "np", ".", "linspace", "(", "0", ",", "1", ",", "len", "(", "zarray", ")", ")", ")", "for", "zind", ",", "zval", "in", "enumerate", "(", "zarray", ")", ":", "output", "=", "commah", ".", "run", "(", "cosmology", "=", "cosmology", ",", "zi", "=", "xarray", ",", "Mi", "=", "zval", ")", "yarray", "=", "output", "[", "yval", "]", ".", "flatten", "(", ")", "# Plot each line in turn with different colour", "ax", ".", "plot", "(", "xarray", ",", "yarray", ",", "label", "=", "linelabel", "+", "\"{0:.1f}\"", ".", "format", "(", "np", ".", "log10", "(", "zval", ")", ")", ",", "color", "=", "colors", "[", "zind", "]", ",", ")", "leg", "=", "ax", ".", "legend", 
"(", "loc", "=", "2", ")", "# Make box totally transparent", "leg", ".", "get_frame", "(", ")", ".", "set_alpha", "(", "0", ")", "leg", ".", "get_frame", "(", ")", ".", "set_edgecolor", "(", "'white'", ")", "for", "label", "in", "leg", ".", "get_texts", "(", ")", ":", "label", ".", "set_fontsize", "(", "'small'", ")", "# the font size", "for", "label", "in", "leg", ".", "get_lines", "(", ")", ":", "label", ".", "set_linewidth", "(", "4", ")", "# the legend line width", "if", "plotname", ":", "fig", ".", "tight_layout", "(", "pad", "=", "0.2", ")", "print", "(", "\"Plotting to '%s_zfz_relation.png'\"", "%", "(", "plotname", ")", ")", "fig", ".", "savefig", "(", "plotname", "+", "\"_zfz_relation.png\"", ",", "dpi", "=", "fig", ".", "dpi", "*", "5", ")", "else", ":", "plt", ".", "show", "(", ")", "# Plot the dM/dt-z relation for different masses (so always Mz=M0)", "xarray", "=", "10", "**", "(", "np", ".", "arange", "(", "0", ",", "1", ",", "0.05", ")", ")", "-", "1", "yval", "=", "'dMdt'", "# Specify the mass range", "zarray", "=", "10", "**", "np", ".", "arange", "(", "10", ",", "14", ",", "0.5", ")", "xtitle", "=", "r\"log$_{10}$ (1+z)\"", "ytitle", "=", "r\"log$_{10}$ Accretion Rate M$_{sol}$ yr$^{-1}$\"", "linelabel", "=", "r\"log$_{10}$ M$_z$(M$_{sol}$)=\"", "fig", "=", "plt", ".", "figure", "(", ")", "ax", "=", "fig", ".", "add_subplot", "(", "111", ")", "ax", ".", "set_xlabel", "(", "xtitle", ")", "ax", ".", "set_ylabel", "(", "ytitle", ")", "colors", "=", "cm", ".", "rainbow", "(", "np", ".", "linspace", "(", "0", ",", "1", ",", "len", "(", "zarray", ")", ")", ")", "cosmo", "=", "commah", ".", "getcosmo", "(", "cosmology", ")", "for", "zind", ",", "zval", "in", "enumerate", "(", "zarray", ")", ":", "output", "=", "commah", ".", "run", "(", "cosmology", "=", "cosmology", ",", "zi", "=", "xarray", ",", "Mi", "=", "zval", ",", "com", "=", "False", ",", "mah", "=", "True", ")", "yarray", "=", "output", "[", "yval", "]", ".", "flatten", "(", ")", "# Plot each 
line in turn with different colour", "ax", ".", "plot", "(", "np", ".", "log10", "(", "xarray", "+", "1.", ")", ",", "np", ".", "log10", "(", "yarray", ")", ",", "label", "=", "linelabel", "+", "\"{0:.1f}\"", ".", "format", "(", "np", ".", "log10", "(", "zval", ")", ")", ",", "color", "=", "colors", "[", "zind", "]", ",", ")", "# Plot the semi-analytic approximate formula from Correa et al 2015b", "semianalytic_approx", "=", "71.6", "*", "(", "zval", "/", "1e12", ")", "*", "(", "cosmo", "[", "'h'", "]", "/", "0.7", ")", "*", "(", "-", "0.24", "+", "0.75", "*", "(", "xarray", "+", "1", ")", ")", "*", "np", ".", "sqrt", "(", "cosmo", "[", "'omega_M_0'", "]", "*", "(", "xarray", "+", "1", ")", "**", "3", "+", "cosmo", "[", "'omega_lambda_0'", "]", ")", "ax", ".", "plot", "(", "np", ".", "log10", "(", "xarray", "+", "1", ")", ",", "np", ".", "log10", "(", "semianalytic_approx", ")", ",", "color", "=", "'black'", ")", "leg", "=", "ax", ".", "legend", "(", "loc", "=", "2", ")", "# Make box totally transparent", "leg", ".", "get_frame", "(", ")", ".", "set_alpha", "(", "0", ")", "leg", ".", "get_frame", "(", ")", ".", "set_edgecolor", "(", "'white'", ")", "for", "label", "in", "leg", ".", "get_texts", "(", ")", ":", "label", ".", "set_fontsize", "(", "'small'", ")", "# the font size", "for", "label", "in", "leg", ".", "get_lines", "(", ")", ":", "label", ".", "set_linewidth", "(", "4", ")", "# the legend line width", "if", "plotname", ":", "fig", ".", "tight_layout", "(", "pad", "=", "0.2", ")", "print", "(", "\"Plotting to '%s_dMdtz_relation.png'\"", "%", "(", "plotname", ")", ")", "fig", ".", "savefig", "(", "plotname", "+", "\"_dMdtz_relation.png\"", ",", "dpi", "=", "fig", ".", "dpi", "*", "5", ")", "else", ":", "plt", ".", "show", "(", ")", "# Plot the dMdt-M relation as a function of redshift", "xarray", "=", "10", "**", "(", "np", ".", "arange", "(", "10", ",", "14", ",", "0.5", ")", ")", "yval", "=", "'dMdt'", "# Specify the redshift range", "zarray", "=", "np", 
".", "arange", "(", "0", ",", "5", ",", "0.5", ")", "xtitle", "=", "r\"Halo Mass M$_{sol}$\"", "ytitle", "=", "r\"Accretion Rate M$_{sol}$ yr$^{-1}$\"", "linelabel", "=", "\"z=\"", "fig", "=", "plt", ".", "figure", "(", ")", "ax", "=", "fig", ".", "add_subplot", "(", "111", ")", "ax", ".", "set_xlabel", "(", "xtitle", ")", "ax", ".", "set_ylabel", "(", "ytitle", ")", "colors", "=", "cm", ".", "rainbow", "(", "np", ".", "linspace", "(", "0", ",", "1", ",", "len", "(", "zarray", ")", ")", ")", "for", "zind", ",", "zval", "in", "enumerate", "(", "zarray", ")", ":", "output", "=", "commah", ".", "run", "(", "cosmology", "=", "cosmology", ",", "zi", "=", "zval", ",", "Mi", "=", "xarray", ",", "com", "=", "False", ",", "mah", "=", "True", ")", "yarray", "=", "output", "[", "yval", "]", ".", "flatten", "(", ")", "# Plot each line in turn with different colour", "ax", ".", "plot", "(", "xarray", ",", "yarray", ",", "label", "=", "linelabel", "+", "str", "(", "zval", ")", ",", "color", "=", "colors", "[", "zind", "]", ",", ")", "ax", ".", "set_xscale", "(", "'log'", ")", "ax", ".", "set_yscale", "(", "'log'", ")", "leg", "=", "ax", ".", "legend", "(", "loc", "=", "2", ")", "# Make box totally transparent", "leg", ".", "get_frame", "(", ")", ".", "set_alpha", "(", "0", ")", "leg", ".", "get_frame", "(", ")", ".", "set_edgecolor", "(", "'white'", ")", "for", "label", "in", "leg", ".", "get_texts", "(", ")", ":", "label", ".", "set_fontsize", "(", "'small'", ")", "# the font size", "for", "label", "in", "leg", ".", "get_lines", "(", ")", ":", "label", ".", "set_linewidth", "(", "4", ")", "# the legend line width", "if", "plotname", ":", "fig", ".", "tight_layout", "(", "pad", "=", "0.2", ")", "print", "(", "\"Plotting to '%s_MAH_M_relation.png'\"", "%", "(", "plotname", ")", ")", "fig", ".", "savefig", "(", "plotname", "+", "\"_MAH_M_relation.png\"", ",", "dpi", "=", "fig", ".", "dpi", "*", "5", ")", "else", ":", "plt", ".", "show", "(", ")", "# Plot the (dM/M)dt-M relation as 
a function of redshift", "xarray", "=", "10", "**", "(", "np", ".", "arange", "(", "10", ",", "14", ",", "0.5", ")", ")", "yval", "=", "'dMdt'", "# Specify the redshift range", "zarray", "=", "np", ".", "arange", "(", "0", ",", "5", ",", "0.5", ")", "xtitle", "=", "r\"Halo Mass M$_{sol}$\"", "ytitle", "=", "r\"Specific Accretion Rate yr$^{-1}$\"", "linelabel", "=", "\"z=\"", "fig", "=", "plt", ".", "figure", "(", ")", "ax", "=", "fig", ".", "add_subplot", "(", "111", ")", "ax", ".", "set_xlabel", "(", "xtitle", ")", "ax", ".", "set_ylabel", "(", "ytitle", ")", "colors", "=", "cm", ".", "rainbow", "(", "np", ".", "linspace", "(", "0", ",", "1", ",", "len", "(", "zarray", ")", ")", ")", "for", "zind", ",", "zval", "in", "enumerate", "(", "zarray", ")", ":", "output", "=", "commah", ".", "run", "(", "cosmology", "=", "cosmology", ",", "zi", "=", "zval", ",", "Mi", "=", "xarray", ",", "mah", "=", "True", ",", "com", "=", "False", ")", "yarray", "=", "output", "[", "yval", "]", ".", "flatten", "(", ")", "# Plot each line in turn with different colour", "ax", ".", "plot", "(", "xarray", ",", "yarray", "/", "xarray", ",", "label", "=", "linelabel", "+", "str", "(", "zval", ")", ",", "color", "=", "colors", "[", "zind", "]", ",", ")", "ax", ".", "set_xscale", "(", "'log'", ")", "ax", ".", "set_yscale", "(", "'log'", ")", "leg", "=", "ax", ".", "legend", "(", "loc", "=", "1", ")", "# Make box totally transparent", "leg", ".", "get_frame", "(", ")", ".", "set_alpha", "(", "0", ")", "leg", ".", "get_frame", "(", ")", ".", "set_edgecolor", "(", "'white'", ")", "for", "label", "in", "leg", ".", "get_texts", "(", ")", ":", "label", ".", "set_fontsize", "(", "'small'", ")", "# the font size", "for", "label", "in", "leg", ".", "get_lines", "(", ")", ":", "label", ".", "set_linewidth", "(", "4", ")", "# the legend line width", "if", "plotname", ":", "fig", ".", "tight_layout", "(", "pad", "=", "0.2", ")", "print", "(", "\"Plotting to '%s_specificMAH_M_relation.png'\"", "%", "(", 
"plotname", ")", ")", "fig", ".", "savefig", "(", "plotname", "+", "\"_specificMAH_M_relation.png\"", ",", "dpi", "=", "fig", ".", "dpi", "*", "5", ")", "else", ":", "plt", ".", "show", "(", ")", "# Plot the Mz-z relation as a function of mass", "# (so mass is decreasing to zero as z-> inf)", "xarray", "=", "10", "**", "(", "np", ".", "arange", "(", "0", ",", "1", ",", "0.05", ")", ")", "-", "1", "yval", "=", "'Mz'", "# Specify the mass range", "zarray", "=", "10", "**", "np", ".", "arange", "(", "10", ",", "14", ",", "0.5", ")", "xtitle", "=", "r\"Redshift\"", "ytitle", "=", "r\"M(z) (M$_{sol}$)\"", "linelabel", "=", "r\"log$_{10}$ M$_{0}$(M$_{sol}$)=\"", "fig", "=", "plt", ".", "figure", "(", ")", "ax", "=", "fig", ".", "add_subplot", "(", "111", ")", "ax", ".", "set_xlabel", "(", "xtitle", ")", "ax", ".", "set_ylabel", "(", "ytitle", ")", "colors", "=", "cm", ".", "rainbow", "(", "np", ".", "linspace", "(", "0", ",", "1", ",", "len", "(", "zarray", ")", ")", ")", "for", "zind", ",", "zval", "in", "enumerate", "(", "zarray", ")", ":", "output", "=", "commah", ".", "run", "(", "cosmology", "=", "cosmology", ",", "zi", "=", "0", ",", "Mi", "=", "zval", ",", "z", "=", "xarray", ")", "yarray", "=", "output", "[", "yval", "]", ".", "flatten", "(", ")", "# Plot each line in turn with different colour", "ax", ".", "plot", "(", "xarray", ",", "yarray", ",", "label", "=", "linelabel", "+", "\"{0:.1f}\"", ".", "format", "(", "np", ".", "log10", "(", "zval", ")", ")", ",", "color", "=", "colors", "[", "zind", "]", ",", ")", "ax", ".", "set_yscale", "(", "'log'", ")", "leg", "=", "ax", ".", "legend", "(", "loc", "=", "1", ")", "# Make box totally transparent", "leg", ".", "get_frame", "(", ")", ".", "set_alpha", "(", "0", ")", "leg", ".", "get_frame", "(", ")", ".", "set_edgecolor", "(", "'white'", ")", "for", "label", "in", "leg", ".", "get_texts", "(", ")", ":", "label", ".", "set_fontsize", "(", "'small'", ")", "# the font size", "for", "label", "in", "leg", ".", 
"get_lines", "(", ")", ":", "label", ".", "set_linewidth", "(", "4", ")", "# the legend line width", "if", "plotname", ":", "fig", ".", "tight_layout", "(", "pad", "=", "0.2", ")", "print", "(", "\"Plotting to '%s_Mzz_relation.png'\"", "%", "(", "plotname", ")", ")", "fig", ".", "savefig", "(", "plotname", "+", "\"_Mzz_relation.png\"", ",", "dpi", "=", "fig", ".", "dpi", "*", "5", ")", "else", ":", "plt", ".", "show", "(", ")", "# Plot the Mz/M0-z relation as a function of mass", "xarray", "=", "10", "**", "(", "np", ".", "arange", "(", "0", ",", "1", ",", "0.02", ")", ")", "-", "1", "yval", "=", "'Mz'", "# Specify the mass range", "zarray", "=", "10", "**", "np", ".", "arange", "(", "10", ",", "14", ",", "0.5", ")", "xtitle", "=", "r\"Redshift\"", "ytitle", "=", "r\"log$_{10}$ M(z)/M$_{0}$\"", "linelabel", "=", "r\"log$_{10}$ M$_{0}$(M$_{sol}$)=\"", "fig", "=", "plt", ".", "figure", "(", ")", "ax", "=", "fig", ".", "add_subplot", "(", "111", ")", "ax", ".", "set_xlabel", "(", "xtitle", ")", "ax", ".", "set_ylabel", "(", "ytitle", ")", "colors", "=", "cm", ".", "rainbow", "(", "np", ".", "linspace", "(", "0", ",", "1", ",", "len", "(", "zarray", ")", ")", ")", "for", "zind", ",", "zval", "in", "enumerate", "(", "zarray", ")", ":", "output", "=", "commah", ".", "run", "(", "cosmology", "=", "cosmology", ",", "zi", "=", "0", ",", "Mi", "=", "zval", ",", "z", "=", "xarray", ")", "yarray", "=", "output", "[", "yval", "]", ".", "flatten", "(", ")", "# Plot each line in turn with different colour", "ax", ".", "plot", "(", "xarray", ",", "np", ".", "log10", "(", "yarray", "/", "zval", ")", ",", "label", "=", "linelabel", "+", "\"{0:.1f}\"", ".", "format", "(", "np", ".", "log10", "(", "zval", ")", ")", ",", "color", "=", "colors", "[", "zind", "]", ",", ")", "leg", "=", "ax", ".", "legend", "(", "loc", "=", "3", ")", "# Make box totally transparent", "leg", ".", "get_frame", "(", ")", ".", "set_alpha", "(", "0", ")", "leg", ".", "get_frame", "(", ")", ".", 
"set_edgecolor", "(", "'white'", ")", "for", "label", "in", "leg", ".", "get_texts", "(", ")", ":", "label", ".", "set_fontsize", "(", "'small'", ")", "# the font size", "for", "label", "in", "leg", ".", "get_lines", "(", ")", ":", "label", ".", "set_linewidth", "(", "4", ")", "# the legend line width", "if", "plotname", ":", "fig", ".", "tight_layout", "(", "pad", "=", "0.2", ")", "print", "(", "\"Plotting to '%s_MzM0z_relation.png'\"", "%", "(", "plotname", ")", ")", "fig", ".", "savefig", "(", "plotname", "+", "\"_MzM0z_relation.png\"", ",", "dpi", "=", "fig", ".", "dpi", "*", "5", ")", "else", ":", "plt", ".", "show", "(", ")", "return", "(", "\"Done\"", ")" ]
30.606952
19.371658
def kwargs(self):
    """Return the kwargs of this Struct not interpreted by the baseclass.

    Internal fields such as `extends`, `merges`, and `abstract` are
    excluded; those are consumed by SerializableFactory.create and
    Validatable.validate.
    """
    internal = self._INTERNAL_FIELDS
    public = {}
    for key, value in self._kwargs.items():
        if key not in internal:
            public[key] = value
    return public
[ "def", "kwargs", "(", "self", ")", ":", "return", "{", "k", ":", "v", "for", "k", ",", "v", "in", "self", ".", "_kwargs", ".", "items", "(", ")", "if", "k", "not", "in", "self", ".", "_INTERNAL_FIELDS", "}" ]
49.714286
24.428571
def requirement(self) -> FetchRequirement:
    """Indicates the data required to fulfill this search key."""
    key_name = self.key
    if key_name == b'ALL':
        return FetchRequirement.NONE
    if key_name == b'KEYSET':
        # Combine the requirements of every nested key.
        nested = {key.requirement for key in self.filter_key_set}
        return FetchRequirement.reduce(nested)
    if key_name == b'OR':
        left, right = self.filter_key_or
        return FetchRequirement.reduce({left.requirement, right.requirement})
    header_keys = (b'SENTBEFORE', b'SENTON', b'SENTSINCE', b'BCC', b'CC',
                   b'FROM', b'SUBJECT', b'TO', b'HEADER')
    if key_name in header_keys:
        return FetchRequirement.HEADERS
    if key_name in (b'BODY', b'TEXT', b'LARGER', b'SMALLER'):
        return FetchRequirement.BODY
    return FetchRequirement.METADATA
[ "def", "requirement", "(", "self", ")", "->", "FetchRequirement", ":", "key_name", "=", "self", ".", "key", "if", "key_name", "==", "b'ALL'", ":", "return", "FetchRequirement", ".", "NONE", "elif", "key_name", "==", "b'KEYSET'", ":", "keyset_reqs", "=", "{", "key", ".", "requirement", "for", "key", "in", "self", ".", "filter_key_set", "}", "return", "FetchRequirement", ".", "reduce", "(", "keyset_reqs", ")", "elif", "key_name", "==", "b'OR'", ":", "left", ",", "right", "=", "self", ".", "filter_key_or", "key_or_reqs", "=", "{", "left", ".", "requirement", ",", "right", ".", "requirement", "}", "return", "FetchRequirement", ".", "reduce", "(", "key_or_reqs", ")", "elif", "key_name", "in", "(", "b'SENTBEFORE'", ",", "b'SENTON'", ",", "b'SENTSINCE'", ",", "b'BCC'", ",", "b'CC'", ",", "b'FROM'", ",", "b'SUBJECT'", ",", "b'TO'", ",", "b'HEADER'", ")", ":", "return", "FetchRequirement", ".", "HEADERS", "elif", "key_name", "in", "(", "b'BODY'", ",", "b'TEXT'", ",", "b'LARGER'", ",", "b'SMALLER'", ")", ":", "return", "FetchRequirement", ".", "BODY", "else", ":", "return", "FetchRequirement", ".", "METADATA" ]
48.263158
13.473684
def isSameTypeWith(self, other, matchTags=True, matchConstraints=True):
    """Examine |ASN.1| type for equality with other ASN.1 type.

    ASN.1 tags (:py:mod:`~pyasn1.type.tag`) and constraints
    (:py:mod:`~pyasn1.type.constraint`) are examined when carrying out
    ASN.1 types comparison. Python class inheritance relationship is
    NOT considered.

    Parameters
    ----------
    other: a pyasn1 type object
        Class instance representing ASN.1 type.

    Returns
    -------
    : :class:`bool`
        :class:`True` if *other* is |ASN.1| type,
        :class:`False` otherwise.
    """
    # Identity is always a match.
    if self is other:
        return True
    tags_match = not matchTags or self.tagSet == other.tagSet
    constraints_match = (not matchConstraints
                         or self.subtypeSpec == other.subtypeSpec)
    return tags_match and constraints_match
[ "def", "isSameTypeWith", "(", "self", ",", "other", ",", "matchTags", "=", "True", ",", "matchConstraints", "=", "True", ")", ":", "return", "(", "self", "is", "other", "or", "(", "not", "matchTags", "or", "self", ".", "tagSet", "==", "other", ".", "tagSet", ")", "and", "(", "not", "matchConstraints", "or", "self", ".", "subtypeSpec", "==", "other", ".", "subtypeSpec", ")", ")" ]
35.826087
21.304348
def from_file(filename, section='matrix'):
    """
    Generate a matrix from a .ini file.

    Configuration is expected to be in a ``[matrix]`` section.

    :param filename: path to the .ini configuration file
    :param section: name of the config section to read
    :returns: whatever ``from_config`` builds from the parsed section
    """
    # Bug fix: the original passed ``open(filename)`` directly and never
    # closed the handle; use a context manager so it is closed promptly.
    with open(filename) as handle:
        config = parse_config(handle, section=section)
    return from_config(config)
[ "def", "from_file", "(", "filename", ",", "section", "=", "'matrix'", ")", ":", "config", "=", "parse_config", "(", "open", "(", "filename", ")", ",", "section", "=", "section", ")", "return", "from_config", "(", "config", ")" ]
40.333333
14.666667
def _get_globals():
    """Return the current Python interpreter's globals namespace.

    Uses the registered ``_get_globals_callback`` when one is set;
    otherwise falls back to ``__main__``'s namespace.  Under an IPython
    0.12+ kernel (detected via ``__ipythonshell__``), the shell's
    ``user_ns`` is returned instead.
    """
    if _get_globals_callback is not None:
        return _get_globals_callback()
    try:
        from __main__ import __dict__ as namespace
    except ImportError:
        try:
            # The import fails on IronPython
            import __main__
            namespace = __main__.__dict__
        except Exception:
            # Bug fix: the original had a bare ``except:`` whose body was
            # the lone expression ``namespace``, leaving the name unbound
            # and raising NameError below.  Fall back to an empty dict.
            namespace = {}
    shell = namespace.get('__ipythonshell__')
    if shell is not None and hasattr(shell, 'user_ns'):
        # IPython 0.12+ kernel
        return shell.user_ns
    # Plain Python interpreter (the original also had an unreachable
    # trailing ``return namespace`` after this if/else; removed).
    return namespace
[ "def", "_get_globals", "(", ")", ":", "if", "_get_globals_callback", "is", "not", "None", ":", "return", "_get_globals_callback", "(", ")", "else", ":", "try", ":", "from", "__main__", "import", "__dict__", "as", "namespace", "except", "ImportError", ":", "try", ":", "# The import fails on IronPython", "import", "__main__", "namespace", "=", "__main__", ".", "__dict__", "except", ":", "namespace", "shell", "=", "namespace", ".", "get", "(", "'__ipythonshell__'", ")", "if", "shell", "is", "not", "None", "and", "hasattr", "(", "shell", ",", "'user_ns'", ")", ":", "# IPython 0.12+ kernel", "return", "shell", ".", "user_ns", "else", ":", "# Python interpreter", "return", "namespace", "return", "namespace" ]
32.545455
13.5
def add_contacts(self, indices, indices2=None, threshold=0.3, periodic=True,
                 count_contacts=False):
    r"""Register a contact feature between pairs of atoms.

    Parameters
    ----------
    indices :
        Either an ndarray((n, 2), dtype=int) of explicit atom pairs, or
        an iterable of atom indices whose pairwise contacts are computed.
    indices2 : iterable of int, optional
        Only used when `indices` is a flat iterable of integers: restrict
        the contacts to pairs between `indices` and `indices2`.
    threshold : float, optional, default = .3
        Distances (in nm) below this threshold yield feature value 1.0,
        distances above yield 0.0.
    periodic : boolean, default True
        Use the minimum image convention if unitcell info is available.
    count_contacts : boolean, default False
        If True the feature returns the number of formed contacts rather
        than 0/1 values; the output shape becomes (Nt, 1) instead of
        (Nt, nr_of_contacts).

    .. note:: With flat-integer input, :py:obj:`indices` and
        :py:obj:`indices2` are sorted and de-duplicated before being
        converted to a pair list; inspect :py:func:`describe()` to see
        exactly which features were added.
    """
    from .distances import ContactFeature
    pairs = _parse_pairwise_input(
        indices, indices2, self.logger, fname='add_contacts()')
    pairs = self._check_indices(pairs)
    feature = ContactFeature(self.topology, pairs, threshold, periodic,
                             count_contacts)
    self.__add_feature(feature)
[ "def", "add_contacts", "(", "self", ",", "indices", ",", "indices2", "=", "None", ",", "threshold", "=", "0.3", ",", "periodic", "=", "True", ",", "count_contacts", "=", "False", ")", ":", "from", ".", "distances", "import", "ContactFeature", "atom_pairs", "=", "_parse_pairwise_input", "(", "indices", ",", "indices2", ",", "self", ".", "logger", ",", "fname", "=", "'add_contacts()'", ")", "atom_pairs", "=", "self", ".", "_check_indices", "(", "atom_pairs", ")", "f", "=", "ContactFeature", "(", "self", ".", "topology", ",", "atom_pairs", ",", "threshold", ",", "periodic", ",", "count_contacts", ")", "self", ".", "__add_feature", "(", "f", ")" ]
51.071429
35.285714
def dumpf(obj, path):
    """
    Write an nginx configuration to file.

    :param obj obj: nginx object (Conf, Server, Container)
    :param str path: path to nginx configuration on disk
    :returns: path the configuration was written to
    """
    handle = open(path, 'w')
    try:
        dump(obj, handle)
    finally:
        # Ensure the handle is released even if serialization fails.
        handle.close()
    return path
[ "def", "dumpf", "(", "obj", ",", "path", ")", ":", "with", "open", "(", "path", ",", "'w'", ")", "as", "f", ":", "dump", "(", "obj", ",", "f", ")", "return", "path" ]
27.818182
14.545455
def list_from_args(args):
    """
    Flatten list of args

    So as to accept either an array
    Or as many arguments
    For example:
        func(['x', 'y'])
        func('x', 'y')
    """
    # Nothing passed at all.
    if not args:
        return []

    # Every argument must share the first argument's type.
    first_type = type(args[0])
    if not all(isinstance(arg, first_type) for arg in args):
        raise Exception('Expected uniform arguments of same type !')

    if first_type in LIST_TYPES:
        # Iterable form: ['x'], ['y'] -> ['x', 'y']
        flattened = []
        for item in args:
            flattened.extend(item)
        return flattened

    # Scalar form: 'x', 'y' -> ['x', 'y']
    return list(args)
[ "def", "list_from_args", "(", "args", ")", ":", "# Empty args", "if", "not", "args", ":", "return", "[", "]", "# Get argument type", "arg_type", "=", "type", "(", "args", "[", "0", "]", ")", "is_list", "=", "arg_type", "in", "LIST_TYPES", "# Check that the arguments are uniforn (of same type)", "same_type", "=", "all", "(", "[", "isinstance", "(", "arg", ",", "arg_type", ")", "for", "arg", "in", "args", "]", ")", "if", "not", "same_type", ":", "raise", "Exception", "(", "'Expected uniform arguments of same type !'", ")", "# Flatten iterables", "# ['x', 'y'], ...", "if", "is_list", ":", "args_lists", "=", "map", "(", "list", ",", "args", ")", "flattened_args", "=", "sum", "(", "args_lists", ",", "[", "]", ")", "return", "flattened_args", "# Flatten set", "# 'x', 'y'", "return", "list", "(", "args", ")" ]
21.742857
19.171429
def p_file_lics_info_1(self, p):
    # PLY grammar production: the docstring below *is* the rule definition
    # consumed by the parser generator at runtime -- do not reword it.
    """file_lics_info : FILE_LICS_INFO file_lic_info_value"""
    try:
        # p[2] holds the parsed license value; attach it to the current file.
        self.builder.set_file_license_in_file(self.document, p[2])
    except OrderError:
        # A FileName tag must appear before LicenseInfoInFile.
        self.order_error('LicenseInfoInFile', 'FileName', p.lineno(1))
    except SPDXValueError:
        # The license value itself was malformed; flag and log the error.
        self.error = True
        msg = ERROR_MESSAGES['FILE_LICS_INFO_VALUE'].format(p.lineno(1))
        self.logger.log(msg)
[ "def", "p_file_lics_info_1", "(", "self", ",", "p", ")", ":", "try", ":", "self", ".", "builder", ".", "set_file_license_in_file", "(", "self", ".", "document", ",", "p", "[", "2", "]", ")", "except", "OrderError", ":", "self", ".", "order_error", "(", "'LicenseInfoInFile'", ",", "'FileName'", ",", "p", ".", "lineno", "(", "1", ")", ")", "except", "SPDXValueError", ":", "self", ".", "error", "=", "True", "msg", "=", "ERROR_MESSAGES", "[", "'FILE_LICS_INFO_VALUE'", "]", ".", "format", "(", "p", ".", "lineno", "(", "1", ")", ")", "self", ".", "logger", ".", "log", "(", "msg", ")" ]
44.6
17.9
def get_assessment_session_for_bank(self, bank_id, proxy):
    """Gets an ``AssessmentSession`` which is responsible for performing
    assessments for the given bank ``Id``.

    arg:    bank_id (osid.id.Id): the ``Id`` of a bank
    arg:    proxy (osid.proxy.Proxy): a proxy
    return: (osid.assessment.AssessmentSession) - an assessment
            session for this service
    raise:  NotFound - ``bank_id`` not found
    raise:  NullArgument - ``bank_id`` or ``proxy`` is ``null``
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented - ``supports_assessment()`` is ``false``
    *compliance: optional -- This method must be implemented if
    ``supports_assessment()`` is ``true``.*
    """
    if self.supports_assessment():
        # NOTE(review): a NotFound check for the catalog Id should be
        # added here as well, per the original TODO.
        # pylint: disable=no-member
        return sessions.AssessmentSession(bank_id, proxy, self._runtime)
    raise errors.Unimplemented()
[ "def", "get_assessment_session_for_bank", "(", "self", ",", "bank_id", ",", "proxy", ")", ":", "if", "not", "self", ".", "supports_assessment", "(", ")", ":", "raise", "errors", ".", "Unimplemented", "(", ")", "##", "# Also include check to see if the catalog Id is found otherwise raise errors.NotFound", "##", "# pylint: disable=no-member", "return", "sessions", ".", "AssessmentSession", "(", "bank_id", ",", "proxy", ",", "self", ".", "_runtime", ")" ]
48.181818
19.272727
def payment_end(self, account, wallet):
    """
    End a payment session.  Marks the account as available for use in a
    payment session.

    :param account: Account to mark available
    :type account: str

    :param wallet: Wallet to end payment session for
    :type wallet: str

    :raises: :py:exc:`nano.rpc.RPCException`

    >>> rpc.payment_end(
    ...     account="xrb_3e3j5tkog48pnny9dmfzj1r16pg8t1e76dz5tmac6iq689wyjfpi00000000",
    ...     wallet="FFFD1BAEC8EC20814BBB9059B393051AAA8380F9B5A2E6B2489A277D81789EEE"
    ... )
    True
    """
    payload = {
        "account": self._process_value(account, 'account'),
        "wallet": self._process_value(wallet, 'wallet'),
    }
    # An empty response object signals success.
    return self.call('payment_end', payload) == {}
[ "def", "payment_end", "(", "self", ",", "account", ",", "wallet", ")", ":", "account", "=", "self", ".", "_process_value", "(", "account", ",", "'account'", ")", "wallet", "=", "self", ".", "_process_value", "(", "wallet", ",", "'wallet'", ")", "payload", "=", "{", "\"account\"", ":", "account", ",", "\"wallet\"", ":", "wallet", "}", "resp", "=", "self", ".", "call", "(", "'payment_end'", ",", "payload", ")", "return", "resp", "==", "{", "}" ]
29.892857
23.964286
def find_implementations(project, resource, offset, resources=None, task_handle=taskhandle.NullTaskHandle()):
    """Find the places a given method is overridden.

    Finds the places a method is implemented.  Returns a list of
    `Location` objects.

    :param project: the rope project to search
    :param resource: resource containing the method at `offset`
    :param offset: character offset of the method name in `resource`
    :param resources: resources to search; defaults to all python files
        in the project
    :param task_handle: progress reporting handle (a shared
        NullTaskHandle instance is used by default; it is stateless, so
        the mutable-default pitfall does not apply here)
    :raises exceptions.BadIdentifierError: if the name at `offset` does
        not resolve, or resolves to something other than a method
    """
    # Resolve the name under the cursor to a pyname and verify it is a method.
    name = worder.get_name_at(resource, offset)
    this_pymodule = project.get_pymodule(resource)
    pyname = rope.base.evaluate.eval_location(this_pymodule, offset)
    if pyname is not None:
        pyobject = pyname.get_object()
        if not isinstance(pyobject, rope.base.pyobjects.PyFunction) or \
                pyobject.get_kind() != 'method':
            raise exceptions.BadIdentifierError('Not a method!')
    else:
        raise exceptions.BadIdentifierError('Cannot resolve the identifier!')

    # Occurrence filters: returning False rejects an occurrence, while
    # falling through (returning None) leaves the decision to later filters.
    def is_defined(occurrence):
        # Keep only occurrences that are definitions, not mere references.
        if not occurrence.is_defined():
            return False

    def not_self(occurrence):
        # Exclude the method we started from.
        if occurrence.get_pyname().get_object() == pyname.get_object():
            return False
    filters = [is_defined, not_self,
               occurrences.InHierarchyFilter(pyname, True)]
    finder = occurrences.Finder(project, name, filters=filters)
    if resources is None:
        resources = project.get_python_files()
    # One job per resource so the task handle can report progress.
    job_set = task_handle.create_jobset('Finding Implementations',
                                        count=len(resources))
    return _find_locations(finder, resources, job_set)
[ "def", "find_implementations", "(", "project", ",", "resource", ",", "offset", ",", "resources", "=", "None", ",", "task_handle", "=", "taskhandle", ".", "NullTaskHandle", "(", ")", ")", ":", "name", "=", "worder", ".", "get_name_at", "(", "resource", ",", "offset", ")", "this_pymodule", "=", "project", ".", "get_pymodule", "(", "resource", ")", "pyname", "=", "rope", ".", "base", ".", "evaluate", ".", "eval_location", "(", "this_pymodule", ",", "offset", ")", "if", "pyname", "is", "not", "None", ":", "pyobject", "=", "pyname", ".", "get_object", "(", ")", "if", "not", "isinstance", "(", "pyobject", ",", "rope", ".", "base", ".", "pyobjects", ".", "PyFunction", ")", "or", "pyobject", ".", "get_kind", "(", ")", "!=", "'method'", ":", "raise", "exceptions", ".", "BadIdentifierError", "(", "'Not a method!'", ")", "else", ":", "raise", "exceptions", ".", "BadIdentifierError", "(", "'Cannot resolve the identifier!'", ")", "def", "is_defined", "(", "occurrence", ")", ":", "if", "not", "occurrence", ".", "is_defined", "(", ")", ":", "return", "False", "def", "not_self", "(", "occurrence", ")", ":", "if", "occurrence", ".", "get_pyname", "(", ")", ".", "get_object", "(", ")", "==", "pyname", ".", "get_object", "(", ")", ":", "return", "False", "filters", "=", "[", "is_defined", ",", "not_self", ",", "occurrences", ".", "InHierarchyFilter", "(", "pyname", ",", "True", ")", "]", "finder", "=", "occurrences", ".", "Finder", "(", "project", ",", "name", ",", "filters", "=", "filters", ")", "if", "resources", "is", "None", ":", "resources", "=", "project", ".", "get_python_files", "(", ")", "job_set", "=", "task_handle", ".", "create_jobset", "(", "'Finding Implementations'", ",", "count", "=", "len", "(", "resources", ")", ")", "return", "_find_locations", "(", "finder", ",", "resources", ",", "job_set", ")" ]
42.272727
18.787879
def create_absolute_values_structure(layer, fields):
    """Helper function to create the structure for absolute values.

    :param layer: The vector layer.
    :type layer: QgsVectorLayer

    :param fields: List of name field on which we want to aggregate.
    :type fields: list

    :return: The data structure: field index -> (FlatTable, definition key).
    :rtype: dict
    """
    inasafe_fields = layer.keywords['inasafe_fields']
    # Definition keys that represent absolute (count) values.
    count_keys = [definition['key'] for definition in count_fields]
    summaries = {}
    for key, field_name in inasafe_fields.items():
        if key in count_keys:
            index = layer.fields().lookupField(field_name)
            summaries[index] = (FlatTable(*fields), key)
    return summaries
[ "def", "create_absolute_values_structure", "(", "layer", ",", "fields", ")", ":", "# Let's create a structure like :", "# key is the index of the field : (flat table, definition name)", "source_fields", "=", "layer", ".", "keywords", "[", "'inasafe_fields'", "]", "absolute_fields", "=", "[", "field", "[", "'key'", "]", "for", "field", "in", "count_fields", "]", "summaries", "=", "{", "}", "for", "field", "in", "source_fields", ":", "if", "field", "in", "absolute_fields", ":", "field_name", "=", "source_fields", "[", "field", "]", "index", "=", "layer", ".", "fields", "(", ")", ".", "lookupField", "(", "field_name", ")", "flat_table", "=", "FlatTable", "(", "*", "fields", ")", "summaries", "[", "index", "]", "=", "(", "flat_table", ",", "field", ")", "return", "summaries" ]
35.375
15.791667
def _is_at_qry_end(self, nucmer_hit): '''Returns True iff the hit is "close enough" to the end of the query sequence''' hit_coords = nucmer_hit.qry_coords() return hit_coords.end >= nucmer_hit.qry_length - self.qry_end_tolerance
[ "def", "_is_at_qry_end", "(", "self", ",", "nucmer_hit", ")", ":", "hit_coords", "=", "nucmer_hit", ".", "qry_coords", "(", ")", "return", "hit_coords", ".", "end", ">=", "nucmer_hit", ".", "qry_length", "-", "self", ".", "qry_end_tolerance" ]
62.25
23.75
def _legacy_pubs(buf):
    """SSH v1 public keys are not supported."""
    remaining = buf.read()
    if remaining:
        log.warning('skipping leftover: %r', remaining)
    # Reply with an SSH_AGENT_RSA_IDENTITIES_ANSWER carrying zero keys,
    # since SSH v1 identities are never offered.
    return util.frame(
        util.pack('B', msg_code('SSH_AGENT_RSA_IDENTITIES_ANSWER')),
        util.pack('L', 0))
[ "def", "_legacy_pubs", "(", "buf", ")", ":", "leftover", "=", "buf", ".", "read", "(", ")", "if", "leftover", ":", "log", ".", "warning", "(", "'skipping leftover: %r'", ",", "leftover", ")", "code", "=", "util", ".", "pack", "(", "'B'", ",", "msg_code", "(", "'SSH_AGENT_RSA_IDENTITIES_ANSWER'", ")", ")", "num", "=", "util", ".", "pack", "(", "'L'", ",", "0", ")", "# no SSH v1 keys", "return", "util", ".", "frame", "(", "code", ",", "num", ")" ]
38.875
14.25
def _analyze(self): """ works out the updates to be performed """ if self.value is None or self.value == self.previous: pass elif self._operation == "add": self._additions = self.value elif self._operation == "remove": self._removals = self.value elif self.previous is None: self._assignments = self.value else: # partial update time self._additions = (self.value - self.previous) or None self._removals = (self.previous - self.value) or None self._analyzed = True
[ "def", "_analyze", "(", "self", ")", ":", "if", "self", ".", "value", "is", "None", "or", "self", ".", "value", "==", "self", ".", "previous", ":", "pass", "elif", "self", ".", "_operation", "==", "\"add\"", ":", "self", ".", "_additions", "=", "self", ".", "value", "elif", "self", ".", "_operation", "==", "\"remove\"", ":", "self", ".", "_removals", "=", "self", ".", "value", "elif", "self", ".", "previous", "is", "None", ":", "self", ".", "_assignments", "=", "self", ".", "value", "else", ":", "# partial update time", "self", ".", "_additions", "=", "(", "self", ".", "value", "-", "self", ".", "previous", ")", "or", "None", "self", ".", "_removals", "=", "(", "self", ".", "previous", "-", "self", ".", "value", ")", "or", "None", "self", ".", "_analyzed", "=", "True" ]
39.333333
11.533333
def _clean_css(self): """ Returns the cleaned CSS :param stylesheet: The Stylesheet object to parse :type stylesheet: tinycss.css21.Stylesheet """ # Init the cleaned CSS rules and contents string css_rules = [] # For every rule in the CSS for rule in self.stylesheet.rules: try: # Clean the CSS rule cleaned_rule = self._clean_rule(rule) # Append the rule to matched CSS rules if cleaned_rule is not None: css_rules.append(cleaned_rule) except: # On error, assume the rule matched the tree css_rules.append(rule) return self._build_css(css_rules)
[ "def", "_clean_css", "(", "self", ")", ":", "# Init the cleaned CSS rules and contents string", "css_rules", "=", "[", "]", "# For every rule in the CSS", "for", "rule", "in", "self", ".", "stylesheet", ".", "rules", ":", "try", ":", "# Clean the CSS rule", "cleaned_rule", "=", "self", ".", "_clean_rule", "(", "rule", ")", "# Append the rule to matched CSS rules", "if", "cleaned_rule", "is", "not", "None", ":", "css_rules", ".", "append", "(", "cleaned_rule", ")", "except", ":", "# On error, assume the rule matched the tree", "css_rules", ".", "append", "(", "rule", ")", "return", "self", ".", "_build_css", "(", "css_rules", ")" ]
28.730769
17.269231
def encode_hook(self, hook, msg):
    """
    Encodes a commit hook dict into the protobuf message. Used in
    bucket properties.

    :param hook: the hook to encode
    :type hook: dict
    :param msg: the protobuf message to fill
    :type msg: riak.pb.riak_pb2.RpbCommitHook
    :rtype riak.pb.riak_pb2.RpbCommitHook
    """
    if 'name' not in hook:
        # Module/function style hook.
        self.encode_modfun(hook, msg.modfun)
    else:
        # Named hook.
        msg.name = str_to_bytes(hook['name'])
    return msg
[ "def", "encode_hook", "(", "self", ",", "hook", ",", "msg", ")", ":", "if", "'name'", "in", "hook", ":", "msg", ".", "name", "=", "str_to_bytes", "(", "hook", "[", "'name'", "]", ")", "else", ":", "self", ".", "encode_modfun", "(", "hook", ",", "msg", ".", "modfun", ")", "return", "msg" ]
31.8125
13.0625
def read(self, frames, raw=False):
    """Read samples from an input stream.

    The function does not return until the required number of frames
    has been read. This may involve waiting for the operating system
    to supply the data.

    If raw data is requested, the raw cffi data buffer is returned.
    Otherwise, a numpy array of the appropriate dtype with one column
    per channel is returned.

    :param frames: number of frames to read (blocks until available)
    :param raw: if True, return the cffi buffer instead of a numpy array
    """
    # Input-side channel count and dtype (the _split helpers separate
    # input/output halves of the stream configuration).
    channels, _ = _split(self.channels)
    dtype, _ = _split(self.dtype)
    # Allocate a C buffer sized for frames * channels samples.
    data = ffi.new("signed char[]", channels * dtype.itemsize * frames)
    # Blocking PortAudio read; _handle_error raises on error codes.
    self._handle_error(_pa.Pa_ReadStream(self._stream, data, frames))
    if not raw:
        # Reinterpret the C buffer as a (frames, channels) numpy array;
        # np.frombuffer wraps the buffer without copying.
        data = np.frombuffer(ffi.buffer(data), dtype=dtype)
        data.shape = frames, channels
    return data
[ "def", "read", "(", "self", ",", "frames", ",", "raw", "=", "False", ")", ":", "channels", ",", "_", "=", "_split", "(", "self", ".", "channels", ")", "dtype", ",", "_", "=", "_split", "(", "self", ".", "dtype", ")", "data", "=", "ffi", ".", "new", "(", "\"signed char[]\"", ",", "channels", "*", "dtype", ".", "itemsize", "*", "frames", ")", "self", ".", "_handle_error", "(", "_pa", ".", "Pa_ReadStream", "(", "self", ".", "_stream", ",", "data", ",", "frames", ")", ")", "if", "not", "raw", ":", "data", "=", "np", ".", "frombuffer", "(", "ffi", ".", "buffer", "(", "data", ")", ",", "dtype", "=", "dtype", ")", "data", ".", "shape", "=", "frames", ",", "channels", "return", "data" ]
40.35
18.65
def info(self):
    """list of tuples with QPImage meta data"""
    # Meta data entries first ...
    entries = [(key, self.meta[key]) for key in self.meta]
    # ... then background-correction info from amplitude and phase data.
    entries += self._amp.info
    entries += self._pha.info
    return entries
[ "def", "info", "(", "self", ")", ":", "info", "=", "[", "]", "# meta data", "meta", "=", "self", ".", "meta", "for", "key", "in", "meta", ":", "info", ".", "append", "(", "(", "key", ",", "self", ".", "meta", "[", "key", "]", ")", ")", "# background correction", "for", "imdat", "in", "[", "self", ".", "_amp", ",", "self", ".", "_pha", "]", ":", "info", "+=", "imdat", ".", "info", "return", "info" ]
29.090909
13.727273
def make_feature_dict(feature_sequence): """A feature dict is a convenient way to organize a sequence of Feature object (which you have got, e.g., from parse_GFF). The function returns a dict with all the feature types as keys. Each value of this dict is again a dict, now of feature names. The values of this dict is a list of feature. An example makes this clear. Let's say you load the C. elegans GTF file from Ensemble and make a feature dict: >>> worm_features_dict = HTSeq.make_feature_dict(HTSeq.parse_GFF( ... "test_data/Caenorhabditis_elegans.WS200.55.gtf.gz")) (This command may take a few minutes to deal with the 430,000 features in the GTF file. Note that you may need a lot of RAM if you have millions of features.) Then, you can simply access, say, exon 0 of gene "F08E10.4" as follows: >>> worm_features_dict[ 'exon' ][ 'F08E10.4' ][ 0 ] <GenomicFeature: exon 'F08E10.4' at V: 17479353 -> 17479001 (strand '-')> """ res = {} for f in feature_sequence: if f.type not in res: res[f.type] = {} res_ftype = res[f.type] if f.name not in res_ftype: res_ftype[f.name] = [f] else: res_ftype[f.name].append(f) return res
[ "def", "make_feature_dict", "(", "feature_sequence", ")", ":", "res", "=", "{", "}", "for", "f", "in", "feature_sequence", ":", "if", "f", ".", "type", "not", "in", "res", ":", "res", "[", "f", ".", "type", "]", "=", "{", "}", "res_ftype", "=", "res", "[", "f", ".", "type", "]", "if", "f", ".", "name", "not", "in", "res_ftype", ":", "res_ftype", "[", "f", ".", "name", "]", "=", "[", "f", "]", "else", ":", "res_ftype", "[", "f", ".", "name", "]", ".", "append", "(", "f", ")", "return", "res" ]
37.848485
22.757576
def load_locate_library(candidates, cygwin_lib, name, win_cls=None, cygwin_cls=None, others_cls=None, find_library=None, check_symbols=None): """Locates and loads a library. Returns: the loaded library arguments: * candidates -- candidates list for locate_library() * cygwin_lib -- name of the cygwin library * name -- lib identifier (for logging). Defaults to None. * win_cls -- class that is used to instantiate the library on win32 platforms. Defaults to None (-> ctypes.CDLL). * cygwin_cls -- library class for cygwin platforms. Defaults to None (-> ctypes.CDLL). * others_cls -- library class for all other platforms. Defaults to None (-> ctypes.CDLL). * find_library -- see locate_library(). Defaults to None. * check_symbols -- either None or a list of symbols that the loaded lib must provide (hasattr(<>)) in order to be considered valid. LibraryMissingSymbolsException is raised if any symbol is missing. raises: * NoLibraryCandidatesException * LibraryNotFoundException * LibraryNotLoadedException * LibraryMissingSymbolsException """ if sys.platform == 'cygwin': if cygwin_lib: loaded_lib = load_library(cygwin_lib, name, cygwin_cls) else: raise NoLibraryCandidatesException(name) elif candidates: lib = locate_library(candidates, find_library) if lib: if sys.platform == 'win32': loaded_lib = load_library(lib, name, win_cls) else: loaded_lib = load_library(lib, name, others_cls) else: _LOGGER.error('%r could not be found', (name or candidates)) raise LibraryNotFoundException(name) else: raise NoLibraryCandidatesException(name) if loaded_lib is None: raise LibraryNotLoadedException(name) elif check_symbols: symbols_missing = [ s for s in check_symbols if not hasattr(loaded_lib, s) ] if symbols_missing: msg = ('%r, missing symbols: %r', lib, symbols_missing ) _LOGGER.error(msg) raise LibraryMissingSymbolsException(lib) else: return loaded_lib else: return loaded_lib
[ "def", "load_locate_library", "(", "candidates", ",", "cygwin_lib", ",", "name", ",", "win_cls", "=", "None", ",", "cygwin_cls", "=", "None", ",", "others_cls", "=", "None", ",", "find_library", "=", "None", ",", "check_symbols", "=", "None", ")", ":", "if", "sys", ".", "platform", "==", "'cygwin'", ":", "if", "cygwin_lib", ":", "loaded_lib", "=", "load_library", "(", "cygwin_lib", ",", "name", ",", "cygwin_cls", ")", "else", ":", "raise", "NoLibraryCandidatesException", "(", "name", ")", "elif", "candidates", ":", "lib", "=", "locate_library", "(", "candidates", ",", "find_library", ")", "if", "lib", ":", "if", "sys", ".", "platform", "==", "'win32'", ":", "loaded_lib", "=", "load_library", "(", "lib", ",", "name", ",", "win_cls", ")", "else", ":", "loaded_lib", "=", "load_library", "(", "lib", ",", "name", ",", "others_cls", ")", "else", ":", "_LOGGER", ".", "error", "(", "'%r could not be found'", ",", "(", "name", "or", "candidates", ")", ")", "raise", "LibraryNotFoundException", "(", "name", ")", "else", ":", "raise", "NoLibraryCandidatesException", "(", "name", ")", "if", "loaded_lib", "is", "None", ":", "raise", "LibraryNotLoadedException", "(", "name", ")", "elif", "check_symbols", ":", "symbols_missing", "=", "[", "s", "for", "s", "in", "check_symbols", "if", "not", "hasattr", "(", "loaded_lib", ",", "s", ")", "]", "if", "symbols_missing", ":", "msg", "=", "(", "'%r, missing symbols: %r'", ",", "lib", ",", "symbols_missing", ")", "_LOGGER", ".", "error", "(", "msg", ")", "raise", "LibraryMissingSymbolsException", "(", "lib", ")", "else", ":", "return", "loaded_lib", "else", ":", "return", "loaded_lib" ]
39.442623
20.229508
def update_account(self, email=None, company_name=None, first_name=None, last_name=None, address=None, postal_code=None, city=None, state=None, country=None, phone=None): """ :: POST /:login :param email: Email address :type email: :py:class:`basestring` :param company_name: Company name :type company_name: :py:class:`basestring` :param first_name: First name :type first_name: :py:class:`basestring` :param last_name: Last name :type last_name: :py:class:`basestring` :param address: Address :type address: :py:class:`basestring` :param postal_code: Postal code :type postal_code: :py:class:`basestring` :param city: City :type city: :py:class:`basestring` :param state: State :type state: :py:class:`basestring` :param country: Country :type country: :py:class:`basestring` :param phone: Phone :type phone: :py:class:`basestring` :Returns: a dictionary with updated account info :rtype: :py:class:`dict` """ params = {} if email: params['email'] = email if company_name: params['companyName'] = company_name if first_name: params['firstName'] = first_name if last_name: params['lastName'] = last_name if address: params['address'] = address if postal_code: params['postalCode'] = postal_code if city: params['city'] = city if state: params['state'] = state if country: params['country'] = country if phone: params['phone'] = phone j, _ = self.request('POST', '', params=params) return j
[ "def", "update_account", "(", "self", ",", "email", "=", "None", ",", "company_name", "=", "None", ",", "first_name", "=", "None", ",", "last_name", "=", "None", ",", "address", "=", "None", ",", "postal_code", "=", "None", ",", "city", "=", "None", ",", "state", "=", "None", ",", "country", "=", "None", ",", "phone", "=", "None", ")", ":", "params", "=", "{", "}", "if", "email", ":", "params", "[", "'email'", "]", "=", "email", "if", "company_name", ":", "params", "[", "'companyName'", "]", "=", "company_name", "if", "first_name", ":", "params", "[", "'firstName'", "]", "=", "first_name", "if", "last_name", ":", "params", "[", "'lastName'", "]", "=", "last_name", "if", "address", ":", "params", "[", "'address'", "]", "=", "address", "if", "postal_code", ":", "params", "[", "'postalCode'", "]", "=", "postal_code", "if", "city", ":", "params", "[", "'city'", "]", "=", "city", "if", "state", ":", "params", "[", "'state'", "]", "=", "state", "if", "country", ":", "params", "[", "'country'", "]", "=", "country", "if", "phone", ":", "params", "[", "'phone'", "]", "=", "phone", "j", ",", "_", "=", "self", ".", "request", "(", "'POST'", ",", "''", ",", "params", "=", "params", ")", "return", "j" ]
29.625
15.0625
def changed_bytes(self, other): """ Gets the set of changed bytes between self and other. """ changes = set() l.warning("FastMemory.changed_bytes(): This implementation is very slow and only for debug purposes.") for addr,v in self._contents.items(): for i in range(self.width): other_byte = other.load(addr+i, 1) our_byte = v.get_byte(i) if other_byte is our_byte: changes.add(addr+i) return changes
[ "def", "changed_bytes", "(", "self", ",", "other", ")", ":", "changes", "=", "set", "(", ")", "l", ".", "warning", "(", "\"FastMemory.changed_bytes(): This implementation is very slow and only for debug purposes.\"", ")", "for", "addr", ",", "v", "in", "self", ".", "_contents", ".", "items", "(", ")", ":", "for", "i", "in", "range", "(", "self", ".", "width", ")", ":", "other_byte", "=", "other", ".", "load", "(", "addr", "+", "i", ",", "1", ")", "our_byte", "=", "v", ".", "get_byte", "(", "i", ")", "if", "other_byte", "is", "our_byte", ":", "changes", ".", "add", "(", "addr", "+", "i", ")", "return", "changes" ]
32.75
17.125
def LighterColor(self, level): '''Create a new instance based on this one but lighter. Parameters: :level: The amount by which the color should be lightened to produce the new one [0...1]. Returns: A grapefruit.Color instance. >>> Color.NewFromHsl(30, 1, 0.5).LighterColor(0.25) (1.0, 0.75, 0.5, 1.0) >>> Color.NewFromHsl(30, 1, 0.5).LighterColor(0.25).hsl (30, 1, 0.75) ''' h, s, l = self.__hsl return Color((h, s, min(l + level, 1)), 'hsl', self.__a, self.__wref)
[ "def", "LighterColor", "(", "self", ",", "level", ")", ":", "h", ",", "s", ",", "l", "=", "self", ".", "__hsl", "return", "Color", "(", "(", "h", ",", "s", ",", "min", "(", "l", "+", "level", ",", "1", ")", ")", ",", "'hsl'", ",", "self", ".", "__a", ",", "self", ".", "__wref", ")" ]
27.315789
24.684211
def overlay(main_parent_node, overlay_parent_node, eof_action='repeat', **kwargs): """Overlay one video on top of another. Args: x: Set the expression for the x coordinates of the overlaid video on the main video. Default value is 0. In case the expression is invalid, it is set to a huge value (meaning that the overlay will not be displayed within the output visible area). y: Set the expression for the y coordinates of the overlaid video on the main video. Default value is 0. In case the expression is invalid, it is set to a huge value (meaning that the overlay will not be displayed within the output visible area). eof_action: The action to take when EOF is encountered on the secondary input; it accepts one of the following values: * ``repeat``: Repeat the last frame (the default). * ``endall``: End both streams. * ``pass``: Pass the main input through. eval: Set when the expressions for x, and y are evaluated. It accepts the following values: * ``init``: only evaluate expressions once during the filter initialization or when a command is processed * ``frame``: evaluate expressions for each incoming frame Default value is ``frame``. shortest: If set to 1, force the output to terminate when the shortest input terminates. Default value is 0. format: Set the format for the output video. It accepts the following values: * ``yuv420``: force YUV420 output * ``yuv422``: force YUV422 output * ``yuv444``: force YUV444 output * ``rgb``: force packed RGB output * ``gbrp``: force planar RGB output Default value is ``yuv420``. rgb (deprecated): If set to 1, force the filter to accept inputs in the RGB color space. Default value is 0. This option is deprecated, use format instead. repeatlast: If set to 1, force the filter to draw the last overlay frame over the main input until the end of the stream. A value of 0 disables this behavior. Default value is 1. 
Official documentation: `overlay <https://ffmpeg.org/ffmpeg-filters.html#overlay-1>`__ """ kwargs['eof_action'] = eof_action return FilterNode([main_parent_node, overlay_parent_node], overlay.__name__, kwargs=kwargs, max_inputs=2).stream()
[ "def", "overlay", "(", "main_parent_node", ",", "overlay_parent_node", ",", "eof_action", "=", "'repeat'", ",", "*", "*", "kwargs", ")", ":", "kwargs", "[", "'eof_action'", "]", "=", "eof_action", "return", "FilterNode", "(", "[", "main_parent_node", ",", "overlay_parent_node", "]", ",", "overlay", ".", "__name__", ",", "kwargs", "=", "kwargs", ",", "max_inputs", "=", "2", ")", ".", "stream", "(", ")" ]
54.088889
32.133333
def clean(self, list_article_candidates): """Iterates over each article_candidate and cleans every extracted data. :param list_article_candidates: A list, the list of ArticleCandidate-Objects which have been extracted :return: A list, the list with the cleaned ArticleCandidate-Objects """ # Save cleaned article_candidates in results. results = [] for article_candidate in list_article_candidates: article_candidate.title = self.do_cleaning(article_candidate.title) article_candidate.description = self.do_cleaning(article_candidate.description) article_candidate.text = self.do_cleaning(article_candidate.text) article_candidate.topimage = self.do_cleaning(article_candidate.topimage) article_candidate.author = self.do_cleaning(article_candidate.author) article_candidate.publish_date = self.do_cleaning(article_candidate.publish_date) results.append(article_candidate) return results
[ "def", "clean", "(", "self", ",", "list_article_candidates", ")", ":", "# Save cleaned article_candidates in results.", "results", "=", "[", "]", "for", "article_candidate", "in", "list_article_candidates", ":", "article_candidate", ".", "title", "=", "self", ".", "do_cleaning", "(", "article_candidate", ".", "title", ")", "article_candidate", ".", "description", "=", "self", ".", "do_cleaning", "(", "article_candidate", ".", "description", ")", "article_candidate", ".", "text", "=", "self", ".", "do_cleaning", "(", "article_candidate", ".", "text", ")", "article_candidate", ".", "topimage", "=", "self", ".", "do_cleaning", "(", "article_candidate", ".", "topimage", ")", "article_candidate", ".", "author", "=", "self", ".", "do_cleaning", "(", "article_candidate", ".", "author", ")", "article_candidate", ".", "publish_date", "=", "self", ".", "do_cleaning", "(", "article_candidate", ".", "publish_date", ")", "results", ".", "append", "(", "article_candidate", ")", "return", "results" ]
51
30.25
def cmd_p4a(self, *args): ''' Run p4a commands. Args must come after --, or use --alias to make an alias ''' self.check_requirements() self.install_platform() args = args[0] if args and args[0] == '--alias': print('To set up p4a in this shell session, execute:') print(' alias p4a=$(buildozer {} p4a --alias 2>&1 >/dev/null)' .format(self.targetname)) sys.stderr.write('PYTHONPATH={} {}\n'.format(self.pa_dir, self._p4a_cmd)) else: self._p4a(' '.join(args) if args else '')
[ "def", "cmd_p4a", "(", "self", ",", "*", "args", ")", ":", "self", ".", "check_requirements", "(", ")", "self", ".", "install_platform", "(", ")", "args", "=", "args", "[", "0", "]", "if", "args", "and", "args", "[", "0", "]", "==", "'--alias'", ":", "print", "(", "'To set up p4a in this shell session, execute:'", ")", "print", "(", "' alias p4a=$(buildozer {} p4a --alias 2>&1 >/dev/null)'", ".", "format", "(", "self", ".", "targetname", ")", ")", "sys", ".", "stderr", ".", "write", "(", "'PYTHONPATH={} {}\\n'", ".", "format", "(", "self", ".", "pa_dir", ",", "self", ".", "_p4a_cmd", ")", ")", "else", ":", "self", ".", "_p4a", "(", "' '", ".", "join", "(", "args", ")", "if", "args", "else", "''", ")" ]
40
18.4
def find_user(session, username): """Find user by name - returns user ID.""" resp = _make_request(session, FIND_USER_URL, username) if not resp: raise VooblyError('user not found') try: return int(resp[0]['uid']) except ValueError: raise VooblyError('user not found')
[ "def", "find_user", "(", "session", ",", "username", ")", ":", "resp", "=", "_make_request", "(", "session", ",", "FIND_USER_URL", ",", "username", ")", "if", "not", "resp", ":", "raise", "VooblyError", "(", "'user not found'", ")", "try", ":", "return", "int", "(", "resp", "[", "0", "]", "[", "'uid'", "]", ")", "except", "ValueError", ":", "raise", "VooblyError", "(", "'user not found'", ")" ]
33.666667
12.333333
def last_modified(self): """ The last modified time of the requirement's source distribution archive(s) (a number). The value of this property is based on the :attr:`related_archives` property. If no related archives are found the current time is reported. In the balance between not invalidating cached binary distributions enough and invalidating them too frequently, this property causes the latter to happen. """ mtimes = list(map(os.path.getmtime, self.related_archives)) return max(mtimes) if mtimes else time.time()
[ "def", "last_modified", "(", "self", ")", ":", "mtimes", "=", "list", "(", "map", "(", "os", ".", "path", ".", "getmtime", ",", "self", ".", "related_archives", ")", ")", "return", "max", "(", "mtimes", ")", "if", "mtimes", "else", "time", ".", "time", "(", ")" ]
49.333333
23.5
def unfollow(user, obj, send_action=False, flag=''): """ Removes a "follow" relationship. Set ``send_action`` to ``True`` (``False is default) to also send a ``<user> stopped following <object>`` action signal. Pass a string value to ``flag`` to determine which type of "follow" relationship you want to remove. Example:: unfollow(request.user, other_user) unfollow(request.user, other_user, flag='watching') """ check(obj) qs = apps.get_model('actstream', 'follow').objects.filter( user=user, object_id=obj.pk, content_type=ContentType.objects.get_for_model(obj) ) if flag: qs = qs.filter(flag=flag) qs.delete() if send_action: if not flag: action.send(user, verb=_('stopped following'), target=obj) else: action.send(user, verb=_('stopped %s' % flag), target=obj)
[ "def", "unfollow", "(", "user", ",", "obj", ",", "send_action", "=", "False", ",", "flag", "=", "''", ")", ":", "check", "(", "obj", ")", "qs", "=", "apps", ".", "get_model", "(", "'actstream'", ",", "'follow'", ")", ".", "objects", ".", "filter", "(", "user", "=", "user", ",", "object_id", "=", "obj", ".", "pk", ",", "content_type", "=", "ContentType", ".", "objects", ".", "get_for_model", "(", "obj", ")", ")", "if", "flag", ":", "qs", "=", "qs", ".", "filter", "(", "flag", "=", "flag", ")", "qs", ".", "delete", "(", ")", "if", "send_action", ":", "if", "not", "flag", ":", "action", ".", "send", "(", "user", ",", "verb", "=", "_", "(", "'stopped following'", ")", ",", "target", "=", "obj", ")", "else", ":", "action", ".", "send", "(", "user", ",", "verb", "=", "_", "(", "'stopped %s'", "%", "flag", ")", ",", "target", "=", "obj", ")" ]
30.172414
24.448276
def is_collection(item): """ Returns True if the item is a collection class: list, tuple, set, frozenset or any other class that resembles one of these (using abstract base classes). >>> is_collection(0) False >>> is_collection(0.1) False >>> is_collection('') False >>> is_collection({}) False >>> is_collection({}.keys()) True >>> is_collection([]) True >>> is_collection(()) True >>> is_collection(set()) True >>> is_collection(frozenset()) True >>> from coaster.utils import InspectableSet >>> is_collection(InspectableSet({1, 2})) True """ return not isinstance(item, six.string_types) and isinstance(item, (collections.Set, collections.Sequence))
[ "def", "is_collection", "(", "item", ")", ":", "return", "not", "isinstance", "(", "item", ",", "six", ".", "string_types", ")", "and", "isinstance", "(", "item", ",", "(", "collections", ".", "Set", ",", "collections", ".", "Sequence", ")", ")" ]
26.071429
23.285714
def heightmap_get_normal( hm: np.ndarray, x: float, y: float, waterLevel: float ) -> Tuple[float, float, float]: """Return the map normal at given coordinates. Args: hm (numpy.ndarray): A numpy.ndarray formatted for heightmap functions. x (float): The x coordinate. y (float): The y coordinate. waterLevel (float): The heightmap is considered flat below this value. Returns: Tuple[float, float, float]: An (x, y, z) vector normal. """ cn = ffi.new("float[3]") lib.TCOD_heightmap_get_normal(_heightmap_cdata(hm), x, y, cn, waterLevel) return tuple(cn)
[ "def", "heightmap_get_normal", "(", "hm", ":", "np", ".", "ndarray", ",", "x", ":", "float", ",", "y", ":", "float", ",", "waterLevel", ":", "float", ")", "->", "Tuple", "[", "float", ",", "float", ",", "float", "]", ":", "cn", "=", "ffi", ".", "new", "(", "\"float[3]\"", ")", "lib", ".", "TCOD_heightmap_get_normal", "(", "_heightmap_cdata", "(", "hm", ")", ",", "x", ",", "y", ",", "cn", ",", "waterLevel", ")", "return", "tuple", "(", "cn", ")" ]
35.764706
20.882353
def set_mode_send(self, target_system, base_mode, custom_mode, force_mavlink1=False): ''' THIS INTERFACE IS DEPRECATED. USE COMMAND_LONG with MAV_CMD_DO_SET_MODE INSTEAD. Set the system mode, as defined by enum MAV_MODE. There is no target component id as the mode is by definition for the overall aircraft, not only for one component. target_system : The system setting the mode (uint8_t) base_mode : The new base mode (uint8_t) custom_mode : The new autopilot-specific mode. This field can be ignored by an autopilot. (uint32_t) ''' return self.send(self.set_mode_encode(target_system, base_mode, custom_mode), force_mavlink1=force_mavlink1)
[ "def", "set_mode_send", "(", "self", ",", "target_system", ",", "base_mode", ",", "custom_mode", ",", "force_mavlink1", "=", "False", ")", ":", "return", "self", ".", "send", "(", "self", ".", "set_mode_encode", "(", "target_system", ",", "base_mode", ",", "custom_mode", ")", ",", "force_mavlink1", "=", "force_mavlink1", ")" ]
60.714286
38.142857
def unlearn(taskPkgName, deleteAll=False): """ Find the task named taskPkgName, and delete any/all user-owned .cfg files in the user's resource directory which apply to that task. Like a unix utility, this returns 0 on success (no files found or only 1 found but deleted). For multiple files found, this uses deleteAll, returning the file-name-list if deleteAll is False (to indicate the problem) and without deleting any files. MUST check return value. This does not prompt the user or print to the screen. """ # this WILL throw an exception if the taskPkgName isn't found flist = cfgpars.getUsrCfgFilesForPyPkg(taskPkgName) # can raise if flist is None or len(flist) == 0: return 0 if len(flist) == 1: os.remove(flist[0]) return 0 # at this point, we know more than one matching file was found if deleteAll: for f in flist: os.remove(f) return 0 else: return flist
[ "def", "unlearn", "(", "taskPkgName", ",", "deleteAll", "=", "False", ")", ":", "# this WILL throw an exception if the taskPkgName isn't found", "flist", "=", "cfgpars", ".", "getUsrCfgFilesForPyPkg", "(", "taskPkgName", ")", "# can raise", "if", "flist", "is", "None", "or", "len", "(", "flist", ")", "==", "0", ":", "return", "0", "if", "len", "(", "flist", ")", "==", "1", ":", "os", ".", "remove", "(", "flist", "[", "0", "]", ")", "return", "0", "# at this point, we know more than one matching file was found", "if", "deleteAll", ":", "for", "f", "in", "flist", ":", "os", ".", "remove", "(", "f", ")", "return", "0", "else", ":", "return", "flist" ]
41.869565
21.304348
def predict(self, X): """Perform regression on test vectors X. Parameters ---------- X : array-like, shape = [n_samples, n_features] Input vectors, where n_samples is the number of samples and n_features is the number of features. Returns ------- y : array, shape = [n_samples] Predicted values for X. """ if not hasattr(self, '_program'): raise NotFittedError('SymbolicRegressor not fitted.') X = check_array(X) _, n_features = X.shape if self.n_features_ != n_features: raise ValueError('Number of features of the model must match the ' 'input. Model n_features is %s and input ' 'n_features is %s.' % (self.n_features_, n_features)) y = self._program.execute(X) return y
[ "def", "predict", "(", "self", ",", "X", ")", ":", "if", "not", "hasattr", "(", "self", ",", "'_program'", ")", ":", "raise", "NotFittedError", "(", "'SymbolicRegressor not fitted.'", ")", "X", "=", "check_array", "(", "X", ")", "_", ",", "n_features", "=", "X", ".", "shape", "if", "self", ".", "n_features_", "!=", "n_features", ":", "raise", "ValueError", "(", "'Number of features of the model must match the '", "'input. Model n_features is %s and input '", "'n_features is %s.'", "%", "(", "self", ".", "n_features_", ",", "n_features", ")", ")", "y", "=", "self", ".", "_program", ".", "execute", "(", "X", ")", "return", "y" ]
31.37931
20.448276
def _websafe_component(c, alt=False): """Convert a color component to its web safe equivalent. Parameters: :c: The component value [0...1] :alt: If True, return the alternative value instead of the nearest one. Returns: The web safe equivalent of the component value. """ # This sucks, but floating point between 0 and 1 is quite fuzzy... # So we just change the scale a while to make the equality tests # work, otherwise it gets wrong at some decimal far to the right. sc = c * 100.0 # If the color is already safe, return it straight away d = sc % 20 if d==0: return c # Get the lower and upper safe values l = sc - d u = l + 20 # Return the 'closest' value according to the alt flag if alt: if (sc-l) >= (u-sc): return l/100.0 else: return u/100.0 else: if (sc-l) >= (u-sc): return u/100.0 else: return l/100.0
[ "def", "_websafe_component", "(", "c", ",", "alt", "=", "False", ")", ":", "# This sucks, but floating point between 0 and 1 is quite fuzzy...", "# So we just change the scale a while to make the equality tests", "# work, otherwise it gets wrong at some decimal far to the right.", "sc", "=", "c", "*", "100.0", "# If the color is already safe, return it straight away", "d", "=", "sc", "%", "20", "if", "d", "==", "0", ":", "return", "c", "# Get the lower and upper safe values", "l", "=", "sc", "-", "d", "u", "=", "l", "+", "20", "# Return the 'closest' value according to the alt flag", "if", "alt", ":", "if", "(", "sc", "-", "l", ")", ">=", "(", "u", "-", "sc", ")", ":", "return", "l", "/", "100.0", "else", ":", "return", "u", "/", "100.0", "else", ":", "if", "(", "sc", "-", "l", ")", ">=", "(", "u", "-", "sc", ")", ":", "return", "u", "/", "100.0", "else", ":", "return", "l", "/", "100.0" ]
26.090909
22.848485
def mdownload(args): """ %prog mdownload links.txt Multiple download a list of files. Use formats.html.links() to extract the links file. """ from jcvi.apps.grid import Jobs p = OptionParser(mdownload.__doc__) opts, args = p.parse_args(args) if len(args) != 1: sys.exit(not p.print_help()) linksfile, = args links = [(x.strip(),) for x in open(linksfile)] j = Jobs(download, links) j.run()
[ "def", "mdownload", "(", "args", ")", ":", "from", "jcvi", ".", "apps", ".", "grid", "import", "Jobs", "p", "=", "OptionParser", "(", "mdownload", ".", "__doc__", ")", "opts", ",", "args", "=", "p", ".", "parse_args", "(", "args", ")", "if", "len", "(", "args", ")", "!=", "1", ":", "sys", ".", "exit", "(", "not", "p", ".", "print_help", "(", ")", ")", "linksfile", ",", "=", "args", "links", "=", "[", "(", "x", ".", "strip", "(", ")", ",", ")", "for", "x", "in", "open", "(", "linksfile", ")", "]", "j", "=", "Jobs", "(", "download", ",", "links", ")", "j", ".", "run", "(", ")" ]
22.894737
18.789474
def _init_config(self): """If no config file exists, create it and add default options. Default LevelDB path is specified based on OS dynamic loading is set to infura by default in the file Returns: leveldb directory """ system = platform.system().lower() leveldb_fallback_dir = os.path.expanduser("~") if system.startswith("darwin"): leveldb_fallback_dir = os.path.join( leveldb_fallback_dir, "Library", "Ethereum" ) elif system.startswith("windows"): leveldb_fallback_dir = os.path.join( leveldb_fallback_dir, "AppData", "Roaming", "Ethereum" ) else: leveldb_fallback_dir = os.path.join(leveldb_fallback_dir, ".ethereum") leveldb_fallback_dir = os.path.join(leveldb_fallback_dir, "geth", "chaindata") if not os.path.exists(self.config_path): log.info("No config file found. Creating default: " + self.config_path) open(self.config_path, "a").close() config = ConfigParser(allow_no_value=True) config.optionxform = str config.read(self.config_path, "utf-8") if "defaults" not in config.sections(): self._add_default_options(config) if not config.has_option("defaults", "leveldb_dir"): self._add_leveldb_option(config, leveldb_fallback_dir) if not config.has_option("defaults", "dynamic_loading"): self._add_dynamic_loading_option(config) with codecs.open(self.config_path, "w", "utf-8") as fp: config.write(fp) leveldb_dir = config.get( "defaults", "leveldb_dir", fallback=leveldb_fallback_dir ) return os.path.expanduser(leveldb_dir)
[ "def", "_init_config", "(", "self", ")", ":", "system", "=", "platform", ".", "system", "(", ")", ".", "lower", "(", ")", "leveldb_fallback_dir", "=", "os", ".", "path", ".", "expanduser", "(", "\"~\"", ")", "if", "system", ".", "startswith", "(", "\"darwin\"", ")", ":", "leveldb_fallback_dir", "=", "os", ".", "path", ".", "join", "(", "leveldb_fallback_dir", ",", "\"Library\"", ",", "\"Ethereum\"", ")", "elif", "system", ".", "startswith", "(", "\"windows\"", ")", ":", "leveldb_fallback_dir", "=", "os", ".", "path", ".", "join", "(", "leveldb_fallback_dir", ",", "\"AppData\"", ",", "\"Roaming\"", ",", "\"Ethereum\"", ")", "else", ":", "leveldb_fallback_dir", "=", "os", ".", "path", ".", "join", "(", "leveldb_fallback_dir", ",", "\".ethereum\"", ")", "leveldb_fallback_dir", "=", "os", ".", "path", ".", "join", "(", "leveldb_fallback_dir", ",", "\"geth\"", ",", "\"chaindata\"", ")", "if", "not", "os", ".", "path", ".", "exists", "(", "self", ".", "config_path", ")", ":", "log", ".", "info", "(", "\"No config file found. 
Creating default: \"", "+", "self", ".", "config_path", ")", "open", "(", "self", ".", "config_path", ",", "\"a\"", ")", ".", "close", "(", ")", "config", "=", "ConfigParser", "(", "allow_no_value", "=", "True", ")", "config", ".", "optionxform", "=", "str", "config", ".", "read", "(", "self", ".", "config_path", ",", "\"utf-8\"", ")", "if", "\"defaults\"", "not", "in", "config", ".", "sections", "(", ")", ":", "self", ".", "_add_default_options", "(", "config", ")", "if", "not", "config", ".", "has_option", "(", "\"defaults\"", ",", "\"leveldb_dir\"", ")", ":", "self", ".", "_add_leveldb_option", "(", "config", ",", "leveldb_fallback_dir", ")", "if", "not", "config", ".", "has_option", "(", "\"defaults\"", ",", "\"dynamic_loading\"", ")", ":", "self", ".", "_add_dynamic_loading_option", "(", "config", ")", "with", "codecs", ".", "open", "(", "self", ".", "config_path", ",", "\"w\"", ",", "\"utf-8\"", ")", "as", "fp", ":", "config", ".", "write", "(", "fp", ")", "leveldb_dir", "=", "config", ".", "get", "(", "\"defaults\"", ",", "\"leveldb_dir\"", ",", "fallback", "=", "leveldb_fallback_dir", ")", "return", "os", ".", "path", ".", "expanduser", "(", "leveldb_dir", ")" ]
38.911111
20.333333
def detect_number_of_cores(): """ Detects the number of cores on a system. Cribbed from pp. """ # Linux, Unix and MacOS: if hasattr(os, "sysconf"): if "SC_NPROCESSORS_ONLN" in os.sysconf_names: # Linux & Unix: ncpus = os.sysconf("SC_NPROCESSORS_ONLN") if isinstance(ncpus, int) and ncpus > 0: return ncpus else: # OSX: return int(subprocess.check_output(["sysctl", "-n", "hw.ncpu"])) # Windows: try: ncpus = int(os.environ.get("NUMBER_OF_PROCESSORS", "")) if ncpus > 0: return ncpus except ValueError: pass return 1
[ "def", "detect_number_of_cores", "(", ")", ":", "# Linux, Unix and MacOS:", "if", "hasattr", "(", "os", ",", "\"sysconf\"", ")", ":", "if", "\"SC_NPROCESSORS_ONLN\"", "in", "os", ".", "sysconf_names", ":", "# Linux & Unix:", "ncpus", "=", "os", ".", "sysconf", "(", "\"SC_NPROCESSORS_ONLN\"", ")", "if", "isinstance", "(", "ncpus", ",", "int", ")", "and", "ncpus", ">", "0", ":", "return", "ncpus", "else", ":", "# OSX:", "return", "int", "(", "subprocess", ".", "check_output", "(", "[", "\"sysctl\"", ",", "\"-n\"", ",", "\"hw.ncpu\"", "]", ")", ")", "# Windows:", "try", ":", "ncpus", "=", "int", "(", "os", ".", "environ", ".", "get", "(", "\"NUMBER_OF_PROCESSORS\"", ",", "\"\"", ")", ")", "if", "ncpus", ">", "0", ":", "return", "ncpus", "except", "ValueError", ":", "pass", "return", "1" ]
30.857143
17.238095
def __raise_user_error(self, view): """ Raises an error if the given View has been set read only and the user attempted to edit its content. :param view: View. :type view: QWidget """ raise foundations.exceptions.UserError("{0} | Cannot perform action, '{1}' View has been set read only!".format( self.__class__.__name__, view.objectName() or view))
[ "def", "__raise_user_error", "(", "self", ",", "view", ")", ":", "raise", "foundations", ".", "exceptions", ".", "UserError", "(", "\"{0} | Cannot perform action, '{1}' View has been set read only!\"", ".", "format", "(", "self", ".", "__class__", ".", "__name__", ",", "view", ".", "objectName", "(", ")", "or", "view", ")", ")" ]
40.2
28.4
def run_node(self, node, stim): ''' Executes the Transformer at a specific node. Args: node (str, Node): If a string, the name of the Node in the current Graph. Otherwise the Node instance to execute. stim (str, stim, list): Any valid input to the Transformer stored at the target node. ''' if isinstance(node, string_types): node = self.nodes[node] result = node.transformer.transform(stim) if node.is_leaf(): return listify(result) stim = result # If result is a generator, the first child will destroy the # iterable, so cache via list conversion if len(node.children) > 1 and isgenerator(stim): stim = list(stim) return list(chain(*[self.run_node(c, stim) for c in node.children]))
[ "def", "run_node", "(", "self", ",", "node", ",", "stim", ")", ":", "if", "isinstance", "(", "node", ",", "string_types", ")", ":", "node", "=", "self", ".", "nodes", "[", "node", "]", "result", "=", "node", ".", "transformer", ".", "transform", "(", "stim", ")", "if", "node", ".", "is_leaf", "(", ")", ":", "return", "listify", "(", "result", ")", "stim", "=", "result", "# If result is a generator, the first child will destroy the", "# iterable, so cache via list conversion", "if", "len", "(", "node", ".", "children", ")", ">", "1", "and", "isgenerator", "(", "stim", ")", ":", "stim", "=", "list", "(", "stim", ")", "return", "list", "(", "chain", "(", "*", "[", "self", ".", "run_node", "(", "c", ",", "stim", ")", "for", "c", "in", "node", ".", "children", "]", ")", ")" ]
38.5
20.772727
def get(self, url=None, delimiter="/"): """Path is an s3 url. Ommiting the path or providing "s3://" as the path will return a list of all buckets. Otherwise, all subdirectories and their contents will be shown. """ params = {'Delimiter': delimiter} bucket, obj_key = _parse_url(url) if bucket: params['Bucket'] = bucket else: return self.call("ListBuckets", response_data_key="Buckets") if obj_key: params['Prefix'] = obj_key objects = self.call("ListObjects", response_data_key="Contents", **params) if objects: for obj in objects: obj['url'] = "s3://{0}/{1}".format(bucket, obj['Key']) return objects
[ "def", "get", "(", "self", ",", "url", "=", "None", ",", "delimiter", "=", "\"/\"", ")", ":", "params", "=", "{", "'Delimiter'", ":", "delimiter", "}", "bucket", ",", "obj_key", "=", "_parse_url", "(", "url", ")", "if", "bucket", ":", "params", "[", "'Bucket'", "]", "=", "bucket", "else", ":", "return", "self", ".", "call", "(", "\"ListBuckets\"", ",", "response_data_key", "=", "\"Buckets\"", ")", "if", "obj_key", ":", "params", "[", "'Prefix'", "]", "=", "obj_key", "objects", "=", "self", ".", "call", "(", "\"ListObjects\"", ",", "response_data_key", "=", "\"Contents\"", ",", "*", "*", "params", ")", "if", "objects", ":", "for", "obj", "in", "objects", ":", "obj", "[", "'url'", "]", "=", "\"s3://{0}/{1}\"", ".", "format", "(", "bucket", ",", "obj", "[", "'Key'", "]", ")", "return", "objects" ]
33.608696
18.304348
def Analyze(self, hashes): """Looks up hashes in VirusTotal using the VirusTotal HTTP API. The API is documented here: https://www.virustotal.com/en/documentation/public-api/ Args: hashes (list[str]): hashes to look up. Returns: list[HashAnalysis]: analysis results. Raises: RuntimeError: If the VirusTotal API key has not been set. """ if not self._api_key: raise RuntimeError('No API key specified for VirusTotal lookup.') hash_analyses = [] json_response = self._QueryHashes(hashes) or [] # VirusTotal returns a dictionary when a single hash is queried # and a list when multiple hashes are queried. if isinstance(json_response, dict): json_response = [json_response] for result in json_response: resource = result['resource'] hash_analysis = interface.HashAnalysis(resource, result) hash_analyses.append(hash_analysis) return hash_analyses
[ "def", "Analyze", "(", "self", ",", "hashes", ")", ":", "if", "not", "self", ".", "_api_key", ":", "raise", "RuntimeError", "(", "'No API key specified for VirusTotal lookup.'", ")", "hash_analyses", "=", "[", "]", "json_response", "=", "self", ".", "_QueryHashes", "(", "hashes", ")", "or", "[", "]", "# VirusTotal returns a dictionary when a single hash is queried", "# and a list when multiple hashes are queried.", "if", "isinstance", "(", "json_response", ",", "dict", ")", ":", "json_response", "=", "[", "json_response", "]", "for", "result", "in", "json_response", ":", "resource", "=", "result", "[", "'resource'", "]", "hash_analysis", "=", "interface", ".", "HashAnalysis", "(", "resource", ",", "result", ")", "hash_analyses", ".", "append", "(", "hash_analysis", ")", "return", "hash_analyses" ]
28.181818
20.909091
def create_package(package_format, owner, repo, **kwargs): """Create a new package in a repository.""" client = get_packages_api() with catch_raise_api_exception(): upload = getattr(client, "packages_upload_%s_with_http_info" % package_format) data, _, headers = upload( owner=owner, repo=repo, data=make_create_payload(**kwargs) ) ratelimits.maybe_rate_limit(client, headers) return data.slug_perm, data.slug
[ "def", "create_package", "(", "package_format", ",", "owner", ",", "repo", ",", "*", "*", "kwargs", ")", ":", "client", "=", "get_packages_api", "(", ")", "with", "catch_raise_api_exception", "(", ")", ":", "upload", "=", "getattr", "(", "client", ",", "\"packages_upload_%s_with_http_info\"", "%", "package_format", ")", "data", ",", "_", ",", "headers", "=", "upload", "(", "owner", "=", "owner", ",", "repo", "=", "repo", ",", "data", "=", "make_create_payload", "(", "*", "*", "kwargs", ")", ")", "ratelimits", ".", "maybe_rate_limit", "(", "client", ",", "headers", ")", "return", "data", ".", "slug_perm", ",", "data", ".", "slug" ]
35.076923
21.153846
def _brentq_cdf(self, value): """Helper function to compute percent_point. As scipy.stats.gaussian_kde doesn't provide this functionality out of the box we need to make a numerical approach: - First we scalarize and bound cumulative_distribution. - Then we define a function `f(x) = cdf(x) - value`, where value is the given argument. - As value will be called from ppf we can assume value = cdf(z) for some z that is the value we are searching for. Therefore the zeros of the function will be x such that: cdf(x) - cdf(z) = 0 => (becasue cdf is monotonous and continous) x = z Args: value(float): cdf value, that is, in [0,1] Returns: callable: function whose zero is the ppf of value. """ # The decorator expects an instance method, but usually are decorated before being bounded bound_cdf = partial(scalarize(GaussianKDE.cumulative_distribution), self) def f(x): return bound_cdf(x) - value return f
[ "def", "_brentq_cdf", "(", "self", ",", "value", ")", ":", "# The decorator expects an instance method, but usually are decorated before being bounded", "bound_cdf", "=", "partial", "(", "scalarize", "(", "GaussianKDE", ".", "cumulative_distribution", ")", ",", "self", ")", "def", "f", "(", "x", ")", ":", "return", "bound_cdf", "(", "x", ")", "-", "value", "return", "f" ]
41.6
30.52
def getAttribute(self, attrName, defaultValue=None): ''' getAttribute - Gets an attribute on this tag. Be wary using this for classname, maybe use addClass/removeClass. Attribute names are all lowercase. @return - The attribute value, or None if none exists. ''' if attrName in TAG_ITEM_BINARY_ATTRIBUTES: if attrName in self._attributes: attrVal = self._attributes[attrName] if not attrVal: return True # Empty valued binary attribute return attrVal # optionally-valued binary attribute else: return False else: return self._attributes.get(attrName, defaultValue)
[ "def", "getAttribute", "(", "self", ",", "attrName", ",", "defaultValue", "=", "None", ")", ":", "if", "attrName", "in", "TAG_ITEM_BINARY_ATTRIBUTES", ":", "if", "attrName", "in", "self", ".", "_attributes", ":", "attrVal", "=", "self", ".", "_attributes", "[", "attrName", "]", "if", "not", "attrVal", ":", "return", "True", "# Empty valued binary attribute", "return", "attrVal", "# optionally-valued binary attribute", "else", ":", "return", "False", "else", ":", "return", "self", ".", "_attributes", ".", "get", "(", "attrName", ",", "defaultValue", ")" ]
42.941176
27.529412
def create(self, basedir, outdir, name, prefix=None, dereference=True): """ :API: public """ basedir = ensure_text(basedir) tarpath = os.path.join(outdir, '{}.{}'.format(ensure_text(name), self.extension)) with open_tar(tarpath, self.mode, dereference=dereference, errorlevel=1) as tar: tar.add(basedir, arcname=prefix or '.') return tarpath
[ "def", "create", "(", "self", ",", "basedir", ",", "outdir", ",", "name", ",", "prefix", "=", "None", ",", "dereference", "=", "True", ")", ":", "basedir", "=", "ensure_text", "(", "basedir", ")", "tarpath", "=", "os", ".", "path", ".", "join", "(", "outdir", ",", "'{}.{}'", ".", "format", "(", "ensure_text", "(", "name", ")", ",", "self", ".", "extension", ")", ")", "with", "open_tar", "(", "tarpath", ",", "self", ".", "mode", ",", "dereference", "=", "dereference", ",", "errorlevel", "=", "1", ")", "as", "tar", ":", "tar", ".", "add", "(", "basedir", ",", "arcname", "=", "prefix", "or", "'.'", ")", "return", "tarpath" ]
36.7
21.7
def list(self, teamId, max=None, **request_parameters): """List team memberships for a team, by ID. This method supports Webex Teams's implementation of RFC5988 Web Linking to provide pagination support. It returns a generator container that incrementally yields all team memberships returned by the query. The generator will automatically request additional 'pages' of responses from Webex as needed until all responses have been returned. The container makes the generator safe for reuse. A new API call will be made, using the same parameters that were specified when the generator was created, every time a new iterator is requested from the container. Args: teamId(basestring): List team memberships for a team, by ID. max(int): Limit the maximum number of items returned from the Webex Teams service per request. **request_parameters: Additional request parameters (provides support for parameters that may be added in the future). Returns: GeneratorContainer: A GeneratorContainer which, when iterated, yields the team memberships returned by the Webex Teams query. Raises: TypeError: If the parameter types are incorrect. ApiError: If the Webex Teams cloud returns an error. """ check_type(teamId, basestring, may_be_none=False) check_type(max, int) params = dict_from_items_with_values( request_parameters, teamId=teamId, max=max, ) # API request - get items items = self._session.get_items(API_ENDPOINT, params=params) # Yield team membership objects created from the returned items JSON # objects for item in items: yield self._object_factory(OBJECT_TYPE, item)
[ "def", "list", "(", "self", ",", "teamId", ",", "max", "=", "None", ",", "*", "*", "request_parameters", ")", ":", "check_type", "(", "teamId", ",", "basestring", ",", "may_be_none", "=", "False", ")", "check_type", "(", "max", ",", "int", ")", "params", "=", "dict_from_items_with_values", "(", "request_parameters", ",", "teamId", "=", "teamId", ",", "max", "=", "max", ",", ")", "# API request - get items", "items", "=", "self", ".", "_session", ".", "get_items", "(", "API_ENDPOINT", ",", "params", "=", "params", ")", "# Yield team membership objects created from the returned items JSON", "# objects", "for", "item", "in", "items", ":", "yield", "self", ".", "_object_factory", "(", "OBJECT_TYPE", ",", "item", ")" ]
41.933333
26.511111
def send_batches(self, batch_list): """Sends a list of batches to the validator. Args: batch_list (:obj:`BatchList`): the list of batches Returns: dict: the json result data, as a dict """ if isinstance(batch_list, BaseMessage): batch_list = batch_list.SerializeToString() return self._post('/batches', batch_list)
[ "def", "send_batches", "(", "self", ",", "batch_list", ")", ":", "if", "isinstance", "(", "batch_list", ",", "BaseMessage", ")", ":", "batch_list", "=", "batch_list", ".", "SerializeToString", "(", ")", "return", "self", ".", "_post", "(", "'/batches'", ",", "batch_list", ")" ]
29.923077
18.307692
def calc_resp(password_hash, server_challenge): """calc_resp generates the LM response given a 16-byte password hash and the challenge from the Type-2 message. @param password_hash 16-byte password hash @param server_challenge 8-byte challenge from Type-2 message returns 24-byte buffer to contain the LM response upon return """ # padding with zeros to make the hash 21 bytes long password_hash += b'\0' * (21 - len(password_hash)) res = b'' dobj = des.DES(password_hash[0:7]) res = res + dobj.encrypt(server_challenge[0:8]) dobj = des.DES(password_hash[7:14]) res = res + dobj.encrypt(server_challenge[0:8]) dobj = des.DES(password_hash[14:21]) res = res + dobj.encrypt(server_challenge[0:8]) return res
[ "def", "calc_resp", "(", "password_hash", ",", "server_challenge", ")", ":", "# padding with zeros to make the hash 21 bytes long", "password_hash", "+=", "b'\\0'", "*", "(", "21", "-", "len", "(", "password_hash", ")", ")", "res", "=", "b''", "dobj", "=", "des", ".", "DES", "(", "password_hash", "[", "0", ":", "7", "]", ")", "res", "=", "res", "+", "dobj", ".", "encrypt", "(", "server_challenge", "[", "0", ":", "8", "]", ")", "dobj", "=", "des", ".", "DES", "(", "password_hash", "[", "7", ":", "14", "]", ")", "res", "=", "res", "+", "dobj", ".", "encrypt", "(", "server_challenge", "[", "0", ":", "8", "]", ")", "dobj", "=", "des", ".", "DES", "(", "password_hash", "[", "14", ":", "21", "]", ")", "res", "=", "res", "+", "dobj", ".", "encrypt", "(", "server_challenge", "[", "0", ":", "8", "]", ")", "return", "res" ]
34.869565
14.478261
def get_ip(self, address): """ Get an IPAddress object with the IP address (string) from the API. e.g manager.get_ip('80.69.175.210') """ res = self.get_request('/ip_address/' + address) return IPAddress(cloud_manager=self, **res['ip_address'])
[ "def", "get_ip", "(", "self", ",", "address", ")", ":", "res", "=", "self", ".", "get_request", "(", "'/ip_address/'", "+", "address", ")", "return", "IPAddress", "(", "cloud_manager", "=", "self", ",", "*", "*", "res", "[", "'ip_address'", "]", ")" ]
35.75
16.5
def cond(pred, then_func, else_func): """Run an if-then-else using user-defined condition and computation This operator simulates a if-like branch which chooses to do one of the two customized computations according to the specified condition. `pred` is a scalar MXNet NDArray, indicating which branch of computation should be used. `then_func` is a user-defined function, used as computation of the then branch. It produces `outputs`, which is a list of NDArrays. The signature of `then_func` should be `then_func() => NDArray or nested List[NDArray]`. `else_func` is a user-defined function, used as computation of the else branch. It produces `outputs`, which is a list of NDArrays. The signature of `else_func` should be `else_func() => NDArray or nested List[NDArray]`. The `outputs` produces by `then_func` and `else_func` should have the same number of elements, all of which should be in the same shape, of the same dtype and stype. This function returns a list of symbols, representing the computation result. Parameters ---------- pred: a MXNet NDArray representing a scalar. The branch condition. then_func: a Python function. The computation to be executed if `pred` is true. else_func: a Python function. The computation to be executed if `pred` is false. Returns ------- outputs: an NDArray or nested lists of NDArrays, representing the result of computation. Examples -------- >>> a, b = mx.nd.array([1]), mx.nd.array([2]) >>> pred = a * b < 5 >>> then_func = lambda: (a + 5) * (b + 5) >>> else_func = lambda: (a - 5) * (b - 5) >>> outputs = mx.nd.contrib.cond(pred, then_func, else_func) >>> outputs[0] [42.] 
<NDArray 1 @cpu(0)> """ def _to_python_scalar(inputs, type_, name): """Converts "inputs", possibly typed mxnet NDArray, a numpy ndarray, other python types, to the given type """ if hasattr(inputs, "asscalar"): inputs = inputs.asscalar() try: inputs = type_(inputs) except: raise ValueError("Cannot convert %s to python %s" % (name, type_.__name__)) return inputs branch = _to_python_scalar(pred, bool, "pred") if branch: return then_func() else: return else_func()
[ "def", "cond", "(", "pred", ",", "then_func", ",", "else_func", ")", ":", "def", "_to_python_scalar", "(", "inputs", ",", "type_", ",", "name", ")", ":", "\"\"\"Converts \"inputs\", possibly typed mxnet NDArray, a numpy ndarray, other python types,\n to the given type\n \"\"\"", "if", "hasattr", "(", "inputs", ",", "\"asscalar\"", ")", ":", "inputs", "=", "inputs", ".", "asscalar", "(", ")", "try", ":", "inputs", "=", "type_", "(", "inputs", ")", "except", ":", "raise", "ValueError", "(", "\"Cannot convert %s to python %s\"", "%", "(", "name", ",", "type_", ".", "__name__", ")", ")", "return", "inputs", "branch", "=", "_to_python_scalar", "(", "pred", ",", "bool", ",", "\"pred\"", ")", "if", "branch", ":", "return", "then_func", "(", ")", "else", ":", "return", "else_func", "(", ")" ]
35.753846
21.953846
def debug_toolbar_callback(request): """Show the debug toolbar to those with the Django staff permission, excluding the Eighth Period office.""" if request.is_ajax(): return False if not hasattr(request, 'user'): return False if not request.user.is_authenticated: return False if not request.user.is_staff: return False if request.user.id == 9999: return False return "debug" in request.GET or settings.DEBUG
[ "def", "debug_toolbar_callback", "(", "request", ")", ":", "if", "request", ".", "is_ajax", "(", ")", ":", "return", "False", "if", "not", "hasattr", "(", "request", ",", "'user'", ")", ":", "return", "False", "if", "not", "request", ".", "user", ".", "is_authenticated", ":", "return", "False", "if", "not", "request", ".", "user", ".", "is_staff", ":", "return", "False", "if", "request", ".", "user", ".", "id", "==", "9999", ":", "return", "False", "return", "\"debug\"", "in", "request", ".", "GET", "or", "settings", ".", "DEBUG" ]
27.470588
15.941176
def execute_shell(self, shell=None, parent_environ=None, rcfile=None, norc=False, stdin=False, command=None, quiet=False, block=None, actions_callback=None, post_actions_callback=None, context_filepath=None, start_new_session=False, detached=False, pre_command=None, **Popen_args): """Spawn a possibly-interactive shell. Args: shell: Shell type, for eg 'bash'. If None, the current shell type is used. parent_environ: Environment to run the shell process in, if None then the current environment is used. rcfile: Specify a file to source instead of shell startup files. norc: If True, skip shell startup files, if possible. stdin: If True, read commands from stdin, in a non-interactive shell. command: If not None, execute this command in a non-interactive shell. If an empty string or list, don't run a command, but don't open an interactive shell either. Can be a list of args. quiet: If True, skip the welcome message in interactive shells. block: If True, block until the shell is terminated. If False, return immediately. If None, will default to blocking if the shell is interactive. actions_callback: Callback with signature (RexExecutor). This lets the user append custom actions to the context, such as setting extra environment variables. Callback is run prior to context Rex execution. post_actions_callback: Callback with signature (RexExecutor). This lets the user append custom actions to the context, such as setting extra environment variables. Callback is run after context Rex execution. context_filepath: If provided, the context file will be written here, rather than to the default location (which is in a tempdir). If you use this arg, you are responsible for cleaning up the file. start_new_session: If True, change the process group of the target process. Note that this may override the Popen_args keyword 'preexec_fn'. detached: If True, open a separate terminal. Note that this may override the `pre_command` argument. 
pre_command: Command to inject before the shell command itself. This is for internal use. Popen_args: args to pass to the shell process object constructor. Returns: If blocking: A 3-tuple of (returncode, stdout, stderr); If non-blocking - A subprocess.Popen object for the shell process. """ sh = create_shell(shell) if hasattr(command, "__iter__"): command = sh.join(command) # start a new session if specified if start_new_session: Popen_args.update(config.new_session_popen_args) # open a separate terminal if specified if detached: term_cmd = config.terminal_emulator_command if term_cmd: pre_command = term_cmd.strip().split() # block if the shell is likely to be interactive if block is None: block = not (command or stdin) # context and rxt files. If running detached, don't cleanup files, because # rez-env returns too early and deletes the tmp files before the detached # process can use them tmpdir = self.tmpdir_manager.mkdtemp(cleanup=not detached) if self.load_path and os.path.isfile(self.load_path): rxt_file = self.load_path else: rxt_file = os.path.join(tmpdir, "context.rxt") self.save(rxt_file) context_file = context_filepath or \ os.path.join(tmpdir, "context.%s" % sh.file_extension()) # interpret this context and write out the native context file executor = self._create_executor(sh, parent_environ) executor.env.REZ_RXT_FILE = rxt_file executor.env.REZ_CONTEXT_FILE = context_file if actions_callback: actions_callback(executor) self._execute(executor) if post_actions_callback: post_actions_callback(executor) context_code = executor.get_output() with open(context_file, 'w') as f: f.write(context_code) quiet = quiet or \ (RezToolsVisibility[config.rez_tools_visibility] == RezToolsVisibility.never) # spawn the shell subprocess p = sh.spawn_shell(context_file, tmpdir, rcfile=rcfile, norc=norc, stdin=stdin, command=command, env=parent_environ, quiet=quiet, pre_command=pre_command, **Popen_args) if block: stdout, stderr = p.communicate() return p.returncode, stdout, 
stderr else: return p
[ "def", "execute_shell", "(", "self", ",", "shell", "=", "None", ",", "parent_environ", "=", "None", ",", "rcfile", "=", "None", ",", "norc", "=", "False", ",", "stdin", "=", "False", ",", "command", "=", "None", ",", "quiet", "=", "False", ",", "block", "=", "None", ",", "actions_callback", "=", "None", ",", "post_actions_callback", "=", "None", ",", "context_filepath", "=", "None", ",", "start_new_session", "=", "False", ",", "detached", "=", "False", ",", "pre_command", "=", "None", ",", "*", "*", "Popen_args", ")", ":", "sh", "=", "create_shell", "(", "shell", ")", "if", "hasattr", "(", "command", ",", "\"__iter__\"", ")", ":", "command", "=", "sh", ".", "join", "(", "command", ")", "# start a new session if specified", "if", "start_new_session", ":", "Popen_args", ".", "update", "(", "config", ".", "new_session_popen_args", ")", "# open a separate terminal if specified", "if", "detached", ":", "term_cmd", "=", "config", ".", "terminal_emulator_command", "if", "term_cmd", ":", "pre_command", "=", "term_cmd", ".", "strip", "(", ")", ".", "split", "(", ")", "# block if the shell is likely to be interactive", "if", "block", "is", "None", ":", "block", "=", "not", "(", "command", "or", "stdin", ")", "# context and rxt files. 
If running detached, don't cleanup files, because", "# rez-env returns too early and deletes the tmp files before the detached", "# process can use them", "tmpdir", "=", "self", ".", "tmpdir_manager", ".", "mkdtemp", "(", "cleanup", "=", "not", "detached", ")", "if", "self", ".", "load_path", "and", "os", ".", "path", ".", "isfile", "(", "self", ".", "load_path", ")", ":", "rxt_file", "=", "self", ".", "load_path", "else", ":", "rxt_file", "=", "os", ".", "path", ".", "join", "(", "tmpdir", ",", "\"context.rxt\"", ")", "self", ".", "save", "(", "rxt_file", ")", "context_file", "=", "context_filepath", "or", "os", ".", "path", ".", "join", "(", "tmpdir", ",", "\"context.%s\"", "%", "sh", ".", "file_extension", "(", ")", ")", "# interpret this context and write out the native context file", "executor", "=", "self", ".", "_create_executor", "(", "sh", ",", "parent_environ", ")", "executor", ".", "env", ".", "REZ_RXT_FILE", "=", "rxt_file", "executor", ".", "env", ".", "REZ_CONTEXT_FILE", "=", "context_file", "if", "actions_callback", ":", "actions_callback", "(", "executor", ")", "self", ".", "_execute", "(", "executor", ")", "if", "post_actions_callback", ":", "post_actions_callback", "(", "executor", ")", "context_code", "=", "executor", ".", "get_output", "(", ")", "with", "open", "(", "context_file", ",", "'w'", ")", "as", "f", ":", "f", ".", "write", "(", "context_code", ")", "quiet", "=", "quiet", "or", "(", "RezToolsVisibility", "[", "config", ".", "rez_tools_visibility", "]", "==", "RezToolsVisibility", ".", "never", ")", "# spawn the shell subprocess", "p", "=", "sh", ".", "spawn_shell", "(", "context_file", ",", "tmpdir", ",", "rcfile", "=", "rcfile", ",", "norc", "=", "norc", ",", "stdin", "=", "stdin", ",", "command", "=", "command", ",", "env", "=", "parent_environ", ",", "quiet", "=", "quiet", ",", "pre_command", "=", "pre_command", ",", "*", "*", "Popen_args", ")", "if", "block", ":", "stdout", ",", "stderr", "=", "p", ".", "communicate", "(", ")", 
"return", "p", ".", "returncode", ",", "stdout", ",", "stderr", "else", ":", "return", "p" ]
44.376068
22.350427
def overrides_a_method(class_node: astroid.node_classes.NodeNG, name: str) -> bool: """return True if <name> is a method overridden from an ancestor""" for ancestor in class_node.ancestors(): if name in ancestor and isinstance(ancestor[name], astroid.FunctionDef): return True return False
[ "def", "overrides_a_method", "(", "class_node", ":", "astroid", ".", "node_classes", ".", "NodeNG", ",", "name", ":", "str", ")", "->", "bool", ":", "for", "ancestor", "in", "class_node", ".", "ancestors", "(", ")", ":", "if", "name", "in", "ancestor", "and", "isinstance", "(", "ancestor", "[", "name", "]", ",", "astroid", ".", "FunctionDef", ")", ":", "return", "True", "return", "False" ]
52.666667
21.166667
def _shorten_url(self, text): '''Shorten a URL and make sure to not cut of html entities.''' if len(text) > self._max_url_length and self._max_url_length != -1: text = text[0:self._max_url_length - 3] amp = text.rfind('&') close = text.rfind(';') if amp != -1 and (close == -1 or close < amp): text = text[0:amp] return text + '...' else: return text
[ "def", "_shorten_url", "(", "self", ",", "text", ")", ":", "if", "len", "(", "text", ")", ">", "self", ".", "_max_url_length", "and", "self", ".", "_max_url_length", "!=", "-", "1", ":", "text", "=", "text", "[", "0", ":", "self", ".", "_max_url_length", "-", "3", "]", "amp", "=", "text", ".", "rfind", "(", "'&'", ")", "close", "=", "text", ".", "rfind", "(", "';'", ")", "if", "amp", "!=", "-", "1", "and", "(", "close", "==", "-", "1", "or", "close", "<", "amp", ")", ":", "text", "=", "text", "[", "0", ":", "amp", "]", "return", "text", "+", "'...'", "else", ":", "return", "text" ]
32.285714
21.142857
def stream(self, model, position): """Create a :class:`~bloop.stream.Stream` that provides approximate chronological ordering. .. code-block:: pycon # Create a user so we have a record >>> engine = Engine() >>> user = User(id=3, email="user@domain.com") >>> engine.save(user) >>> user.email = "admin@domain.com" >>> engine.save(user) # First record lacks an "old" value since it's an insert >>> stream = engine.stream(User, "trim_horizon") >>> next(stream) {'key': None, 'old': None, 'new': User(email='user@domain.com', id=3, verified=None), 'meta': { 'created_at': datetime.datetime(2016, 10, 23, ...), 'event': { 'id': '3fe6d339b7cb19a1474b3d853972c12a', 'type': 'insert', 'version': '1.1'}, 'sequence_number': '700000000007366876916'} } :param model: The model to stream records from. :param position: "trim_horizon", "latest", a stream token, or a :class:`datetime.datetime`. :return: An iterator for records in all shards. :rtype: :class:`~bloop.stream.Stream` :raises bloop.exceptions.InvalidStream: if the model does not have a stream. """ validate_not_abstract(model) if not model.Meta.stream or not model.Meta.stream.get("arn"): raise InvalidStream("{!r} does not have a stream arn".format(model)) stream = Stream(model=model, engine=self) stream.move_to(position=position) return stream
[ "def", "stream", "(", "self", ",", "model", ",", "position", ")", ":", "validate_not_abstract", "(", "model", ")", "if", "not", "model", ".", "Meta", ".", "stream", "or", "not", "model", ".", "Meta", ".", "stream", ".", "get", "(", "\"arn\"", ")", ":", "raise", "InvalidStream", "(", "\"{!r} does not have a stream arn\"", ".", "format", "(", "model", ")", ")", "stream", "=", "Stream", "(", "model", "=", "model", ",", "engine", "=", "self", ")", "stream", ".", "move_to", "(", "position", "=", "position", ")", "return", "stream" ]
41.6
19.05
def get_oauth_url(self): """ Returns the URL with OAuth params """ params = OrderedDict() if "?" in self.url: url = self.url[:self.url.find("?")] for key, value in parse_qsl(urlparse(self.url).query): params[key] = value else: url = self.url params["oauth_consumer_key"] = self.consumer_key params["oauth_timestamp"] = self.timestamp params["oauth_nonce"] = self.generate_nonce() params["oauth_signature_method"] = "HMAC-SHA256" params["oauth_signature"] = self.generate_oauth_signature(params, url) query_string = urlencode(params) return "%s?%s" % (url, query_string)
[ "def", "get_oauth_url", "(", "self", ")", ":", "params", "=", "OrderedDict", "(", ")", "if", "\"?\"", "in", "self", ".", "url", ":", "url", "=", "self", ".", "url", "[", ":", "self", ".", "url", ".", "find", "(", "\"?\"", ")", "]", "for", "key", ",", "value", "in", "parse_qsl", "(", "urlparse", "(", "self", ".", "url", ")", ".", "query", ")", ":", "params", "[", "key", "]", "=", "value", "else", ":", "url", "=", "self", ".", "url", "params", "[", "\"oauth_consumer_key\"", "]", "=", "self", ".", "consumer_key", "params", "[", "\"oauth_timestamp\"", "]", "=", "self", ".", "timestamp", "params", "[", "\"oauth_nonce\"", "]", "=", "self", ".", "generate_nonce", "(", ")", "params", "[", "\"oauth_signature_method\"", "]", "=", "\"HMAC-SHA256\"", "params", "[", "\"oauth_signature\"", "]", "=", "self", ".", "generate_oauth_signature", "(", "params", ",", "url", ")", "query_string", "=", "urlencode", "(", "params", ")", "return", "\"%s?%s\"", "%", "(", "url", ",", "query_string", ")" ]
34.7
18.75
def parse_condition(self, query, prev_key=None, last_prev_key=None): """ Creates a recursive generator for parsing some types of Query() conditions :param query: Query object :param prev_key: The key at the next-higher level :return: generator object, the last of which will be the complete Query() object containing all conditions """ # use this to determine gt/lt/eq on prev_query logger.debug(u'query: {} prev_query: {}'.format(query, prev_key)) q = Query() conditions = None # deal with the {'name': value} case by injecting a previous key if not prev_key: temp_query = copy.deepcopy(query) k, v = temp_query.popitem() prev_key = k # deal with the conditions for key, value in query.items(): logger.debug(u'conditions: {} {}'.format(key, value)) if key == u'$gte': conditions = ( Q(q, prev_key) >= value ) if not conditions and prev_key != "$not" \ else (conditions & (Q(q, prev_key) >= value)) if prev_key != "$not" \ else (q[last_prev_key] < value) elif key == u'$gt': conditions = ( Q(q, prev_key) > value ) if not conditions and prev_key != "$not" \ else (conditions & (Q(q, prev_key) > value)) if prev_key != "$not" \ else (q[last_prev_key] <= value) elif key == u'$lte': conditions = ( Q(q, prev_key) <= value ) if not conditions and prev_key != "$not" \ else (conditions & (Q(q, prev_key) <= value)) if prev_key != "$not" \ else (q[last_prev_key] > value) elif key == u'$lt': conditions = ( Q(q, prev_key) < value ) if not conditions and prev_key != "$not" \ else (conditions & (Q(q, prev_key) < value)) if prev_key != "$not" \ else (q[last_prev_key] >= value) elif key == u'$ne': conditions = ( Q(q, prev_key) != value ) if not conditions and prev_key != "$not" \ else (conditions & (Q(q, prev_key) != value))if prev_key != "$not" \ else (q[last_prev_key] == value) elif key == u'$not': if not isinstance(value, dict) and not isinstance(value, list): conditions = ( Q(q, prev_key) != value ) if not conditions and prev_key != "$not" \ else (conditions & (Q(q, prev_key) != value)) \ if 
prev_key != "$not" else (q[last_prev_key] >= value) else: # let the value's condition be parsed below pass elif key == u'$regex': value = value.replace('\\\\\\', '|||') value = value.replace('\\\\', '|||') regex = value.replace('\\', '') regex = regex.replace('|||', '\\') currCond = (where(prev_key).matches(regex)) conditions = currCond if not conditions else (conditions & currCond) elif key in ['$and', '$or', '$in', '$all']: pass else: # don't want to use the previous key if this is a secondary key # (fixes multiple item query that includes $ codes) if not isinstance(value, dict) and not isinstance(value, list): conditions = ( (Q(q, key) == value) | (Q(q, key).any([value])) ) if not conditions else (conditions & ((Q(q, key) == value) | (Q(q, key).any([value])))) prev_key = key logger.debug(u'c: {}'.format(conditions)) if isinstance(value, dict): # yield from self.parse_condition(value, key) for parse_condition in self.parse_condition(value, key, prev_key): yield parse_condition elif isinstance(value, list): if key == '$and': grouped_conditions = None for spec in value: for parse_condition in self.parse_condition(spec): grouped_conditions = ( parse_condition if not grouped_conditions else grouped_conditions & parse_condition ) yield grouped_conditions elif key == '$or': grouped_conditions = None for spec in value: for parse_condition in self.parse_condition(spec): grouped_conditions = ( parse_condition if not grouped_conditions else grouped_conditions | parse_condition ) yield grouped_conditions elif key == '$in': # use `any` to find with list, before comparing to single string grouped_conditions = Q(q, prev_key).any(value) for val in value: for parse_condition in self.parse_condition({prev_key : val}): grouped_conditions = ( parse_condition if not grouped_conditions else grouped_conditions | parse_condition ) yield grouped_conditions elif key == '$all': yield Q(q, prev_key).all(value) else: yield Q(q, prev_key).any([value]) else: yield conditions
[ "def", "parse_condition", "(", "self", ",", "query", ",", "prev_key", "=", "None", ",", "last_prev_key", "=", "None", ")", ":", "# use this to determine gt/lt/eq on prev_query", "logger", ".", "debug", "(", "u'query: {} prev_query: {}'", ".", "format", "(", "query", ",", "prev_key", ")", ")", "q", "=", "Query", "(", ")", "conditions", "=", "None", "# deal with the {'name': value} case by injecting a previous key", "if", "not", "prev_key", ":", "temp_query", "=", "copy", ".", "deepcopy", "(", "query", ")", "k", ",", "v", "=", "temp_query", ".", "popitem", "(", ")", "prev_key", "=", "k", "# deal with the conditions", "for", "key", ",", "value", "in", "query", ".", "items", "(", ")", ":", "logger", ".", "debug", "(", "u'conditions: {} {}'", ".", "format", "(", "key", ",", "value", ")", ")", "if", "key", "==", "u'$gte'", ":", "conditions", "=", "(", "Q", "(", "q", ",", "prev_key", ")", ">=", "value", ")", "if", "not", "conditions", "and", "prev_key", "!=", "\"$not\"", "else", "(", "conditions", "&", "(", "Q", "(", "q", ",", "prev_key", ")", ">=", "value", ")", ")", "if", "prev_key", "!=", "\"$not\"", "else", "(", "q", "[", "last_prev_key", "]", "<", "value", ")", "elif", "key", "==", "u'$gt'", ":", "conditions", "=", "(", "Q", "(", "q", ",", "prev_key", ")", ">", "value", ")", "if", "not", "conditions", "and", "prev_key", "!=", "\"$not\"", "else", "(", "conditions", "&", "(", "Q", "(", "q", ",", "prev_key", ")", ">", "value", ")", ")", "if", "prev_key", "!=", "\"$not\"", "else", "(", "q", "[", "last_prev_key", "]", "<=", "value", ")", "elif", "key", "==", "u'$lte'", ":", "conditions", "=", "(", "Q", "(", "q", ",", "prev_key", ")", "<=", "value", ")", "if", "not", "conditions", "and", "prev_key", "!=", "\"$not\"", "else", "(", "conditions", "&", "(", "Q", "(", "q", ",", "prev_key", ")", "<=", "value", ")", ")", "if", "prev_key", "!=", "\"$not\"", "else", "(", "q", "[", "last_prev_key", "]", ">", "value", ")", "elif", "key", "==", "u'$lt'", ":", "conditions", "=", "(", 
"Q", "(", "q", ",", "prev_key", ")", "<", "value", ")", "if", "not", "conditions", "and", "prev_key", "!=", "\"$not\"", "else", "(", "conditions", "&", "(", "Q", "(", "q", ",", "prev_key", ")", "<", "value", ")", ")", "if", "prev_key", "!=", "\"$not\"", "else", "(", "q", "[", "last_prev_key", "]", ">=", "value", ")", "elif", "key", "==", "u'$ne'", ":", "conditions", "=", "(", "Q", "(", "q", ",", "prev_key", ")", "!=", "value", ")", "if", "not", "conditions", "and", "prev_key", "!=", "\"$not\"", "else", "(", "conditions", "&", "(", "Q", "(", "q", ",", "prev_key", ")", "!=", "value", ")", ")", "if", "prev_key", "!=", "\"$not\"", "else", "(", "q", "[", "last_prev_key", "]", "==", "value", ")", "elif", "key", "==", "u'$not'", ":", "if", "not", "isinstance", "(", "value", ",", "dict", ")", "and", "not", "isinstance", "(", "value", ",", "list", ")", ":", "conditions", "=", "(", "Q", "(", "q", ",", "prev_key", ")", "!=", "value", ")", "if", "not", "conditions", "and", "prev_key", "!=", "\"$not\"", "else", "(", "conditions", "&", "(", "Q", "(", "q", ",", "prev_key", ")", "!=", "value", ")", ")", "if", "prev_key", "!=", "\"$not\"", "else", "(", "q", "[", "last_prev_key", "]", ">=", "value", ")", "else", ":", "# let the value's condition be parsed below", "pass", "elif", "key", "==", "u'$regex'", ":", "value", "=", "value", ".", "replace", "(", "'\\\\\\\\\\\\'", ",", "'|||'", ")", "value", "=", "value", ".", "replace", "(", "'\\\\\\\\'", ",", "'|||'", ")", "regex", "=", "value", ".", "replace", "(", "'\\\\'", ",", "''", ")", "regex", "=", "regex", ".", "replace", "(", "'|||'", ",", "'\\\\'", ")", "currCond", "=", "(", "where", "(", "prev_key", ")", ".", "matches", "(", "regex", ")", ")", "conditions", "=", "currCond", "if", "not", "conditions", "else", "(", "conditions", "&", "currCond", ")", "elif", "key", "in", "[", "'$and'", ",", "'$or'", ",", "'$in'", ",", "'$all'", "]", ":", "pass", "else", ":", "# don't want to use the previous key if this is a secondary key", "# 
(fixes multiple item query that includes $ codes)", "if", "not", "isinstance", "(", "value", ",", "dict", ")", "and", "not", "isinstance", "(", "value", ",", "list", ")", ":", "conditions", "=", "(", "(", "Q", "(", "q", ",", "key", ")", "==", "value", ")", "|", "(", "Q", "(", "q", ",", "key", ")", ".", "any", "(", "[", "value", "]", ")", ")", ")", "if", "not", "conditions", "else", "(", "conditions", "&", "(", "(", "Q", "(", "q", ",", "key", ")", "==", "value", ")", "|", "(", "Q", "(", "q", ",", "key", ")", ".", "any", "(", "[", "value", "]", ")", ")", ")", ")", "prev_key", "=", "key", "logger", ".", "debug", "(", "u'c: {}'", ".", "format", "(", "conditions", ")", ")", "if", "isinstance", "(", "value", ",", "dict", ")", ":", "# yield from self.parse_condition(value, key)", "for", "parse_condition", "in", "self", ".", "parse_condition", "(", "value", ",", "key", ",", "prev_key", ")", ":", "yield", "parse_condition", "elif", "isinstance", "(", "value", ",", "list", ")", ":", "if", "key", "==", "'$and'", ":", "grouped_conditions", "=", "None", "for", "spec", "in", "value", ":", "for", "parse_condition", "in", "self", ".", "parse_condition", "(", "spec", ")", ":", "grouped_conditions", "=", "(", "parse_condition", "if", "not", "grouped_conditions", "else", "grouped_conditions", "&", "parse_condition", ")", "yield", "grouped_conditions", "elif", "key", "==", "'$or'", ":", "grouped_conditions", "=", "None", "for", "spec", "in", "value", ":", "for", "parse_condition", "in", "self", ".", "parse_condition", "(", "spec", ")", ":", "grouped_conditions", "=", "(", "parse_condition", "if", "not", "grouped_conditions", "else", "grouped_conditions", "|", "parse_condition", ")", "yield", "grouped_conditions", "elif", "key", "==", "'$in'", ":", "# use `any` to find with list, before comparing to single string", "grouped_conditions", "=", "Q", "(", "q", ",", "prev_key", ")", ".", "any", "(", "value", ")", "for", "val", "in", "value", ":", "for", "parse_condition", "in", "self", ".", 
"parse_condition", "(", "{", "prev_key", ":", "val", "}", ")", ":", "grouped_conditions", "=", "(", "parse_condition", "if", "not", "grouped_conditions", "else", "grouped_conditions", "|", "parse_condition", ")", "yield", "grouped_conditions", "elif", "key", "==", "'$all'", ":", "yield", "Q", "(", "q", ",", "prev_key", ")", ".", "all", "(", "value", ")", "else", ":", "yield", "Q", "(", "q", ",", "prev_key", ")", ".", "any", "(", "[", "value", "]", ")", "else", ":", "yield", "conditions" ]
46.100775
18.069767
def searchString( self, instring, maxMatches=_MAX_INT ): """Another extension to scanString, simplifying the access to the tokens found to match the given parse expression. May be called with optional maxMatches argument, to clip searching after 'n' matches are found. """ return ParseResults([ t for t,s,e in self.scanString( instring, maxMatches ) ])
[ "def", "searchString", "(", "self", ",", "instring", ",", "maxMatches", "=", "_MAX_INT", ")", ":", "return", "ParseResults", "(", "[", "t", "for", "t", ",", "s", ",", "e", "in", "self", ".", "scanString", "(", "instring", ",", "maxMatches", ")", "]", ")" ]
65.666667
22.833333
def _mul16(ins): ''' Multiplies tow last 16bit values on top of the stack and and returns the value on top of the stack Optimizations: * If any of the ops is ZERO, then do A = 0 ==> XOR A, cause A * 0 = 0 * A = 0 * If any ot the ops is ONE, do NOTHING A * 1 = 1 * A = A * If B is 2^n and B < 16 => Shift Right n ''' op1, op2 = tuple(ins.quad[2:]) if _int_ops(op1, op2) is not None: # If any of the operands is constant op1, op2 = _int_ops(op1, op2) # put the constant one the 2nd output = _16bit_oper(op1) if op2 == 0: # A * 0 = 0 * A = 0 if op1[0] in ('_', '$'): output = [] # Optimization: Discard previous op if not from the stack output.append('ld hl, 0') output.append('push hl') return output if op2 == 1: # A * 1 = 1 * A == A => Do nothing output.append('push hl') return output if op2 == 0xFFFF: # This is the same as (-1) output.append('call __NEGHL') output.append('push hl') REQUIRES.add('neg16.asm') return output if is_2n(op2) and log2(op2) < 4: output.extend(['add hl, hl'] * int(log2(op2))) output.append('push hl') return output output.append('ld de, %i' % op2) else: if op2[0] == '_': # stack optimization op1, op2 = op2, op1 output = _16bit_oper(op1, op2) output.append('call __MUL16_FAST') # Inmmediate output.append('push hl') REQUIRES.add('mul16.asm') return output
[ "def", "_mul16", "(", "ins", ")", ":", "op1", ",", "op2", "=", "tuple", "(", "ins", ".", "quad", "[", "2", ":", "]", ")", "if", "_int_ops", "(", "op1", ",", "op2", ")", "is", "not", "None", ":", "# If any of the operands is constant", "op1", ",", "op2", "=", "_int_ops", "(", "op1", ",", "op2", ")", "# put the constant one the 2nd", "output", "=", "_16bit_oper", "(", "op1", ")", "if", "op2", "==", "0", ":", "# A * 0 = 0 * A = 0", "if", "op1", "[", "0", "]", "in", "(", "'_'", ",", "'$'", ")", ":", "output", "=", "[", "]", "# Optimization: Discard previous op if not from the stack", "output", ".", "append", "(", "'ld hl, 0'", ")", "output", ".", "append", "(", "'push hl'", ")", "return", "output", "if", "op2", "==", "1", ":", "# A * 1 = 1 * A == A => Do nothing", "output", ".", "append", "(", "'push hl'", ")", "return", "output", "if", "op2", "==", "0xFFFF", ":", "# This is the same as (-1)", "output", ".", "append", "(", "'call __NEGHL'", ")", "output", ".", "append", "(", "'push hl'", ")", "REQUIRES", ".", "add", "(", "'neg16.asm'", ")", "return", "output", "if", "is_2n", "(", "op2", ")", "and", "log2", "(", "op2", ")", "<", "4", ":", "output", ".", "extend", "(", "[", "'add hl, hl'", "]", "*", "int", "(", "log2", "(", "op2", ")", ")", ")", "output", ".", "append", "(", "'push hl'", ")", "return", "output", "output", ".", "append", "(", "'ld de, %i'", "%", "op2", ")", "else", ":", "if", "op2", "[", "0", "]", "==", "'_'", ":", "# stack optimization", "op1", ",", "op2", "=", "op2", ",", "op1", "output", "=", "_16bit_oper", "(", "op1", ",", "op2", ")", "output", ".", "append", "(", "'call __MUL16_FAST'", ")", "# Inmmediate", "output", ".", "append", "(", "'push hl'", ")", "REQUIRES", ".", "add", "(", "'mul16.asm'", ")", "return", "output" ]
31.196078
18.137255
def covar_plotter3d_matplotlib(embedding, rieman_metric, inspect_points_idx, ax, colors): """3 Dimensional Covariance plotter using matplotlib backend.""" for pts_idx in inspect_points_idx: plot_ellipse_matplotlib( cov=rieman_metric[pts_idx], pos=embedding[pts_idx], ax=ax, ec='k', lw=1, color=colors[pts_idx] ) return ax
[ "def", "covar_plotter3d_matplotlib", "(", "embedding", ",", "rieman_metric", ",", "inspect_points_idx", ",", "ax", ",", "colors", ")", ":", "for", "pts_idx", "in", "inspect_points_idx", ":", "plot_ellipse_matplotlib", "(", "cov", "=", "rieman_metric", "[", "pts_idx", "]", ",", "pos", "=", "embedding", "[", "pts_idx", "]", ",", "ax", "=", "ax", ",", "ec", "=", "'k'", ",", "lw", "=", "1", ",", "color", "=", "colors", "[", "pts_idx", "]", ")", "return", "ax" ]
50.111111
13.666667
def make_comparison_png(self, outpath=None, include_legend=False): """ Creates a thematic map image with a three color beside it :param outpath: if specified, will save the image instead of showing it :param include_legend: if true will include the thamatic map label legend """ from matplotlib.patches import Patch fig, axs = plt.subplots(ncols=2, sharex=True, sharey=True) three_color = self.make_three_color() axs[0].imshow(three_color) axs[0].set_axis_off() shape = self.thmap.shape axs[1].imshow(self.thmap, origin='lower', interpolation='nearest', cmap=self.config.solar_cmap, vmin=-1, vmax=len(self.config.solar_classes)-1) if include_legend: legend_elements = [Patch(facecolor=c, label=sc, edgecolor='k') for sc, c in self.config.solar_colors.items()] axs[1].legend(handles=legend_elements, fontsize='x-small', bbox_to_anchor=(0., 1.02, 1., .102), loc=3, ncol=2, mode="expand", borderaxespad=0.) axs[1].set_xlim([0, shape[0]]) axs[1].set_ylim([0, shape[0]]) axs[1].set_aspect("equal") axs[1].set_axis_off() if outpath: fig.savefig(outpath, dpi=300, transparent=True, bbox_inches='tight', pad_inches=0.) plt.close() else: plt.show()
[ "def", "make_comparison_png", "(", "self", ",", "outpath", "=", "None", ",", "include_legend", "=", "False", ")", ":", "from", "matplotlib", ".", "patches", "import", "Patch", "fig", ",", "axs", "=", "plt", ".", "subplots", "(", "ncols", "=", "2", ",", "sharex", "=", "True", ",", "sharey", "=", "True", ")", "three_color", "=", "self", ".", "make_three_color", "(", ")", "axs", "[", "0", "]", ".", "imshow", "(", "three_color", ")", "axs", "[", "0", "]", ".", "set_axis_off", "(", ")", "shape", "=", "self", ".", "thmap", ".", "shape", "axs", "[", "1", "]", ".", "imshow", "(", "self", ".", "thmap", ",", "origin", "=", "'lower'", ",", "interpolation", "=", "'nearest'", ",", "cmap", "=", "self", ".", "config", ".", "solar_cmap", ",", "vmin", "=", "-", "1", ",", "vmax", "=", "len", "(", "self", ".", "config", ".", "solar_classes", ")", "-", "1", ")", "if", "include_legend", ":", "legend_elements", "=", "[", "Patch", "(", "facecolor", "=", "c", ",", "label", "=", "sc", ",", "edgecolor", "=", "'k'", ")", "for", "sc", ",", "c", "in", "self", ".", "config", ".", "solar_colors", ".", "items", "(", ")", "]", "axs", "[", "1", "]", ".", "legend", "(", "handles", "=", "legend_elements", ",", "fontsize", "=", "'x-small'", ",", "bbox_to_anchor", "=", "(", "0.", ",", "1.02", ",", "1.", ",", ".102", ")", ",", "loc", "=", "3", ",", "ncol", "=", "2", ",", "mode", "=", "\"expand\"", ",", "borderaxespad", "=", "0.", ")", "axs", "[", "1", "]", ".", "set_xlim", "(", "[", "0", ",", "shape", "[", "0", "]", "]", ")", "axs", "[", "1", "]", ".", "set_ylim", "(", "[", "0", ",", "shape", "[", "0", "]", "]", ")", "axs", "[", "1", "]", ".", "set_aspect", "(", "\"equal\"", ")", "axs", "[", "1", "]", ".", "set_axis_off", "(", ")", "if", "outpath", ":", "fig", ".", "savefig", "(", "outpath", ",", "dpi", "=", "300", ",", "transparent", "=", "True", ",", "bbox_inches", "=", "'tight'", ",", "pad_inches", "=", "0.", ")", "plt", ".", "close", "(", ")", "else", ":", "plt", ".", "show", "(", ")" ]
38.875
18.125
def load_effects(self, patients=None, only_nonsynonymous=False, all_effects=False, filter_fn=None, **kwargs): """Load a dictionary of patient_id to varcode.EffectCollection Note that this only loads one effect per variant. Parameters ---------- patients : str, optional Filter to a subset of patients only_nonsynonymous : bool, optional If true, load only nonsynonymous effects, default False all_effects : bool, optional If true, return all effects rather than only the top-priority effect per variant filter_fn : function Takes a FilterableEffect and returns a boolean. Only effects returning True are preserved. Overrides default self.filter_fn. `None` passes through to self.filter_fn. Returns ------- effects Dictionary of patient_id to varcode.EffectCollection """ filter_fn = first_not_none_param([filter_fn, self.filter_fn], no_filter) filter_fn_name = self._get_function_name(filter_fn) logger.debug("loading effects with filter_fn {}".format(filter_fn_name)) patient_effects = {} for patient in self.iter_patients(patients): effects = self._load_single_patient_effects( patient, only_nonsynonymous, all_effects, filter_fn, **kwargs) if effects is not None: patient_effects[patient.id] = effects return patient_effects
[ "def", "load_effects", "(", "self", ",", "patients", "=", "None", ",", "only_nonsynonymous", "=", "False", ",", "all_effects", "=", "False", ",", "filter_fn", "=", "None", ",", "*", "*", "kwargs", ")", ":", "filter_fn", "=", "first_not_none_param", "(", "[", "filter_fn", ",", "self", ".", "filter_fn", "]", ",", "no_filter", ")", "filter_fn_name", "=", "self", ".", "_get_function_name", "(", "filter_fn", ")", "logger", ".", "debug", "(", "\"loading effects with filter_fn {}\"", ".", "format", "(", "filter_fn_name", ")", ")", "patient_effects", "=", "{", "}", "for", "patient", "in", "self", ".", "iter_patients", "(", "patients", ")", ":", "effects", "=", "self", ".", "_load_single_patient_effects", "(", "patient", ",", "only_nonsynonymous", ",", "all_effects", ",", "filter_fn", ",", "*", "*", "kwargs", ")", "if", "effects", "is", "not", "None", ":", "patient_effects", "[", "patient", ".", "id", "]", "=", "effects", "return", "patient_effects" ]
45.212121
22.757576
def render(data, saltenv='base', sls='', argline='', **kwargs): # pylint: disable=unused-argument ''' Decrypt the data to be rendered that was encrypted using AWS KMS envelope encryption. ''' translate_newlines = kwargs.get('translate_newlines', False) return _decrypt_object(data, translate_newlines=translate_newlines)
[ "def", "render", "(", "data", ",", "saltenv", "=", "'base'", ",", "sls", "=", "''", ",", "argline", "=", "''", ",", "*", "*", "kwargs", ")", ":", "# pylint: disable=unused-argument", "translate_newlines", "=", "kwargs", ".", "get", "(", "'translate_newlines'", ",", "False", ")", "return", "_decrypt_object", "(", "data", ",", "translate_newlines", "=", "translate_newlines", ")" ]
56
38
def _resolve_default(data_type, listify=False): """Retrieve the default value for a given data type.""" if isinstance(data_type, _ATOMIC): # A Python's object type needs to be left as is instead of being # wrapped into a NumPy type. out = (data_type.default if isinstance(data_type, Object) else _get_atomic_dtype(data_type)(data_type.default)) elif isinstance(data_type, Array): element_default = _resolve_default(data_type.element_type, listify=listify) Sequence = list if listify else tuple shape = ((data_type.shape,) if isinstance(data_type.shape, int) else data_type.shape) out = element_default for dimension in shape: out = Sequence(copy.deepcopy(out) for _ in _range(dimension)) elif isinstance(data_type, Structure): if listify: out = [_resolve_default(field.type, listify=listify) for field in data_type.fields] else: field_defaults = collections.OrderedDict( (field.name, _resolve_default(field.type, listify=listify)) for field in data_type.fields) name = ('StructureDefault_%s' % (data_type.name,) if data_type.name else 'StructureDefault') struct = collections.namedtuple(name, field_defaults.keys()) out = struct(**field_defaults) return out
[ "def", "_resolve_default", "(", "data_type", ",", "listify", "=", "False", ")", ":", "if", "isinstance", "(", "data_type", ",", "_ATOMIC", ")", ":", "# A Python's object type needs to be left as is instead of being", "# wrapped into a NumPy type.", "out", "=", "(", "data_type", ".", "default", "if", "isinstance", "(", "data_type", ",", "Object", ")", "else", "_get_atomic_dtype", "(", "data_type", ")", "(", "data_type", ".", "default", ")", ")", "elif", "isinstance", "(", "data_type", ",", "Array", ")", ":", "element_default", "=", "_resolve_default", "(", "data_type", ".", "element_type", ",", "listify", "=", "listify", ")", "Sequence", "=", "list", "if", "listify", "else", "tuple", "shape", "=", "(", "(", "data_type", ".", "shape", ",", ")", "if", "isinstance", "(", "data_type", ".", "shape", ",", "int", ")", "else", "data_type", ".", "shape", ")", "out", "=", "element_default", "for", "dimension", "in", "shape", ":", "out", "=", "Sequence", "(", "copy", ".", "deepcopy", "(", "out", ")", "for", "_", "in", "_range", "(", "dimension", ")", ")", "elif", "isinstance", "(", "data_type", ",", "Structure", ")", ":", "if", "listify", ":", "out", "=", "[", "_resolve_default", "(", "field", ".", "type", ",", "listify", "=", "listify", ")", "for", "field", "in", "data_type", ".", "fields", "]", "else", ":", "field_defaults", "=", "collections", ".", "OrderedDict", "(", "(", "field", ".", "name", ",", "_resolve_default", "(", "field", ".", "type", ",", "listify", "=", "listify", ")", ")", "for", "field", "in", "data_type", ".", "fields", ")", "name", "=", "(", "'StructureDefault_%s'", "%", "(", "data_type", ".", "name", ",", ")", "if", "data_type", ".", "name", "else", "'StructureDefault'", ")", "struct", "=", "collections", ".", "namedtuple", "(", "name", ",", "field_defaults", ".", "keys", "(", ")", ")", "out", "=", "struct", "(", "*", "*", "field_defaults", ")", "return", "out" ]
48.233333
17.2
def get_cached_moderated_reddits(self): """Return a cached dictionary of the user's moderated reddits. This list is used internally. Consider using the `get_my_moderation` function instead. """ if self._mod_subs is None: self._mod_subs = {'mod': self.reddit_session.get_subreddit('mod')} for sub in self.reddit_session.get_my_moderation(limit=None): self._mod_subs[six.text_type(sub).lower()] = sub return self._mod_subs
[ "def", "get_cached_moderated_reddits", "(", "self", ")", ":", "if", "self", ".", "_mod_subs", "is", "None", ":", "self", ".", "_mod_subs", "=", "{", "'mod'", ":", "self", ".", "reddit_session", ".", "get_subreddit", "(", "'mod'", ")", "}", "for", "sub", "in", "self", ".", "reddit_session", ".", "get_my_moderation", "(", "limit", "=", "None", ")", ":", "self", ".", "_mod_subs", "[", "six", ".", "text_type", "(", "sub", ")", ".", "lower", "(", ")", "]", "=", "sub", "return", "self", ".", "_mod_subs" ]
41.583333
20.333333
def to_csv(self, filename, stimuli=None, inhibitors=None, prepend=""): """ Writes the list of clampings to a CSV file Parameters ---------- filename : str Absolute path where to write the CSV file stimuli : Optional[list[str]] List of stimuli names. If given, stimuli are converted to {0,1} instead of {-1,1}. inhibitors : Optional[list[str]] List of inhibitors names. If given, inhibitors are renamed and converted to {0,1} instead of {-1,1}. prepend : str Columns are renamed using the given string at the beginning """ self.to_dataframe(stimuli, inhibitors, prepend).to_csv(filename, index=False)
[ "def", "to_csv", "(", "self", ",", "filename", ",", "stimuli", "=", "None", ",", "inhibitors", "=", "None", ",", "prepend", "=", "\"\"", ")", ":", "self", ".", "to_dataframe", "(", "stimuli", ",", "inhibitors", ",", "prepend", ")", ".", "to_csv", "(", "filename", ",", "index", "=", "False", ")" ]
37.526316
26.263158
def get_agenda_for_sentence(self, sentence: str) -> List[str]: """ Given a ``sentence``, returns a list of actions the sentence triggers as an ``agenda``. The ``agenda`` can be used while by a parser to guide the decoder. sequences as possible. This is a simplistic mapping at this point, and can be expanded. Parameters ---------- sentence : ``str`` The sentence for which an agenda will be produced. """ agenda = [] sentence = sentence.lower() if sentence.startswith("there is a box") or sentence.startswith("there is a tower "): agenda.append(self.terminal_productions["box_exists"]) elif sentence.startswith("there is a "): agenda.append(self.terminal_productions["object_exists"]) if "<Set[Box]:bool> -> box_exists" not in agenda: # These are object filters and do not apply if we have a box_exists at the top. if "touch" in sentence: if "top" in sentence: agenda.append(self.terminal_productions["touch_top"]) elif "bottom" in sentence or "base" in sentence: agenda.append(self.terminal_productions["touch_bottom"]) elif "corner" in sentence: agenda.append(self.terminal_productions["touch_corner"]) elif "right" in sentence: agenda.append(self.terminal_productions["touch_right"]) elif "left" in sentence: agenda.append(self.terminal_productions["touch_left"]) elif "wall" in sentence or "edge" in sentence: agenda.append(self.terminal_productions["touch_wall"]) else: agenda.append(self.terminal_productions["touch_object"]) else: # The words "top" and "bottom" may be referring to top and bottom blocks in a tower. if "top" in sentence: agenda.append(self.terminal_productions["top"]) elif "bottom" in sentence or "base" in sentence: agenda.append(self.terminal_productions["bottom"]) if " not " in sentence: agenda.append(self.terminal_productions["negate_filter"]) if " contains " in sentence or " has " in sentence: agenda.append(self.terminal_productions["all_boxes"]) # This takes care of shapes, colors, top, bottom, big, small etc. 
for constant, production in self.terminal_productions.items(): # TODO(pradeep): Deal with constant names with underscores. if "top" in constant or "bottom" in constant: # We already dealt with top, bottom, touch_top and touch_bottom above. continue if constant in sentence: if "<Set[Object]:Set[Object]> ->" in production and "<Set[Box]:bool> -> box_exists" in agenda: if constant in ["square", "circle", "triangle"]: agenda.append(self.terminal_productions[f"shape_{constant}"]) elif constant in ["yellow", "blue", "black"]: agenda.append(self.terminal_productions[f"color_{constant}"]) else: continue else: agenda.append(production) # TODO (pradeep): Rules for "member_*" productions ("tower" or "box" followed by a color, # shape or number...) number_productions = self._get_number_productions(sentence) for production in number_productions: agenda.append(production) if not agenda: # None of the rules above was triggered! if "box" in sentence: agenda.append(self.terminal_productions["all_boxes"]) else: agenda.append(self.terminal_productions["all_objects"]) return agenda
[ "def", "get_agenda_for_sentence", "(", "self", ",", "sentence", ":", "str", ")", "->", "List", "[", "str", "]", ":", "agenda", "=", "[", "]", "sentence", "=", "sentence", ".", "lower", "(", ")", "if", "sentence", ".", "startswith", "(", "\"there is a box\"", ")", "or", "sentence", ".", "startswith", "(", "\"there is a tower \"", ")", ":", "agenda", ".", "append", "(", "self", ".", "terminal_productions", "[", "\"box_exists\"", "]", ")", "elif", "sentence", ".", "startswith", "(", "\"there is a \"", ")", ":", "agenda", ".", "append", "(", "self", ".", "terminal_productions", "[", "\"object_exists\"", "]", ")", "if", "\"<Set[Box]:bool> -> box_exists\"", "not", "in", "agenda", ":", "# These are object filters and do not apply if we have a box_exists at the top.", "if", "\"touch\"", "in", "sentence", ":", "if", "\"top\"", "in", "sentence", ":", "agenda", ".", "append", "(", "self", ".", "terminal_productions", "[", "\"touch_top\"", "]", ")", "elif", "\"bottom\"", "in", "sentence", "or", "\"base\"", "in", "sentence", ":", "agenda", ".", "append", "(", "self", ".", "terminal_productions", "[", "\"touch_bottom\"", "]", ")", "elif", "\"corner\"", "in", "sentence", ":", "agenda", ".", "append", "(", "self", ".", "terminal_productions", "[", "\"touch_corner\"", "]", ")", "elif", "\"right\"", "in", "sentence", ":", "agenda", ".", "append", "(", "self", ".", "terminal_productions", "[", "\"touch_right\"", "]", ")", "elif", "\"left\"", "in", "sentence", ":", "agenda", ".", "append", "(", "self", ".", "terminal_productions", "[", "\"touch_left\"", "]", ")", "elif", "\"wall\"", "in", "sentence", "or", "\"edge\"", "in", "sentence", ":", "agenda", ".", "append", "(", "self", ".", "terminal_productions", "[", "\"touch_wall\"", "]", ")", "else", ":", "agenda", ".", "append", "(", "self", ".", "terminal_productions", "[", "\"touch_object\"", "]", ")", "else", ":", "# The words \"top\" and \"bottom\" may be referring to top and bottom blocks in a tower.", "if", "\"top\"", "in", 
"sentence", ":", "agenda", ".", "append", "(", "self", ".", "terminal_productions", "[", "\"top\"", "]", ")", "elif", "\"bottom\"", "in", "sentence", "or", "\"base\"", "in", "sentence", ":", "agenda", ".", "append", "(", "self", ".", "terminal_productions", "[", "\"bottom\"", "]", ")", "if", "\" not \"", "in", "sentence", ":", "agenda", ".", "append", "(", "self", ".", "terminal_productions", "[", "\"negate_filter\"", "]", ")", "if", "\" contains \"", "in", "sentence", "or", "\" has \"", "in", "sentence", ":", "agenda", ".", "append", "(", "self", ".", "terminal_productions", "[", "\"all_boxes\"", "]", ")", "# This takes care of shapes, colors, top, bottom, big, small etc.", "for", "constant", ",", "production", "in", "self", ".", "terminal_productions", ".", "items", "(", ")", ":", "# TODO(pradeep): Deal with constant names with underscores.", "if", "\"top\"", "in", "constant", "or", "\"bottom\"", "in", "constant", ":", "# We already dealt with top, bottom, touch_top and touch_bottom above.", "continue", "if", "constant", "in", "sentence", ":", "if", "\"<Set[Object]:Set[Object]> ->\"", "in", "production", "and", "\"<Set[Box]:bool> -> box_exists\"", "in", "agenda", ":", "if", "constant", "in", "[", "\"square\"", ",", "\"circle\"", ",", "\"triangle\"", "]", ":", "agenda", ".", "append", "(", "self", ".", "terminal_productions", "[", "f\"shape_{constant}\"", "]", ")", "elif", "constant", "in", "[", "\"yellow\"", ",", "\"blue\"", ",", "\"black\"", "]", ":", "agenda", ".", "append", "(", "self", ".", "terminal_productions", "[", "f\"color_{constant}\"", "]", ")", "else", ":", "continue", "else", ":", "agenda", ".", "append", "(", "production", ")", "# TODO (pradeep): Rules for \"member_*\" productions (\"tower\" or \"box\" followed by a color,", "# shape or number...)", "number_productions", "=", "self", ".", "_get_number_productions", "(", "sentence", ")", "for", "production", "in", "number_productions", ":", "agenda", ".", "append", "(", "production", ")", "if", 
"not", "agenda", ":", "# None of the rules above was triggered!", "if", "\"box\"", "in", "sentence", ":", "agenda", ".", "append", "(", "self", ".", "terminal_productions", "[", "\"all_boxes\"", "]", ")", "else", ":", "agenda", ".", "append", "(", "self", ".", "terminal_productions", "[", "\"all_objects\"", "]", ")", "return", "agenda" ]
52.226667
24.866667
def _parse_pot(pot): """Parse the potential so it can be fed to C""" from .integrateFullOrbit import _parse_scf_pot #Figure out what's in pot if not isinstance(pot,list): pot= [pot] #Initialize everything pot_type= [] pot_args= [] npot= len(pot) for p in pot: # Prepare for wrappers NOT CURRENTLY SUPPORTED, SEE PLANAR OR FULL if isinstance(p,verticalPotential) \ and isinstance(p._Pot,potential.MN3ExponentialDiskPotential): # Need to do this one separately, because combination of many parts # Three Miyamoto-Nagai disks npot+= 2 pot_type.extend([5,5,5]) pot_args.extend([p._Pot._amp*p._Pot._mn3[0]._amp, p._Pot._mn3[0]._a,p._Pot._mn3[0]._b, p._R,p._phi, p._Pot._amp*p._Pot._mn3[1]._amp, p._Pot._mn3[1]._a,p._Pot._mn3[1]._b, p._R,p._phi, p._Pot._amp*p._Pot._mn3[2]._amp, p._Pot._mn3[2]._a,p._Pot._mn3[2]._b, p._R,p._phi]) elif isinstance(p,verticalPotential) \ and isinstance(p._Pot,potential.DiskSCFPotential): # Need to do this one separately, because combination of many parts # Need to pull this apart into: (a) SCF part, (b) constituent # [Sigma_i,h_i] parts # (a) SCF, multiply in any add'l amp pt,pa= _parse_scf_pot(p._Pot._scf,extra_amp=p._Pot._amp) pot_type.append(pt) pot_args.extend(pa) pot_args.extend([p._R,p._phi]) # (b) constituent [Sigma_i,h_i] parts for Sigma,hz in zip(p._Pot._Sigma_dict,p._Pot._hz_dict): npot+= 1 pot_type.append(26) stype= Sigma.get('type','exp') if stype == 'exp' \ or (stype == 'exp' and 'Rhole' in Sigma): pot_args.extend([3,0, 4.*nu.pi*Sigma.get('amp',1.)*p._Pot._amp, Sigma.get('h',1./3.)]) elif stype == 'expwhole' \ or (stype == 'exp' and 'Rhole' in Sigma): pot_args.extend([4,1, 4.*nu.pi*Sigma.get('amp',1.)*p._Pot._amp, Sigma.get('h',1./3.), Sigma.get('Rhole',0.5)]) hztype= hz.get('type','exp') if hztype == 'exp': pot_args.extend([0,hz.get('h',0.0375)]) elif hztype == 'sech2': pot_args.extend([1,hz.get('h',0.0375)]) pot_args.extend([p._R,p._phi]) elif isinstance(p,potential.KGPotential): pot_type.append(31) 
pot_args.extend([p._amp,p._K,p._D2,2.*p._F]) elif isinstance(p,potential.IsothermalDiskPotential): pot_type.append(32) pot_args.extend([p._amp*p._sigma2/p._H,2.*p._H]) # All other potentials can be handled in the same way as follows: elif isinstance(p,verticalPotential): _,pt,pa= _parse_pot_full(p._Pot) pot_type.extend(pt) pot_args.extend(pa) pot_args.append(p._R) pot_args.append(p._phi) pot_type= nu.array(pot_type,dtype=nu.int32,order='C') pot_args= nu.array(pot_args,dtype=nu.float64,order='C') return (npot,pot_type,pot_args)
[ "def", "_parse_pot", "(", "pot", ")", ":", "from", ".", "integrateFullOrbit", "import", "_parse_scf_pot", "#Figure out what's in pot", "if", "not", "isinstance", "(", "pot", ",", "list", ")", ":", "pot", "=", "[", "pot", "]", "#Initialize everything", "pot_type", "=", "[", "]", "pot_args", "=", "[", "]", "npot", "=", "len", "(", "pot", ")", "for", "p", "in", "pot", ":", "# Prepare for wrappers NOT CURRENTLY SUPPORTED, SEE PLANAR OR FULL", "if", "isinstance", "(", "p", ",", "verticalPotential", ")", "and", "isinstance", "(", "p", ".", "_Pot", ",", "potential", ".", "MN3ExponentialDiskPotential", ")", ":", "# Need to do this one separately, because combination of many parts", "# Three Miyamoto-Nagai disks", "npot", "+=", "2", "pot_type", ".", "extend", "(", "[", "5", ",", "5", ",", "5", "]", ")", "pot_args", ".", "extend", "(", "[", "p", ".", "_Pot", ".", "_amp", "*", "p", ".", "_Pot", ".", "_mn3", "[", "0", "]", ".", "_amp", ",", "p", ".", "_Pot", ".", "_mn3", "[", "0", "]", ".", "_a", ",", "p", ".", "_Pot", ".", "_mn3", "[", "0", "]", ".", "_b", ",", "p", ".", "_R", ",", "p", ".", "_phi", ",", "p", ".", "_Pot", ".", "_amp", "*", "p", ".", "_Pot", ".", "_mn3", "[", "1", "]", ".", "_amp", ",", "p", ".", "_Pot", ".", "_mn3", "[", "1", "]", ".", "_a", ",", "p", ".", "_Pot", ".", "_mn3", "[", "1", "]", ".", "_b", ",", "p", ".", "_R", ",", "p", ".", "_phi", ",", "p", ".", "_Pot", ".", "_amp", "*", "p", ".", "_Pot", ".", "_mn3", "[", "2", "]", ".", "_amp", ",", "p", ".", "_Pot", ".", "_mn3", "[", "2", "]", ".", "_a", ",", "p", ".", "_Pot", ".", "_mn3", "[", "2", "]", ".", "_b", ",", "p", ".", "_R", ",", "p", ".", "_phi", "]", ")", "elif", "isinstance", "(", "p", ",", "verticalPotential", ")", "and", "isinstance", "(", "p", ".", "_Pot", ",", "potential", ".", "DiskSCFPotential", ")", ":", "# Need to do this one separately, because combination of many parts", "# Need to pull this apart into: (a) SCF part, (b) constituent", "# [Sigma_i,h_i] parts", "# (a) SCF, multiply in 
any add'l amp", "pt", ",", "pa", "=", "_parse_scf_pot", "(", "p", ".", "_Pot", ".", "_scf", ",", "extra_amp", "=", "p", ".", "_Pot", ".", "_amp", ")", "pot_type", ".", "append", "(", "pt", ")", "pot_args", ".", "extend", "(", "pa", ")", "pot_args", ".", "extend", "(", "[", "p", ".", "_R", ",", "p", ".", "_phi", "]", ")", "# (b) constituent [Sigma_i,h_i] parts", "for", "Sigma", ",", "hz", "in", "zip", "(", "p", ".", "_Pot", ".", "_Sigma_dict", ",", "p", ".", "_Pot", ".", "_hz_dict", ")", ":", "npot", "+=", "1", "pot_type", ".", "append", "(", "26", ")", "stype", "=", "Sigma", ".", "get", "(", "'type'", ",", "'exp'", ")", "if", "stype", "==", "'exp'", "or", "(", "stype", "==", "'exp'", "and", "'Rhole'", "in", "Sigma", ")", ":", "pot_args", ".", "extend", "(", "[", "3", ",", "0", ",", "4.", "*", "nu", ".", "pi", "*", "Sigma", ".", "get", "(", "'amp'", ",", "1.", ")", "*", "p", ".", "_Pot", ".", "_amp", ",", "Sigma", ".", "get", "(", "'h'", ",", "1.", "/", "3.", ")", "]", ")", "elif", "stype", "==", "'expwhole'", "or", "(", "stype", "==", "'exp'", "and", "'Rhole'", "in", "Sigma", ")", ":", "pot_args", ".", "extend", "(", "[", "4", ",", "1", ",", "4.", "*", "nu", ".", "pi", "*", "Sigma", ".", "get", "(", "'amp'", ",", "1.", ")", "*", "p", ".", "_Pot", ".", "_amp", ",", "Sigma", ".", "get", "(", "'h'", ",", "1.", "/", "3.", ")", ",", "Sigma", ".", "get", "(", "'Rhole'", ",", "0.5", ")", "]", ")", "hztype", "=", "hz", ".", "get", "(", "'type'", ",", "'exp'", ")", "if", "hztype", "==", "'exp'", ":", "pot_args", ".", "extend", "(", "[", "0", ",", "hz", ".", "get", "(", "'h'", ",", "0.0375", ")", "]", ")", "elif", "hztype", "==", "'sech2'", ":", "pot_args", ".", "extend", "(", "[", "1", ",", "hz", ".", "get", "(", "'h'", ",", "0.0375", ")", "]", ")", "pot_args", ".", "extend", "(", "[", "p", ".", "_R", ",", "p", ".", "_phi", "]", ")", "elif", "isinstance", "(", "p", ",", "potential", ".", "KGPotential", ")", ":", "pot_type", ".", "append", "(", "31", ")", "pot_args", ".", 
"extend", "(", "[", "p", ".", "_amp", ",", "p", ".", "_K", ",", "p", ".", "_D2", ",", "2.", "*", "p", ".", "_F", "]", ")", "elif", "isinstance", "(", "p", ",", "potential", ".", "IsothermalDiskPotential", ")", ":", "pot_type", ".", "append", "(", "32", ")", "pot_args", ".", "extend", "(", "[", "p", ".", "_amp", "*", "p", ".", "_sigma2", "/", "p", ".", "_H", ",", "2.", "*", "p", ".", "_H", "]", ")", "# All other potentials can be handled in the same way as follows:", "elif", "isinstance", "(", "p", ",", "verticalPotential", ")", ":", "_", ",", "pt", ",", "pa", "=", "_parse_pot_full", "(", "p", ".", "_Pot", ")", "pot_type", ".", "extend", "(", "pt", ")", "pot_args", ".", "extend", "(", "pa", ")", "pot_args", ".", "append", "(", "p", ".", "_R", ")", "pot_args", ".", "append", "(", "p", ".", "_phi", ")", "pot_type", "=", "nu", ".", "array", "(", "pot_type", ",", "dtype", "=", "nu", ".", "int32", ",", "order", "=", "'C'", ")", "pot_args", "=", "nu", ".", "array", "(", "pot_args", ",", "dtype", "=", "nu", ".", "float64", ",", "order", "=", "'C'", ")", "return", "(", "npot", ",", "pot_type", ",", "pot_args", ")" ]
47.226667
15.133333
def get_eventhub_info(self): """ Get details on the specified EventHub. Keys in the details dictionary include: -'name' -'type' -'created_at' -'partition_count' -'partition_ids' :rtype: dict """ alt_creds = { "username": self._auth_config.get("iot_username"), "password":self._auth_config.get("iot_password")} try: mgmt_auth = self._create_auth(**alt_creds) mgmt_client = uamqp.AMQPClient(self.mgmt_target, auth=mgmt_auth, debug=self.debug) mgmt_client.open() mgmt_msg = Message(application_properties={'name': self.eh_name}) response = mgmt_client.mgmt_request( mgmt_msg, constants.READ_OPERATION, op_type=b'com.microsoft:eventhub', status_code_field=b'status-code', description_fields=b'status-description') eh_info = response.get_data() output = {} if eh_info: output['name'] = eh_info[b'name'].decode('utf-8') output['type'] = eh_info[b'type'].decode('utf-8') output['created_at'] = datetime.datetime.fromtimestamp(float(eh_info[b'created_at'])/1000) output['partition_count'] = eh_info[b'partition_count'] output['partition_ids'] = [p.decode('utf-8') for p in eh_info[b'partition_ids']] return output finally: mgmt_client.close()
[ "def", "get_eventhub_info", "(", "self", ")", ":", "alt_creds", "=", "{", "\"username\"", ":", "self", ".", "_auth_config", ".", "get", "(", "\"iot_username\"", ")", ",", "\"password\"", ":", "self", ".", "_auth_config", ".", "get", "(", "\"iot_password\"", ")", "}", "try", ":", "mgmt_auth", "=", "self", ".", "_create_auth", "(", "*", "*", "alt_creds", ")", "mgmt_client", "=", "uamqp", ".", "AMQPClient", "(", "self", ".", "mgmt_target", ",", "auth", "=", "mgmt_auth", ",", "debug", "=", "self", ".", "debug", ")", "mgmt_client", ".", "open", "(", ")", "mgmt_msg", "=", "Message", "(", "application_properties", "=", "{", "'name'", ":", "self", ".", "eh_name", "}", ")", "response", "=", "mgmt_client", ".", "mgmt_request", "(", "mgmt_msg", ",", "constants", ".", "READ_OPERATION", ",", "op_type", "=", "b'com.microsoft:eventhub'", ",", "status_code_field", "=", "b'status-code'", ",", "description_fields", "=", "b'status-description'", ")", "eh_info", "=", "response", ".", "get_data", "(", ")", "output", "=", "{", "}", "if", "eh_info", ":", "output", "[", "'name'", "]", "=", "eh_info", "[", "b'name'", "]", ".", "decode", "(", "'utf-8'", ")", "output", "[", "'type'", "]", "=", "eh_info", "[", "b'type'", "]", ".", "decode", "(", "'utf-8'", ")", "output", "[", "'created_at'", "]", "=", "datetime", ".", "datetime", ".", "fromtimestamp", "(", "float", "(", "eh_info", "[", "b'created_at'", "]", ")", "/", "1000", ")", "output", "[", "'partition_count'", "]", "=", "eh_info", "[", "b'partition_count'", "]", "output", "[", "'partition_ids'", "]", "=", "[", "p", ".", "decode", "(", "'utf-8'", ")", "for", "p", "in", "eh_info", "[", "b'partition_ids'", "]", "]", "return", "output", "finally", ":", "mgmt_client", ".", "close", "(", ")" ]
41.27027
19.324324
def get_state(self, scaling_group): """ Returns the current state of the specified scaling group as a dictionary. """ uri = "/%s/%s/state" % (self.uri_base, utils.get_id(scaling_group)) resp, resp_body = self.api.method_get(uri) data = resp_body["group"] ret = {} ret["active"] = [itm["id"] for itm in data["active"]] ret["active_capacity"] = data["activeCapacity"] ret["desired_capacity"] = data["desiredCapacity"] ret["pending_capacity"] = data["pendingCapacity"] ret["paused"] = data["paused"] return ret
[ "def", "get_state", "(", "self", ",", "scaling_group", ")", ":", "uri", "=", "\"/%s/%s/state\"", "%", "(", "self", ".", "uri_base", ",", "utils", ".", "get_id", "(", "scaling_group", ")", ")", "resp", ",", "resp_body", "=", "self", ".", "api", ".", "method_get", "(", "uri", ")", "data", "=", "resp_body", "[", "\"group\"", "]", "ret", "=", "{", "}", "ret", "[", "\"active\"", "]", "=", "[", "itm", "[", "\"id\"", "]", "for", "itm", "in", "data", "[", "\"active\"", "]", "]", "ret", "[", "\"active_capacity\"", "]", "=", "data", "[", "\"activeCapacity\"", "]", "ret", "[", "\"desired_capacity\"", "]", "=", "data", "[", "\"desiredCapacity\"", "]", "ret", "[", "\"pending_capacity\"", "]", "=", "data", "[", "\"pendingCapacity\"", "]", "ret", "[", "\"paused\"", "]", "=", "data", "[", "\"paused\"", "]", "return", "ret" ]
40.333333
15
def tag_add(package, tag, pkghash): """ Add a new tag for a given package hash. Unlike versions, tags can have an arbitrary format, and can be modified and deleted. When a package is pushed, it gets the "latest" tag. """ team, owner, pkg = parse_package(package) session = _get_session(team) session.put( "{url}/api/tag/{owner}/{pkg}/{tag}".format( url=get_registry_url(team), owner=owner, pkg=pkg, tag=tag ), data=json.dumps(dict( hash=_match_hash(package, pkghash) )) )
[ "def", "tag_add", "(", "package", ",", "tag", ",", "pkghash", ")", ":", "team", ",", "owner", ",", "pkg", "=", "parse_package", "(", "package", ")", "session", "=", "_get_session", "(", "team", ")", "session", ".", "put", "(", "\"{url}/api/tag/{owner}/{pkg}/{tag}\"", ".", "format", "(", "url", "=", "get_registry_url", "(", "team", ")", ",", "owner", "=", "owner", ",", "pkg", "=", "pkg", ",", "tag", "=", "tag", ")", ",", "data", "=", "json", ".", "dumps", "(", "dict", "(", "hash", "=", "_match_hash", "(", "package", ",", "pkghash", ")", ")", ")", ")" ]
25.391304
18.26087
def get_all_names(chebi_ids): '''Returns all names''' all_names = [get_names(chebi_id) for chebi_id in chebi_ids] return [x for sublist in all_names for x in sublist]
[ "def", "get_all_names", "(", "chebi_ids", ")", ":", "all_names", "=", "[", "get_names", "(", "chebi_id", ")", "for", "chebi_id", "in", "chebi_ids", "]", "return", "[", "x", "for", "sublist", "in", "all_names", "for", "x", "in", "sublist", "]" ]
43.75
15.75
def get_slice_from_res_id(self, start, end): """Returns a new `Polypeptide` containing the `Residues` in start/end range. Parameters ---------- start : str string representing start residue id (PDB numbering) end : str string representing end residue id (PDB numbering) Returns ------- slice_polymer : Polymer Polymer containing the residue range specified by start-end """ id_dict = {str(m.id): m for m in self._monomers} slice_polymer = Polypeptide( [id_dict[str(x)] for x in range(int(start), int(end) + 1)], self.id) return slice_polymer
[ "def", "get_slice_from_res_id", "(", "self", ",", "start", ",", "end", ")", ":", "id_dict", "=", "{", "str", "(", "m", ".", "id", ")", ":", "m", "for", "m", "in", "self", ".", "_monomers", "}", "slice_polymer", "=", "Polypeptide", "(", "[", "id_dict", "[", "str", "(", "x", ")", "]", "for", "x", "in", "range", "(", "int", "(", "start", ")", ",", "int", "(", "end", ")", "+", "1", ")", "]", ",", "self", ".", "id", ")", "return", "slice_polymer" ]
33.45
21
def _search(self, trie, strings, limit=None): """Search in cache :param strings: list of strings to get from the cache :type strings: str list :param limit: limit search results :type limit: int :rtype: [Resource | Collection] """ results = [trie.has_keys_with_prefix(s) for s in strings] if not any(results): return [] for result, s in zip(results, strings): if result is True: return trie.values(s)[:limit]
[ "def", "_search", "(", "self", ",", "trie", ",", "strings", ",", "limit", "=", "None", ")", ":", "results", "=", "[", "trie", ".", "has_keys_with_prefix", "(", "s", ")", "for", "s", "in", "strings", "]", "if", "not", "any", "(", "results", ")", ":", "return", "[", "]", "for", "result", ",", "s", "in", "zip", "(", "results", ",", "strings", ")", ":", "if", "result", "is", "True", ":", "return", "trie", ".", "values", "(", "s", ")", "[", ":", "limit", "]" ]
32.1875
13.25
def Marginal(self, i, name=''): """Gets the marginal distribution of the indicated variable. i: index of the variable we want Returns: Pmf """ pmf = Pmf(name=name) for vs, prob in self.Items(): pmf.Incr(vs[i], prob) return pmf
[ "def", "Marginal", "(", "self", ",", "i", ",", "name", "=", "''", ")", ":", "pmf", "=", "Pmf", "(", "name", "=", "name", ")", "for", "vs", ",", "prob", "in", "self", ".", "Items", "(", ")", ":", "pmf", ".", "Incr", "(", "vs", "[", "i", "]", ",", "prob", ")", "return", "pmf" ]
26
13.909091
def set_contents(self, stream, progress_callback=None): """Save contents of stream to part of file instance. If a the MultipartObject is completed this methods raises an ``MultipartAlreadyCompleted`` exception. :param stream: File-like stream. :param size: Size of stream if known. :param chunk_size: Desired chunk size to read stream in. It is up to the storage interface if it respects this value. """ size, checksum = self.multipart.file.update_contents( stream, seek=self.start_byte, size=self.part_size, progress_callback=progress_callback, ) self.checksum = checksum return self
[ "def", "set_contents", "(", "self", ",", "stream", ",", "progress_callback", "=", "None", ")", ":", "size", ",", "checksum", "=", "self", ".", "multipart", ".", "file", ".", "update_contents", "(", "stream", ",", "seek", "=", "self", ".", "start_byte", ",", "size", "=", "self", ".", "part_size", ",", "progress_callback", "=", "progress_callback", ",", ")", "self", ".", "checksum", "=", "checksum", "return", "self" ]
40.823529
17.823529
def _get_element_text(self, element): """ Return the textual content of the element and its children """ text = '' if element.text is not None: text += element.text for child in element.getchildren(): text += self._get_element_text(child) if element.tail is not None: text += element.tail return text
[ "def", "_get_element_text", "(", "self", ",", "element", ")", ":", "text", "=", "''", "if", "element", ".", "text", "is", "not", "None", ":", "text", "+=", "element", ".", "text", "for", "child", "in", "element", ".", "getchildren", "(", ")", ":", "text", "+=", "self", ".", "_get_element_text", "(", "child", ")", "if", "element", ".", "tail", "is", "not", "None", ":", "text", "+=", "element", ".", "tail", "return", "text" ]
32.416667
9.083333
def added(self, context): """Ingredient method called before anything else.""" context.ansi = ANSIFormatter(self._enable) context.aprint = context.ansi.aprint
[ "def", "added", "(", "self", ",", "context", ")", ":", "context", ".", "ansi", "=", "ANSIFormatter", "(", "self", ".", "_enable", ")", "context", ".", "aprint", "=", "context", ".", "ansi", ".", "aprint" ]
44.75
7.25
def build(ctx, project, build): # pylint:disable=redefined-outer-name """Commands for build jobs.""" ctx.obj = ctx.obj or {} ctx.obj['project'] = project ctx.obj['build'] = build
[ "def", "build", "(", "ctx", ",", "project", ",", "build", ")", ":", "# pylint:disable=redefined-outer-name", "ctx", ".", "obj", "=", "ctx", ".", "obj", "or", "{", "}", "ctx", ".", "obj", "[", "'project'", "]", "=", "project", "ctx", ".", "obj", "[", "'build'", "]", "=", "build" ]
38.2
12.6
def drop_trailing_zeros(num): """ Drops the trailing zeros in a float that is printed. """ txt = '%f' %(num) txt = txt.rstrip('0') if txt.endswith('.'): txt = txt[:-1] return txt
[ "def", "drop_trailing_zeros", "(", "num", ")", ":", "txt", "=", "'%f'", "%", "(", "num", ")", "txt", "=", "txt", ".", "rstrip", "(", "'0'", ")", "if", "txt", ".", "endswith", "(", "'.'", ")", ":", "txt", "=", "txt", "[", ":", "-", "1", "]", "return", "txt" ]
22.888889
13.333333
def str2dict(dotted_str, value=None, separator='.'): """ Convert dotted string to dict splitting by :separator: """ dict_ = {} parts = dotted_str.split(separator) d, prev = dict_, None for part in parts: prev = d d = d.setdefault(part, {}) else: if value is not None: prev[part] = value return dict_
[ "def", "str2dict", "(", "dotted_str", ",", "value", "=", "None", ",", "separator", "=", "'.'", ")", ":", "dict_", "=", "{", "}", "parts", "=", "dotted_str", ".", "split", "(", "separator", ")", "d", ",", "prev", "=", "dict_", ",", "None", "for", "part", "in", "parts", ":", "prev", "=", "d", "d", "=", "d", ".", "setdefault", "(", "part", ",", "{", "}", ")", "else", ":", "if", "value", "is", "not", "None", ":", "prev", "[", "part", "]", "=", "value", "return", "dict_" ]
29.333333
14.833333
def replace_col(self, line, ndx): """ replace a grids column at index 'ndx' with 'line' """ for row in range(len(line)): self.set_tile(row, ndx, line[row])
[ "def", "replace_col", "(", "self", ",", "line", ",", "ndx", ")", ":", "for", "row", "in", "range", "(", "len", "(", "line", ")", ")", ":", "self", ".", "set_tile", "(", "row", ",", "ndx", ",", "line", "[", "row", "]", ")" ]
32.666667
5.833333
def remote_url(connector, env, repo, filename): """ return a str containing a link to the rpm in the pulp repository """ dl_base = connector.base_url.replace('/pulp/api/v2', '/pulp/repos') repoid = '%s-%s' % (repo, env) _r = connector.get('/repositories/%s/' % repoid) if not _r.status_code == Constants.PULP_GET_OK: # maybe the repo name is the repoid _r = connector.get('/repositories/%s/' % repo) if not _r.status_code == Constants.PULP_GET_OK: raise JuicerPulpError("%s was not found as a repoid. Status code %s returned by pulp" % \ (repoid, _r.status_code)) repo = juicer.utils.load_json_str(_r.content)['display_name'] link = '%s/%s/%s/%s' % (dl_base, env, repo, filename) return link
[ "def", "remote_url", "(", "connector", ",", "env", ",", "repo", ",", "filename", ")", ":", "dl_base", "=", "connector", ".", "base_url", ".", "replace", "(", "'/pulp/api/v2'", ",", "'/pulp/repos'", ")", "repoid", "=", "'%s-%s'", "%", "(", "repo", ",", "env", ")", "_r", "=", "connector", ".", "get", "(", "'/repositories/%s/'", "%", "repoid", ")", "if", "not", "_r", ".", "status_code", "==", "Constants", ".", "PULP_GET_OK", ":", "# maybe the repo name is the repoid", "_r", "=", "connector", ".", "get", "(", "'/repositories/%s/'", "%", "repo", ")", "if", "not", "_r", ".", "status_code", "==", "Constants", ".", "PULP_GET_OK", ":", "raise", "JuicerPulpError", "(", "\"%s was not found as a repoid. Status code %s returned by pulp\"", "%", "(", "repoid", ",", "_r", ".", "status_code", ")", ")", "repo", "=", "juicer", ".", "utils", ".", "load_json_str", "(", "_r", ".", "content", ")", "[", "'display_name'", "]", "link", "=", "'%s/%s/%s/%s'", "%", "(", "dl_base", ",", "env", ",", "repo", ",", "filename", ")", "return", "link" ]
36.761905
21.904762
def save_password(entry, password, username=None): """ Saves the given password in the user's keychain. :param entry: The entry in the keychain. This is a caller specific key. :param password: The password to save in the keychain. :param username: The username to get the password for. Default is the current user. """ if username is None: username = get_username() has_keychain = initialize_keychain() if has_keychain: try: keyring.set_password(entry, username, password) except Exception as e: log.warn("Unable to set password in keyring. Continuing..") log.debug(e)
[ "def", "save_password", "(", "entry", ",", "password", ",", "username", "=", "None", ")", ":", "if", "username", "is", "None", ":", "username", "=", "get_username", "(", ")", "has_keychain", "=", "initialize_keychain", "(", ")", "if", "has_keychain", ":", "try", ":", "keyring", ".", "set_password", "(", "entry", ",", "username", ",", "password", ")", "except", "Exception", "as", "e", ":", "log", ".", "warn", "(", "\"Unable to set password in keyring. Continuing..\"", ")", "log", ".", "debug", "(", "e", ")" ]
32.85
21.85
def _clear_dead_entities(self): """Finalize deletion of any Entities that are marked dead. In the interest of performance, this method duplicates code from the `delete_entity` method. If that method is changed, those changes should be duplicated here as well. """ for entity in self._dead_entities: for component_type in self._entities[entity]: self._components[component_type].discard(entity) if not self._components[component_type]: del self._components[component_type] del self._entities[entity] self._dead_entities.clear() self.clear_cache()
[ "def", "_clear_dead_entities", "(", "self", ")", ":", "for", "entity", "in", "self", ".", "_dead_entities", ":", "for", "component_type", "in", "self", ".", "_entities", "[", "entity", "]", ":", "self", ".", "_components", "[", "component_type", "]", ".", "discard", "(", "entity", ")", "if", "not", "self", ".", "_components", "[", "component_type", "]", ":", "del", "self", ".", "_components", "[", "component_type", "]", "del", "self", ".", "_entities", "[", "entity", "]", "self", ".", "_dead_entities", ".", "clear", "(", ")", "self", ".", "clear_cache", "(", ")" ]
35.789474
19.842105
def management_policies(self): """Instance depends on the API version: * 2018-07-01: :class:`ManagementPoliciesOperations<azure.mgmt.storage.v2018_07_01.operations.ManagementPoliciesOperations>` """ api_version = self._get_api_version('management_policies') if api_version == '2018-07-01': from .v2018_07_01.operations import ManagementPoliciesOperations as OperationClass else: raise NotImplementedError("APIVersion {} is not available".format(api_version)) return OperationClass(self._client, self.config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version)))
[ "def", "management_policies", "(", "self", ")", ":", "api_version", "=", "self", ".", "_get_api_version", "(", "'management_policies'", ")", "if", "api_version", "==", "'2018-07-01'", ":", "from", ".", "v2018_07_01", ".", "operations", "import", "ManagementPoliciesOperations", "as", "OperationClass", "else", ":", "raise", "NotImplementedError", "(", "\"APIVersion {} is not available\"", ".", "format", "(", "api_version", ")", ")", "return", "OperationClass", "(", "self", ".", "_client", ",", "self", ".", "config", ",", "Serializer", "(", "self", ".", "_models_dict", "(", "api_version", ")", ")", ",", "Deserializer", "(", "self", ".", "_models_dict", "(", "api_version", ")", ")", ")" ]
61
37.181818
def print_gce_info(zone, project, instance_name, data): """ outputs information about our Rackspace instance """ try: instance_info = _get_gce_compute().instances().get( project=project, zone=zone, instance=instance_name ).execute() log_yellow(pformat(instance_info)) log_green("Instance state: %s" % instance_info['status']) log_green("Ip address: %s" % data['ip_address']) except HttpError as e: if e.resp.status != 404: raise e log_yellow("Instance state: DOWN") log_green("project: %s" % project) log_green("zone: %s" % zone) log_green("disk_name: %s" % instance_name) log_green("user: %s" % data['username']) log_green("ssh -i %s %s@%s" % (env.key_filename, data['username'], data['ip_address']))
[ "def", "print_gce_info", "(", "zone", ",", "project", ",", "instance_name", ",", "data", ")", ":", "try", ":", "instance_info", "=", "_get_gce_compute", "(", ")", ".", "instances", "(", ")", ".", "get", "(", "project", "=", "project", ",", "zone", "=", "zone", ",", "instance", "=", "instance_name", ")", ".", "execute", "(", ")", "log_yellow", "(", "pformat", "(", "instance_info", ")", ")", "log_green", "(", "\"Instance state: %s\"", "%", "instance_info", "[", "'status'", "]", ")", "log_green", "(", "\"Ip address: %s\"", "%", "data", "[", "'ip_address'", "]", ")", "except", "HttpError", "as", "e", ":", "if", "e", ".", "resp", ".", "status", "!=", "404", ":", "raise", "e", "log_yellow", "(", "\"Instance state: DOWN\"", ")", "log_green", "(", "\"project: %s\"", "%", "project", ")", "log_green", "(", "\"zone: %s\"", "%", "zone", ")", "log_green", "(", "\"disk_name: %s\"", "%", "instance_name", ")", "log_green", "(", "\"user: %s\"", "%", "data", "[", "'username'", "]", ")", "log_green", "(", "\"ssh -i %s %s@%s\"", "%", "(", "env", ".", "key_filename", ",", "data", "[", "'username'", "]", ",", "data", "[", "'ip_address'", "]", ")", ")" ]
40.272727
12.272727
def _token_at_col_in_line(line, column, token, token_len=None): """True if token is at column.""" if not token_len: token_len = len(token) remaining_len = len(line) - column return (remaining_len >= token_len and line[column:column + token_len] == token)
[ "def", "_token_at_col_in_line", "(", "line", ",", "column", ",", "token", ",", "token_len", "=", "None", ")", ":", "if", "not", "token_len", ":", "token_len", "=", "len", "(", "token", ")", "remaining_len", "=", "len", "(", "line", ")", "-", "column", "return", "(", "remaining_len", ">=", "token_len", "and", "line", "[", "column", ":", "column", "+", "token_len", "]", "==", "token", ")" ]
31.555556
16.555556