Dataset Preview Go to dataset viewer
repository_name (string)func_path_in_repository (string)func_name (string)whole_func_string (string)language (string)func_code_string (string)func_code_tokens (json)func_documentation_string (string)func_documentation_tokens (json)split_name (string)func_code_url (string)
mjirik/imcut
imcut/pycut.py
ImageGraphCut.__msgc_step3_discontinuity_localization
def __msgc_step3_discontinuity_localization(self):
    """
    Estimate discontinuity in basis of low resolution image segmentation.

    :return: binary ndarray in low resolution marking the neighborhood
        (dilated border) of the segmentation discontinuity
    """
    # Import the submodules explicitly: a bare ``import scipy`` does not
    # guarantee that ``scipy.ndimage`` or ``scipy.stats`` are loaded.
    import scipy.ndimage
    import scipy.stats

    start = self._start_time
    seg = 1 - self.segmentation.astype(np.int8)
    self.stats["low level object voxels"] = np.sum(seg)
    self.stats["low level image voxels"] = np.prod(seg.shape)
    # in seg is now stored low resolution segmentation
    # step 2: discontinuity localization
    # The Laplace filter is non-zero exactly where the low-res labels change.
    seg_border = scipy.ndimage.laplace(seg, mode="constant")
    logger.debug("seg_border: %s", scipy.stats.describe(seg_border, axis=None))
    seg_border[seg_border != 0] = 1
    logger.debug("seg_border: %s", scipy.stats.describe(seg_border, axis=None))
    boundary_dilatation_distance = self.segparams["boundary_dilatation_distance"]
    # Grow the border by the configured distance with a cubic structuring
    # element of edge (2 * distance + 1).
    edge = 2 * boundary_dilatation_distance + 1
    seg = scipy.ndimage.binary_dilation(seg_border, np.ones([edge] * 3))
    if self.keep_temp_properties:
        self.temp_msgc_lowres_discontinuity = seg
    else:
        self.temp_msgc_lowres_discontinuity = None

    if self.debug_images:
        import sed3

        pd = sed3.sed3(seg_border)  # ), contour=seg)
        pd.show()
        pd = sed3.sed3(seg)  # ), contour=seg)
        pd.show()
    self.stats["t3"] = time.time() - start
    return seg
python
def __msgc_step3_discontinuity_localization(self): """ Estimate discontinuity in basis of low resolution image segmentation. :return: discontinuity in low resolution """ import scipy start = self._start_time seg = 1 - self.segmentation.astype(np.int8) self.stats["low level object voxels"] = np.sum(seg) self.stats["low level image voxels"] = np.prod(seg.shape) # in seg is now stored low resolution segmentation # back to normal parameters # step 2: discontinuity localization # self.segparams = sparams_hi seg_border = scipy.ndimage.filters.laplace(seg, mode="constant") logger.debug("seg_border: %s", scipy.stats.describe(seg_border, axis=None)) # logger.debug(str(np.max(seg_border))) # logger.debug(str(np.min(seg_border))) seg_border[seg_border != 0] = 1 logger.debug("seg_border: %s", scipy.stats.describe(seg_border, axis=None)) # scipy.ndimage.morphology.distance_transform_edt boundary_dilatation_distance = self.segparams["boundary_dilatation_distance"] seg = scipy.ndimage.morphology.binary_dilation( seg_border, # seg, np.ones( [ (boundary_dilatation_distance * 2) + 1, (boundary_dilatation_distance * 2) + 1, (boundary_dilatation_distance * 2) + 1, ] ), ) if self.keep_temp_properties: self.temp_msgc_lowres_discontinuity = seg else: self.temp_msgc_lowres_discontinuity = None if self.debug_images: import sed3 pd = sed3.sed3(seg_border) # ), contour=seg) pd.show() pd = sed3.sed3(seg) # ), contour=seg) pd.show() # segzoom = scipy.ndimage.interpolation.zoom(seg.astype('float'), zoom, # order=0).astype('int8') self.stats["t3"] = time.time() - start return seg
[ "def", "__msgc_step3_discontinuity_localization", "(", "self", ")", ":", "import", "scipy", "start", "=", "self", ".", "_start_time", "seg", "=", "1", "-", "self", ".", "segmentation", ".", "astype", "(", "np", ".", "int8", ")", "self", ".", "stats", "[", "\"low level object voxels\"", "]", "=", "np", ".", "sum", "(", "seg", ")", "self", ".", "stats", "[", "\"low level image voxels\"", "]", "=", "np", ".", "prod", "(", "seg", ".", "shape", ")", "# in seg is now stored low resolution segmentation", "# back to normal parameters", "# step 2: discontinuity localization", "# self.segparams = sparams_hi", "seg_border", "=", "scipy", ".", "ndimage", ".", "filters", ".", "laplace", "(", "seg", ",", "mode", "=", "\"constant\"", ")", "logger", ".", "debug", "(", "\"seg_border: %s\"", ",", "scipy", ".", "stats", ".", "describe", "(", "seg_border", ",", "axis", "=", "None", ")", ")", "# logger.debug(str(np.max(seg_border)))", "# logger.debug(str(np.min(seg_border)))", "seg_border", "[", "seg_border", "!=", "0", "]", "=", "1", "logger", ".", "debug", "(", "\"seg_border: %s\"", ",", "scipy", ".", "stats", ".", "describe", "(", "seg_border", ",", "axis", "=", "None", ")", ")", "# scipy.ndimage.morphology.distance_transform_edt", "boundary_dilatation_distance", "=", "self", ".", "segparams", "[", "\"boundary_dilatation_distance\"", "]", "seg", "=", "scipy", ".", "ndimage", ".", "morphology", ".", "binary_dilation", "(", "seg_border", ",", "# seg,", "np", ".", "ones", "(", "[", "(", "boundary_dilatation_distance", "*", "2", ")", "+", "1", ",", "(", "boundary_dilatation_distance", "*", "2", ")", "+", "1", ",", "(", "boundary_dilatation_distance", "*", "2", ")", "+", "1", ",", "]", ")", ",", ")", "if", "self", ".", "keep_temp_properties", ":", "self", ".", "temp_msgc_lowres_discontinuity", "=", "seg", "else", ":", "self", ".", "temp_msgc_lowres_discontinuity", "=", "None", "if", "self", ".", "debug_images", ":", "import", "sed3", "pd", "=", "sed3", ".", "sed3", "(", "seg_border", 
")", "# ), contour=seg)", "pd", ".", "show", "(", ")", "pd", "=", "sed3", ".", "sed3", "(", "seg", ")", "# ), contour=seg)", "pd", ".", "show", "(", ")", "# segzoom = scipy.ndimage.interpolation.zoom(seg.astype('float'), zoom,", "# order=0).astype('int8')", "self", ".", "stats", "[", "\"t3\"", "]", "=", "time", ".", "time", "(", ")", "-", "start", "return", "seg" ]
Estimate discontinuity in basis of low resolution image segmentation. :return: discontinuity in low resolution
[ "Estimate", "discontinuity", "in", "basis", "of", "low", "resolution", "image", "segmentation", ".", ":", "return", ":", "discontinuity", "in", "low", "resolution" ]
train
https://github.com/mjirik/imcut/blob/1b38e7cd18a7a38fe683c1cabe1222fe5fa03aa3/imcut/pycut.py#L323-L372
mjirik/imcut
imcut/pycut.py
ImageGraphCut.__multiscale_gc_lo2hi_run
def __multiscale_gc_lo2hi_run(self):  # , pyed):
    """
    Run Graph-Cut segmentation with refinement of low resolution multiscale graph.

    In first step is performed normal GC on low resolution data.
    Second step construct finer grid on edges of segmentation from first step.
    There is no option for use without `use_boundary_penalties`.
    """
    self._msgc_lo2hi_resize_init()
    self.__msgc_step0_init()
    hard_constraints = self.__msgc_step12_low_resolution_segmentation()
    # ===== high resolution data processing
    seg = self.__msgc_step3_discontinuity_localization()
    self.stats["t3.1"] = time.time() - self._start_time
    graph = Graph(
        seg,
        voxelsize=self.voxelsize,
        nsplit=self.segparams["block_size"],
        edge_weight_table=self._msgc_npenalty_table,
        compute_low_nodes_index=True,
    )
    # graph.run() = graph.generate_base_grid() + graph.split_voxels()
    graph.generate_base_grid()
    self.stats["t3.2"] = time.time() - self._start_time
    graph.split_voxels()
    self.stats["t3.3"] = time.time() - self._start_time
    self.stats.update(graph.stats)
    self.stats["t4"] = time.time() - self._start_time
    # Called before __create_tlinks; its returned multipliers are deliberately
    # not used below (mul_mask/mul_val are passed as None) — presumably kept
    # for its statistics side effects; TODO confirm against the callee.
    self.__msgc_tlinks_area_weight_from_low_segmentation(seg)
    unariesalt = self.__create_tlinks(
        self.img,
        self.voxelsize,
        self.seeds,
        area_weight=1,
        hard_constraints=hard_constraints,
        mul_mask=None,
        mul_val=None,
    )
    # N-links prepared
    self.stats["t5"] = time.time() - self._start_time
    # First occurrence of every multiscale index selects its representative
    # voxel in the flattened unary array.
    _, ind = np.unique(graph.msinds, return_index=True)
    self.stats["t6"] = time.time() - self._start_time
    self.stats["t7"] = time.time() - self._start_time
    unariesalt2_lo2hi = np.hstack(
        [unariesalt[ind, 0, 0].reshape(-1, 1), unariesalt[ind, 0, 1].reshape(-1, 1)]
    )
    nlinks_lo2hi = np.hstack([graph.edges, graph.edges_weights.reshape(-1, 1)])
    if self.debug_images:
        import sed3

        ed = sed3.sed3(unariesalt[:, :, 0].reshape(self.img.shape))
        ed.show()
        ed = sed3.sed3(unariesalt[:, :, 1].reshape(self.img.shape))
        ed.show()
    self.__msgc_step9_finish_perform_gc_and_reshape(
        nlinks_lo2hi, unariesalt2_lo2hi, graph.msinds
    )
    self._msgc_lo2hi_resize_clean_finish()
python
def __multiscale_gc_lo2hi_run(self): # , pyed): """ Run Graph-Cut segmentation with refinement of low resolution multiscale graph. In first step is performed normal GC on low resolution data Second step construct finer grid on edges of segmentation from first step. There is no option for use without `use_boundary_penalties` """ # from PyQt4.QtCore import pyqtRemoveInputHook # pyqtRemoveInputHook() self._msgc_lo2hi_resize_init() self.__msgc_step0_init() hard_constraints = self.__msgc_step12_low_resolution_segmentation() # ===== high resolution data processing seg = self.__msgc_step3_discontinuity_localization() self.stats["t3.1"] = (time.time() - self._start_time) graph = Graph( seg, voxelsize=self.voxelsize, nsplit=self.segparams["block_size"], edge_weight_table=self._msgc_npenalty_table, compute_low_nodes_index=True, ) # graph.run() = graph.generate_base_grid() + graph.split_voxels() # graph.run() graph.generate_base_grid() self.stats["t3.2"] = (time.time() - self._start_time) graph.split_voxels() self.stats["t3.3"] = (time.time() - self._start_time) self.stats.update(graph.stats) self.stats["t4"] = (time.time() - self._start_time) mul_mask, mul_val = self.__msgc_tlinks_area_weight_from_low_segmentation(seg) area_weight = 1 unariesalt = self.__create_tlinks( self.img, self.voxelsize, self.seeds, area_weight=area_weight, hard_constraints=hard_constraints, mul_mask=None, mul_val=None, ) # N-links prepared self.stats["t5"] = (time.time() - self._start_time) un, ind = np.unique(graph.msinds, return_index=True) self.stats["t6"] = (time.time() - self._start_time) self.stats["t7"] = (time.time() - self._start_time) unariesalt2_lo2hi = np.hstack( [unariesalt[ind, 0, 0].reshape(-1, 1), unariesalt[ind, 0, 1].reshape(-1, 1)] ) nlinks_lo2hi = np.hstack([graph.edges, graph.edges_weights.reshape(-1, 1)]) if self.debug_images: import sed3 ed = sed3.sed3(unariesalt[:, :, 0].reshape(self.img.shape)) ed.show() import sed3 ed = sed3.sed3(unariesalt[:, :, 1].reshape(self.img.shape)) 
ed.show() # ed = sed3.sed3(seg) # ed.show() # import sed3 # ed = sed3.sed3(graph.data) # ed.show() # import sed3 # ed = sed3.sed3(graph.msinds) # ed.show() # nlinks, unariesalt2, msinds = self.__msgc_step45678_construct_graph(area_weight, hard_constraints, seg) # self.__msgc_step9_finish_perform_gc_and_reshape(nlinks, unariesalt2, msinds) self.__msgc_step9_finish_perform_gc_and_reshape( nlinks_lo2hi, unariesalt2_lo2hi, graph.msinds ) self._msgc_lo2hi_resize_clean_finish()
[ "def", "__multiscale_gc_lo2hi_run", "(", "self", ")", ":", "# , pyed):", "# from PyQt4.QtCore import pyqtRemoveInputHook", "# pyqtRemoveInputHook()", "self", ".", "_msgc_lo2hi_resize_init", "(", ")", "self", ".", "__msgc_step0_init", "(", ")", "hard_constraints", "=", "self", ".", "__msgc_step12_low_resolution_segmentation", "(", ")", "# ===== high resolution data processing", "seg", "=", "self", ".", "__msgc_step3_discontinuity_localization", "(", ")", "self", ".", "stats", "[", "\"t3.1\"", "]", "=", "(", "time", ".", "time", "(", ")", "-", "self", ".", "_start_time", ")", "graph", "=", "Graph", "(", "seg", ",", "voxelsize", "=", "self", ".", "voxelsize", ",", "nsplit", "=", "self", ".", "segparams", "[", "\"block_size\"", "]", ",", "edge_weight_table", "=", "self", ".", "_msgc_npenalty_table", ",", "compute_low_nodes_index", "=", "True", ",", ")", "# graph.run() = graph.generate_base_grid() + graph.split_voxels()", "# graph.run()", "graph", ".", "generate_base_grid", "(", ")", "self", ".", "stats", "[", "\"t3.2\"", "]", "=", "(", "time", ".", "time", "(", ")", "-", "self", ".", "_start_time", ")", "graph", ".", "split_voxels", "(", ")", "self", ".", "stats", "[", "\"t3.3\"", "]", "=", "(", "time", ".", "time", "(", ")", "-", "self", ".", "_start_time", ")", "self", ".", "stats", ".", "update", "(", "graph", ".", "stats", ")", "self", ".", "stats", "[", "\"t4\"", "]", "=", "(", "time", ".", "time", "(", ")", "-", "self", ".", "_start_time", ")", "mul_mask", ",", "mul_val", "=", "self", ".", "__msgc_tlinks_area_weight_from_low_segmentation", "(", "seg", ")", "area_weight", "=", "1", "unariesalt", "=", "self", ".", "__create_tlinks", "(", "self", ".", "img", ",", "self", ".", "voxelsize", ",", "self", ".", "seeds", ",", "area_weight", "=", "area_weight", ",", "hard_constraints", "=", "hard_constraints", ",", "mul_mask", "=", "None", ",", "mul_val", "=", "None", ",", ")", "# N-links prepared", "self", ".", "stats", "[", "\"t5\"", "]", "=", "(", "time", ".", "time", 
"(", ")", "-", "self", ".", "_start_time", ")", "un", ",", "ind", "=", "np", ".", "unique", "(", "graph", ".", "msinds", ",", "return_index", "=", "True", ")", "self", ".", "stats", "[", "\"t6\"", "]", "=", "(", "time", ".", "time", "(", ")", "-", "self", ".", "_start_time", ")", "self", ".", "stats", "[", "\"t7\"", "]", "=", "(", "time", ".", "time", "(", ")", "-", "self", ".", "_start_time", ")", "unariesalt2_lo2hi", "=", "np", ".", "hstack", "(", "[", "unariesalt", "[", "ind", ",", "0", ",", "0", "]", ".", "reshape", "(", "-", "1", ",", "1", ")", ",", "unariesalt", "[", "ind", ",", "0", ",", "1", "]", ".", "reshape", "(", "-", "1", ",", "1", ")", "]", ")", "nlinks_lo2hi", "=", "np", ".", "hstack", "(", "[", "graph", ".", "edges", ",", "graph", ".", "edges_weights", ".", "reshape", "(", "-", "1", ",", "1", ")", "]", ")", "if", "self", ".", "debug_images", ":", "import", "sed3", "ed", "=", "sed3", ".", "sed3", "(", "unariesalt", "[", ":", ",", ":", ",", "0", "]", ".", "reshape", "(", "self", ".", "img", ".", "shape", ")", ")", "ed", ".", "show", "(", ")", "import", "sed3", "ed", "=", "sed3", ".", "sed3", "(", "unariesalt", "[", ":", ",", ":", ",", "1", "]", ".", "reshape", "(", "self", ".", "img", ".", "shape", ")", ")", "ed", ".", "show", "(", ")", "# ed = sed3.sed3(seg)", "# ed.show()", "# import sed3", "# ed = sed3.sed3(graph.data)", "# ed.show()", "# import sed3", "# ed = sed3.sed3(graph.msinds)", "# ed.show()", "# nlinks, unariesalt2, msinds = self.__msgc_step45678_construct_graph(area_weight, hard_constraints, seg)", "# self.__msgc_step9_finish_perform_gc_and_reshape(nlinks, unariesalt2, msinds)", "self", ".", "__msgc_step9_finish_perform_gc_and_reshape", "(", "nlinks_lo2hi", ",", "unariesalt2_lo2hi", ",", "graph", ".", "msinds", ")", "self", ".", "_msgc_lo2hi_resize_clean_finish", "(", ")" ]
Run Graph-Cut segmentation with refinement of low resolution multiscale graph. In first step is performed normal GC on low resolution data Second step construct finer grid on edges of segmentation from first step. There is no option for use without `use_boundary_penalties`
[ "Run", "Graph", "-", "Cut", "segmentation", "with", "refinement", "of", "low", "resolution", "multiscale", "graph", ".", "In", "first", "step", "is", "performed", "normal", "GC", "on", "low", "resolution", "data", "Second", "step", "construct", "finer", "grid", "on", "edges", "of", "segmentation", "from", "first", "step", ".", "There", "is", "no", "option", "for", "use", "without", "use_boundary_penalties" ]
train
https://github.com/mjirik/imcut/blob/1b38e7cd18a7a38fe683c1cabe1222fe5fa03aa3/imcut/pycut.py#L526-L606
mjirik/imcut
imcut/pycut.py
ImageGraphCut.__multiscale_gc_hi2lo_run
def __multiscale_gc_hi2lo_run(self):  # , pyed):
    """
    Run Graph-Cut segmentation with simplifying of high resolution multiscale graph.

    A normal graph cut is first computed on low resolution data; a finer
    grid is then constructed along the segmentation boundary found in the
    first step. There is no option for use without `use_boundary_penalties`.
    """
    self.__msgc_step0_init()
    hard_constraints = self.__msgc_step12_low_resolution_segmentation()
    # ===== high resolution data processing
    lowres_seg = self.__msgc_step3_discontinuity_localization()
    nlinks, unaries, msinds = self.__msgc_step45678_hi2lo_construct_graph(
        hard_constraints, lowres_seg
    )
    self.__msgc_step9_finish_perform_gc_and_reshape(nlinks, unaries, msinds)
python
def __multiscale_gc_hi2lo_run(self): # , pyed): """ Run Graph-Cut segmentation with simplifiyng of high resolution multiscale graph. In first step is performed normal GC on low resolution data Second step construct finer grid on edges of segmentation from first step. There is no option for use without `use_boundary_penalties` """ # from PyQt4.QtCore import pyqtRemoveInputHook # pyqtRemoveInputHook() self.__msgc_step0_init() hard_constraints = self.__msgc_step12_low_resolution_segmentation() # ===== high resolution data processing seg = self.__msgc_step3_discontinuity_localization() nlinks, unariesalt2, msinds = self.__msgc_step45678_hi2lo_construct_graph( hard_constraints, seg ) self.__msgc_step9_finish_perform_gc_and_reshape(nlinks, unariesalt2, msinds)
[ "def", "__multiscale_gc_hi2lo_run", "(", "self", ")", ":", "# , pyed):", "# from PyQt4.QtCore import pyqtRemoveInputHook", "# pyqtRemoveInputHook()", "self", ".", "__msgc_step0_init", "(", ")", "hard_constraints", "=", "self", ".", "__msgc_step12_low_resolution_segmentation", "(", ")", "# ===== high resolution data processing", "seg", "=", "self", ".", "__msgc_step3_discontinuity_localization", "(", ")", "nlinks", ",", "unariesalt2", ",", "msinds", "=", "self", ".", "__msgc_step45678_hi2lo_construct_graph", "(", "hard_constraints", ",", "seg", ")", "self", ".", "__msgc_step9_finish_perform_gc_and_reshape", "(", "nlinks", ",", "unariesalt2", ",", "msinds", ")" ]
Run Graph-Cut segmentation with simplifying of high resolution multiscale graph. In first step is performed normal GC on low resolution data Second step construct finer grid on edges of segmentation from first step. There is no option for use without `use_boundary_penalties`
[ "Run", "Graph", "-", "Cut", "segmentation", "with", "simplifiyng", "of", "high", "resolution", "multiscale", "graph", ".", "In", "first", "step", "is", "performed", "normal", "GC", "on", "low", "resolution", "data", "Second", "step", "construct", "finer", "grid", "on", "edges", "of", "segmentation", "from", "first", "step", ".", "There", "is", "no", "option", "for", "use", "without", "use_boundary_penalties" ]
train
https://github.com/mjirik/imcut/blob/1b38e7cd18a7a38fe683c1cabe1222fe5fa03aa3/imcut/pycut.py#L608-L626
mjirik/imcut
imcut/pycut.py
ImageGraphCut.__ordered_values_by_indexes
def __ordered_values_by_indexes(self, data, inds):
    """
    Return values (intensities) by indexes.

    Used for multiscale graph cut.
    data = [[0 1 1],
            [0 2 2],
            [0 2 2]]

    inds = [[0 1 2],
            [3 4 4],
            [5 4 4]]

    return: [0, 1, 1, 0, 2, 0]

    If the data are not consistent, it will take the maximal value
    """
    # One slot per label; labels are assumed to span 0..max(inds).
    # A slot stays None if its label never occurs in ``inds``.
    values = [None] * (np.max(inds) + 1)
    # Walk all pixels in parallel over indexes and intensities; when several
    # pixels share one index, keep the maximal intensity.
    for ind, val in zip(inds.ravel(), data.ravel()):
        if values[ind] is None or values[ind] < val:
            values[ind] = val
    return np.asarray(values)
python
def __ordered_values_by_indexes(self, data, inds): """ Return values (intensities) by indexes. Used for multiscale graph cut. data = [[0 1 1], [0 2 2], [0 2 2]] inds = [[0 1 2], [3 4 4], [5 4 4]] return: [0, 1, 1, 0, 2, 0] If the data are not consistent, it will take the maximal value """ # get unique labels and their first indexes # lab, linds = np.unique(inds, return_index=True) # compute values by indexes # values = data.reshape(-1)[linds] # alternative slow implementation # if there are different data on same index, it will take # maximal value # lab = np.unique(inds) # values = [0]*len(lab) # for label in lab: # values[label] = np.max(data[inds == label]) # # values = np.asarray(values) # yet another implementation values = [None] * (np.max(inds) + 1) linear_inds = inds.ravel() linear_data = data.ravel() for i in range(0, len(linear_inds)): # going over all data pixels if values[linear_inds[i]] is None: # this index is found for first values[linear_inds[i]] = linear_data[i] elif values[linear_inds[i]] < linear_data[i]: # here can be changed maximal or minimal value values[linear_inds[i]] = linear_data[i] values = np.asarray(values) return values
[ "def", "__ordered_values_by_indexes", "(", "self", ",", "data", ",", "inds", ")", ":", "# get unique labels and their first indexes", "# lab, linds = np.unique(inds, return_index=True)", "# compute values by indexes", "# values = data.reshape(-1)[linds]", "# alternative slow implementation", "# if there are different data on same index, it will take", "# maximal value", "# lab = np.unique(inds)", "# values = [0]*len(lab)", "# for label in lab:", "# values[label] = np.max(data[inds == label])", "#", "# values = np.asarray(values)", "# yet another implementation", "values", "=", "[", "None", "]", "*", "(", "np", ".", "max", "(", "inds", ")", "+", "1", ")", "linear_inds", "=", "inds", ".", "ravel", "(", ")", "linear_data", "=", "data", ".", "ravel", "(", ")", "for", "i", "in", "range", "(", "0", ",", "len", "(", "linear_inds", ")", ")", ":", "# going over all data pixels", "if", "values", "[", "linear_inds", "[", "i", "]", "]", "is", "None", ":", "# this index is found for first", "values", "[", "linear_inds", "[", "i", "]", "]", "=", "linear_data", "[", "i", "]", "elif", "values", "[", "linear_inds", "[", "i", "]", "]", "<", "linear_data", "[", "i", "]", ":", "# here can be changed maximal or minimal value", "values", "[", "linear_inds", "[", "i", "]", "]", "=", "linear_data", "[", "i", "]", "values", "=", "np", ".", "asarray", "(", "values", ")", "return", "values" ]
Return values (intensities) by indexes. Used for multiscale graph cut. data = [[0 1 1], [0 2 2], [0 2 2]] inds = [[0 1 2], [3 4 4], [5 4 4]] return: [0, 1, 1, 0, 2, 0] If the data are not consistent, it will take the maximal value
[ "Return", "values", "(", "intensities", ")", "by", "indexes", "." ]
train
https://github.com/mjirik/imcut/blob/1b38e7cd18a7a38fe683c1cabe1222fe5fa03aa3/imcut/pycut.py#L628-L678
mjirik/imcut
imcut/pycut.py
ImageGraphCut.__hi2lo_multiscale_indexes
def __hi2lo_multiscale_indexes(self, mask, orig_shape):  # , zoom):
    """
    Function computes multiscale indexes of ndarray.

    :param mask: Says where is original resolution (0) and where is small
        resolution (1). Mask is in small resolution.
    :param orig_shape: Original shape of input data.
    :return: tuple (inds, mask_orig) — relabeled multiscale index array in
        original resolution and the zoomed mask.

    result = [[0 1 2],
              [3 4 4],
              [5 4 4]]
    """
    mask_orig = zoom_to_shape(mask, orig_shape, dtype=np.int8)

    inds_small = np.arange(mask.size).reshape(mask.shape)
    # BUGFIX: the index array must not be stored as int8 — indexes above 127
    # would overflow for any mask with more than 128 voxels.
    inds_small_in_orig = zoom_to_shape(inds_small, orig_shape, dtype=np.int64)
    inds_orig = np.arange(np.prod(orig_shape)).reshape(orig_shape)

    # shift high-resolution indexes above the low-resolution ones
    inds_orig += np.max(inds_small_in_orig) + 1

    # '==' is not the same as 'is' for numpy.array
    inds_small_in_orig[mask_orig == True] = inds_orig[mask_orig == True]  # noqa
    inds = inds_small_in_orig
    inds = relabel_squeeze(inds)
    logger.debug(
        "Index after relabeling: %s", scipy.stats.describe(inds, axis=None)
    )
    return inds, mask_orig
python
def __hi2lo_multiscale_indexes(self, mask, orig_shape): # , zoom): """ Function computes multiscale indexes of ndarray. mask: Says where is original resolution (0) and where is small resolution (1). Mask is in small resolution. orig_shape: Original shape of input data. zoom: Usually number greater then 1 result = [[0 1 2], [3 4 4], [5 4 4]] """ mask_orig = zoom_to_shape(mask, orig_shape, dtype=np.int8) inds_small = np.arange(mask.size).reshape(mask.shape) inds_small_in_orig = zoom_to_shape(inds_small, orig_shape, dtype=np.int8) inds_orig = np.arange(np.prod(orig_shape)).reshape(orig_shape) # inds_orig = inds_orig * mask_orig inds_orig += np.max(inds_small_in_orig) + 1 # print 'indexes' # import py3DSeedEditor as ped # import pdb; pdb.set_trace() # BREAKPOINT # '==' is not the same as 'is' for numpy.array inds_small_in_orig[mask_orig == True] = inds_orig[mask_orig == True] # noqa inds = inds_small_in_orig # print np.max(inds) # print np.min(inds) inds = relabel_squeeze(inds) logger.debug( "Index after relabeling: %s", scipy.stats.describe(inds, axis=None) ) # logger.debug("Minimal index after relabeling: " + str(np.min(inds))) # inds_orig[mask_orig==True] = 0 # inds_small_in_orig[mask_orig==False] = 0 # inds = (inds_orig + np.max(inds_small_in_orig) + 1) + inds_small_in_orig return inds, mask_orig
[ "def", "__hi2lo_multiscale_indexes", "(", "self", ",", "mask", ",", "orig_shape", ")", ":", "# , zoom):", "mask_orig", "=", "zoom_to_shape", "(", "mask", ",", "orig_shape", ",", "dtype", "=", "np", ".", "int8", ")", "inds_small", "=", "np", ".", "arange", "(", "mask", ".", "size", ")", ".", "reshape", "(", "mask", ".", "shape", ")", "inds_small_in_orig", "=", "zoom_to_shape", "(", "inds_small", ",", "orig_shape", ",", "dtype", "=", "np", ".", "int8", ")", "inds_orig", "=", "np", ".", "arange", "(", "np", ".", "prod", "(", "orig_shape", ")", ")", ".", "reshape", "(", "orig_shape", ")", "# inds_orig = inds_orig * mask_orig", "inds_orig", "+=", "np", ".", "max", "(", "inds_small_in_orig", ")", "+", "1", "# print 'indexes'", "# import py3DSeedEditor as ped", "# import pdb; pdb.set_trace() # BREAKPOINT", "# '==' is not the same as 'is' for numpy.array", "inds_small_in_orig", "[", "mask_orig", "==", "True", "]", "=", "inds_orig", "[", "mask_orig", "==", "True", "]", "# noqa", "inds", "=", "inds_small_in_orig", "# print np.max(inds)", "# print np.min(inds)", "inds", "=", "relabel_squeeze", "(", "inds", ")", "logger", ".", "debug", "(", "\"Index after relabeling: %s\"", ",", "scipy", ".", "stats", ".", "describe", "(", "inds", ",", "axis", "=", "None", ")", ")", "# logger.debug(\"Minimal index after relabeling: \" + str(np.min(inds)))", "# inds_orig[mask_orig==True] = 0", "# inds_small_in_orig[mask_orig==False] = 0", "# inds = (inds_orig + np.max(inds_small_in_orig) + 1) + inds_small_in_orig", "return", "inds", ",", "mask_orig" ]
Function computes multiscale indexes of ndarray. mask: Says where is original resolution (0) and where is small resolution (1). Mask is in small resolution. orig_shape: Original shape of input data. zoom: Usually number greater than 1 result = [[0 1 2], [3 4 4], [5 4 4]]
[ "Function", "computes", "multiscale", "indexes", "of", "ndarray", "." ]
train
https://github.com/mjirik/imcut/blob/1b38e7cd18a7a38fe683c1cabe1222fe5fa03aa3/imcut/pycut.py#L680-L721
mjirik/imcut
imcut/pycut.py
ImageGraphCut.interactivity
def interactivity(self, min_val=None, max_val=None, qt_app=None):
    """
    Interactive seed setting with 3d seed editor
    """
    from .seed_editor_qt import QTSeedEditor
    from PyQt4.QtGui import QApplication

    if min_val is None:
        min_val = np.min(self.img)
    if max_val is None:
        max_val = np.max(self.img)

    # Display window centre/width derived from the intensity range.
    window_c = (max_val + min_val) / 2  # .astype(np.int16)
    window_w = max_val - min_val  # .astype(np.int16)

    if qt_app is None:
        qt_app = QApplication(sys.argv)

    editor = QTSeedEditor(
        self.img,
        modeFun=self.interactivity_loop,
        voxelSize=self.voxelsize,
        seeds=self.seeds,
        volume_unit=self.volume_unit,
    )
    editor.changeC(window_c)
    editor.changeW(window_w)
    qt_app.exec_()
python
def interactivity(self, min_val=None, max_val=None, qt_app=None): """ Interactive seed setting with 3d seed editor """ from .seed_editor_qt import QTSeedEditor from PyQt4.QtGui import QApplication if min_val is None: min_val = np.min(self.img) if max_val is None: max_val = np.max(self.img) window_c = (max_val + min_val) / 2 # .astype(np.int16) window_w = max_val - min_val # .astype(np.int16) if qt_app is None: qt_app = QApplication(sys.argv) pyed = QTSeedEditor( self.img, modeFun=self.interactivity_loop, voxelSize=self.voxelsize, seeds=self.seeds, volume_unit=self.volume_unit, ) pyed.changeC(window_c) pyed.changeW(window_w) qt_app.exec_()
[ "def", "interactivity", "(", "self", ",", "min_val", "=", "None", ",", "max_val", "=", "None", ",", "qt_app", "=", "None", ")", ":", "from", ".", "seed_editor_qt", "import", "QTSeedEditor", "from", "PyQt4", ".", "QtGui", "import", "QApplication", "if", "min_val", "is", "None", ":", "min_val", "=", "np", ".", "min", "(", "self", ".", "img", ")", "if", "max_val", "is", "None", ":", "max_val", "=", "np", ".", "max", "(", "self", ".", "img", ")", "window_c", "=", "(", "max_val", "+", "min_val", ")", "/", "2", "# .astype(np.int16)", "window_w", "=", "max_val", "-", "min_val", "# .astype(np.int16)", "if", "qt_app", "is", "None", ":", "qt_app", "=", "QApplication", "(", "sys", ".", "argv", ")", "pyed", "=", "QTSeedEditor", "(", "self", ".", "img", ",", "modeFun", "=", "self", ".", "interactivity_loop", ",", "voxelSize", "=", "self", ".", "voxelsize", ",", "seeds", "=", "self", ".", "seeds", ",", "volume_unit", "=", "self", ".", "volume_unit", ",", ")", "pyed", ".", "changeC", "(", "window_c", ")", "pyed", ".", "changeW", "(", "window_w", ")", "qt_app", ".", "exec_", "(", ")" ]
Interactive seed setting with 3d seed editor
[ "Interactive", "seed", "setting", "with", "3d", "seed", "editor" ]
train
https://github.com/mjirik/imcut/blob/1b38e7cd18a7a38fe683c1cabe1222fe5fa03aa3/imcut/pycut.py#L723-L753
mjirik/imcut
imcut/pycut.py
ImageGraphCut.set_seeds
def set_seeds(self, seeds):
    """
    Function for manual seed setting. Sets variable seeds and prepares
    voxels for density model.

    :param seeds: ndarray (0 - nothing, 1 - object, 2 - background,
        3 - object just hard constraints, no model training,
        4 - background just hard constraints, no model training)
    :raises ValueError: when seeds shape differs from the image shape
    """
    if self.img.shape != seeds.shape:
        # ValueError is a subclass of Exception, so existing callers
        # catching Exception keep working.
        raise ValueError("Seeds must be same size as input image")

    self.seeds = seeds.astype("int8")
    # Intensity samples used later to train the object/background models.
    self.voxels1 = self.img[self.seeds == 1]
    self.voxels2 = self.img[self.seeds == 2]
python
def set_seeds(self, seeds): """ Function for manual seed setting. Sets variable seeds and prepares voxels for density model. :param seeds: ndarray (0 - nothing, 1 - object, 2 - background, 3 - object just hard constraints, no model training, 4 - background just hard constraints, no model training) """ if self.img.shape != seeds.shape: raise Exception("Seeds must be same size as input image") self.seeds = seeds.astype("int8") self.voxels1 = self.img[self.seeds == 1] self.voxels2 = self.img[self.seeds == 2]
[ "def", "set_seeds", "(", "self", ",", "seeds", ")", ":", "if", "self", ".", "img", ".", "shape", "!=", "seeds", ".", "shape", ":", "raise", "Exception", "(", "\"Seeds must be same size as input image\"", ")", "self", ".", "seeds", "=", "seeds", ".", "astype", "(", "\"int8\"", ")", "self", ".", "voxels1", "=", "self", ".", "img", "[", "self", ".", "seeds", "==", "1", "]", "self", ".", "voxels2", "=", "self", ".", "img", "[", "self", ".", "seeds", "==", "2", "]" ]
Function for manual seed setting. Sets variable seeds and prepares voxels for density model. :param seeds: ndarray (0 - nothing, 1 - object, 2 - background, 3 - object just hard constraints, no model training, 4 - background just hard constraints, no model training)
[ "Function", "for", "manual", "seed", "setting", ".", "Sets", "variable", "seeds", "and", "prepares", "voxels", "for", "density", "model", ".", ":", "param", "seeds", ":", "ndarray", "(", "0", "-", "nothing", "1", "-", "object", "2", "-", "background", "3", "-", "object", "just", "hard", "constraints", "no", "model", "training", "4", "-", "background", "just", "hard", "constraints", "no", "model", "training", ")" ]
train
https://github.com/mjirik/imcut/blob/1b38e7cd18a7a38fe683c1cabe1222fe5fa03aa3/imcut/pycut.py#L755-L768
mjirik/imcut
imcut/pycut.py
ImageGraphCut.run
def run(self, run_fit_model=True): """ Run the Graph Cut segmentation according to preset parameters. :param run_fit_model: Allow to skip model fit when the model is prepared before :return: """ if run_fit_model: self.fit_model(self.img, self.voxelsize, self.seeds) self._start_time = time.time() if self.segparams["method"].lower() in ("graphcut", "gc"): self.__single_scale_gc_run() elif self.segparams["method"].lower() in ( "multiscale_graphcut", "multiscale_gc", "msgc", "msgc_lo2hi", "lo2hi", "multiscale_graphcut_lo2hi", ): logger.debug("performing multiscale Graph-Cut lo2hi") self.__multiscale_gc_lo2hi_run() elif self.segparams["method"].lower() in ( "msgc_hi2lo", "hi2lo", "multiscale_graphcut_hi2lo", ): logger.debug("performing multiscale Graph-Cut hi2lo") self.__multiscale_gc_hi2lo_run() else: logger.error("Unknown segmentation method: " + self.segparams["method"])
python
def run(self, run_fit_model=True): """ Run the Graph Cut segmentation according to preset parameters. :param run_fit_model: Allow to skip model fit when the model is prepared before :return: """ if run_fit_model: self.fit_model(self.img, self.voxelsize, self.seeds) self._start_time = time.time() if self.segparams["method"].lower() in ("graphcut", "gc"): self.__single_scale_gc_run() elif self.segparams["method"].lower() in ( "multiscale_graphcut", "multiscale_gc", "msgc", "msgc_lo2hi", "lo2hi", "multiscale_graphcut_lo2hi", ): logger.debug("performing multiscale Graph-Cut lo2hi") self.__multiscale_gc_lo2hi_run() elif self.segparams["method"].lower() in ( "msgc_hi2lo", "hi2lo", "multiscale_graphcut_hi2lo", ): logger.debug("performing multiscale Graph-Cut hi2lo") self.__multiscale_gc_hi2lo_run() else: logger.error("Unknown segmentation method: " + self.segparams["method"])
[ "def", "run", "(", "self", ",", "run_fit_model", "=", "True", ")", ":", "if", "run_fit_model", ":", "self", ".", "fit_model", "(", "self", ".", "img", ",", "self", ".", "voxelsize", ",", "self", ".", "seeds", ")", "self", ".", "_start_time", "=", "time", ".", "time", "(", ")", "if", "self", ".", "segparams", "[", "\"method\"", "]", ".", "lower", "(", ")", "in", "(", "\"graphcut\"", ",", "\"gc\"", ")", ":", "self", ".", "__single_scale_gc_run", "(", ")", "elif", "self", ".", "segparams", "[", "\"method\"", "]", ".", "lower", "(", ")", "in", "(", "\"multiscale_graphcut\"", ",", "\"multiscale_gc\"", ",", "\"msgc\"", ",", "\"msgc_lo2hi\"", ",", "\"lo2hi\"", ",", "\"multiscale_graphcut_lo2hi\"", ",", ")", ":", "logger", ".", "debug", "(", "\"performing multiscale Graph-Cut lo2hi\"", ")", "self", ".", "__multiscale_gc_lo2hi_run", "(", ")", "elif", "self", ".", "segparams", "[", "\"method\"", "]", ".", "lower", "(", ")", "in", "(", "\"msgc_hi2lo\"", ",", "\"hi2lo\"", ",", "\"multiscale_graphcut_hi2lo\"", ",", ")", ":", "logger", ".", "debug", "(", "\"performing multiscale Graph-Cut hi2lo\"", ")", "self", ".", "__multiscale_gc_hi2lo_run", "(", ")", "else", ":", "logger", ".", "error", "(", "\"Unknown segmentation method: \"", "+", "self", ".", "segparams", "[", "\"method\"", "]", ")" ]
Run the Graph Cut segmentation according to preset parameters. :param run_fit_model: Allow to skip model fit when the model is prepared before :return:
[ "Run", "the", "Graph", "Cut", "segmentation", "according", "to", "preset", "parameters", "." ]
train
https://github.com/mjirik/imcut/blob/1b38e7cd18a7a38fe683c1cabe1222fe5fa03aa3/imcut/pycut.py#L770-L802
mjirik/imcut
imcut/pycut.py
ImageGraphCut.__set_hard_hard_constraints
def __set_hard_hard_constraints(self, tdata1, tdata2, seeds): """ it works with seed labels: 0: nothing 1: object 1 - full seeds 2: object 2 - full seeds 3: object 1 - not a training seeds 4: object 2 - not a training seeds """ seeds_mask = (seeds == 1) | (seeds == 3) tdata2[seeds_mask] = np.max(tdata2) + 1 tdata1[seeds_mask] = 0 seeds_mask = (seeds == 2) | (seeds == 4) tdata1[seeds_mask] = np.max(tdata1) + 1 tdata2[seeds_mask] = 0 return tdata1, tdata2
python
def __set_hard_hard_constraints(self, tdata1, tdata2, seeds): """ it works with seed labels: 0: nothing 1: object 1 - full seeds 2: object 2 - full seeds 3: object 1 - not a training seeds 4: object 2 - not a training seeds """ seeds_mask = (seeds == 1) | (seeds == 3) tdata2[seeds_mask] = np.max(tdata2) + 1 tdata1[seeds_mask] = 0 seeds_mask = (seeds == 2) | (seeds == 4) tdata1[seeds_mask] = np.max(tdata1) + 1 tdata2[seeds_mask] = 0 return tdata1, tdata2
[ "def", "__set_hard_hard_constraints", "(", "self", ",", "tdata1", ",", "tdata2", ",", "seeds", ")", ":", "seeds_mask", "=", "(", "seeds", "==", "1", ")", "|", "(", "seeds", "==", "3", ")", "tdata2", "[", "seeds_mask", "]", "=", "np", ".", "max", "(", "tdata2", ")", "+", "1", "tdata1", "[", "seeds_mask", "]", "=", "0", "seeds_mask", "=", "(", "seeds", "==", "2", ")", "|", "(", "seeds", "==", "4", ")", "tdata1", "[", "seeds_mask", "]", "=", "np", ".", "max", "(", "tdata1", ")", "+", "1", "tdata2", "[", "seeds_mask", "]", "=", "0", "return", "tdata1", ",", "tdata2" ]
it works with seed labels: 0: nothing 1: object 1 - full seeds 2: object 2 - full seeds 3: object 1 - not a training seeds 4: object 2 - not a training seeds
[ "it", "works", "with", "seed", "labels", ":", "0", ":", "nothing", "1", ":", "object", "1", "-", "full", "seeds", "2", ":", "object", "2", "-", "full", "seeds", "3", ":", "object", "1", "-", "not", "a", "training", "seeds", "4", ":", "object", "2", "-", "not", "a", "training", "seeds" ]
train
https://github.com/mjirik/imcut/blob/1b38e7cd18a7a38fe683c1cabe1222fe5fa03aa3/imcut/pycut.py#L834-L851
mjirik/imcut
imcut/pycut.py
ImageGraphCut.__similarity_for_tlinks_obj_bgr
def __similarity_for_tlinks_obj_bgr( self, data, voxelsize, # voxels1, voxels2, # seeds, otherfeatures=None ): """ Compute edge values for graph cut tlinks based on image intensity and texture. """ # self.fit_model(data, voxelsize, seeds) # There is a need to have small vaues for good fit # R(obj) = -ln( Pr (Ip | O) ) # R(bck) = -ln( Pr (Ip | B) ) # Boykov2001b # ln is computed in likelihood tdata1 = (-(self.mdl.likelihood_from_image(data, voxelsize, 1))) * 10 tdata2 = (-(self.mdl.likelihood_from_image(data, voxelsize, 2))) * 10 # to spare some memory dtype = np.int16 if np.any(tdata1 > 32760): dtype = np.float32 if np.any(tdata2 > 32760): dtype = np.float32 if self.segparams["use_apriori_if_available"] and self.apriori is not None: logger.debug("using apriori information") gamma = self.segparams["apriori_gamma"] a1 = (-np.log(self.apriori * 0.998 + 0.001)) * 10 a2 = (-np.log(0.999 - (self.apriori * 0.998))) * 10 # logger.debug('max ' + str(np.max(tdata1)) + ' min ' + str(np.min(tdata1))) # logger.debug('max ' + str(np.max(tdata2)) + ' min ' + str(np.min(tdata2))) # logger.debug('max ' + str(np.max(a1)) + ' min ' + str(np.min(a1))) # logger.debug('max ' + str(np.max(a2)) + ' min ' + str(np.min(a2))) tdata1u = (((1 - gamma) * tdata1) + (gamma * a1)).astype(dtype) tdata2u = (((1 - gamma) * tdata2) + (gamma * a2)).astype(dtype) tdata1 = tdata1u tdata2 = tdata2u # logger.debug(' max ' + str(np.max(tdata1)) + ' min ' + str(np.min(tdata1))) # logger.debug(' max ' + str(np.max(tdata2)) + ' min ' + str(np.min(tdata2))) # logger.debug('gamma ' + str(gamma)) # import sed3 # ed = sed3.show_slices(tdata1) # ed = sed3.show_slices(tdata2) del tdata1u del tdata2u del a1 del a2 # if np.any(tdata1 < 0) or np.any(tdata2 <0): # logger.error("Problem with tlinks. Likelihood is < 0") # if self.debug_images: # self.__show_debug_tdata_images(tdata1, tdata2, suptitle="likelihood") return tdata1, tdata2
python
def __similarity_for_tlinks_obj_bgr( self, data, voxelsize, # voxels1, voxels2, # seeds, otherfeatures=None ): """ Compute edge values for graph cut tlinks based on image intensity and texture. """ # self.fit_model(data, voxelsize, seeds) # There is a need to have small vaues for good fit # R(obj) = -ln( Pr (Ip | O) ) # R(bck) = -ln( Pr (Ip | B) ) # Boykov2001b # ln is computed in likelihood tdata1 = (-(self.mdl.likelihood_from_image(data, voxelsize, 1))) * 10 tdata2 = (-(self.mdl.likelihood_from_image(data, voxelsize, 2))) * 10 # to spare some memory dtype = np.int16 if np.any(tdata1 > 32760): dtype = np.float32 if np.any(tdata2 > 32760): dtype = np.float32 if self.segparams["use_apriori_if_available"] and self.apriori is not None: logger.debug("using apriori information") gamma = self.segparams["apriori_gamma"] a1 = (-np.log(self.apriori * 0.998 + 0.001)) * 10 a2 = (-np.log(0.999 - (self.apriori * 0.998))) * 10 # logger.debug('max ' + str(np.max(tdata1)) + ' min ' + str(np.min(tdata1))) # logger.debug('max ' + str(np.max(tdata2)) + ' min ' + str(np.min(tdata2))) # logger.debug('max ' + str(np.max(a1)) + ' min ' + str(np.min(a1))) # logger.debug('max ' + str(np.max(a2)) + ' min ' + str(np.min(a2))) tdata1u = (((1 - gamma) * tdata1) + (gamma * a1)).astype(dtype) tdata2u = (((1 - gamma) * tdata2) + (gamma * a2)).astype(dtype) tdata1 = tdata1u tdata2 = tdata2u # logger.debug(' max ' + str(np.max(tdata1)) + ' min ' + str(np.min(tdata1))) # logger.debug(' max ' + str(np.max(tdata2)) + ' min ' + str(np.min(tdata2))) # logger.debug('gamma ' + str(gamma)) # import sed3 # ed = sed3.show_slices(tdata1) # ed = sed3.show_slices(tdata2) del tdata1u del tdata2u del a1 del a2 # if np.any(tdata1 < 0) or np.any(tdata2 <0): # logger.error("Problem with tlinks. Likelihood is < 0") # if self.debug_images: # self.__show_debug_tdata_images(tdata1, tdata2, suptitle="likelihood") return tdata1, tdata2
[ "def", "__similarity_for_tlinks_obj_bgr", "(", "self", ",", "data", ",", "voxelsize", ",", "# voxels1, voxels2,", "# seeds, otherfeatures=None", ")", ":", "# self.fit_model(data, voxelsize, seeds)", "# There is a need to have small vaues for good fit", "# R(obj) = -ln( Pr (Ip | O) )", "# R(bck) = -ln( Pr (Ip | B) )", "# Boykov2001b", "# ln is computed in likelihood", "tdata1", "=", "(", "-", "(", "self", ".", "mdl", ".", "likelihood_from_image", "(", "data", ",", "voxelsize", ",", "1", ")", ")", ")", "*", "10", "tdata2", "=", "(", "-", "(", "self", ".", "mdl", ".", "likelihood_from_image", "(", "data", ",", "voxelsize", ",", "2", ")", ")", ")", "*", "10", "# to spare some memory", "dtype", "=", "np", ".", "int16", "if", "np", ".", "any", "(", "tdata1", ">", "32760", ")", ":", "dtype", "=", "np", ".", "float32", "if", "np", ".", "any", "(", "tdata2", ">", "32760", ")", ":", "dtype", "=", "np", ".", "float32", "if", "self", ".", "segparams", "[", "\"use_apriori_if_available\"", "]", "and", "self", ".", "apriori", "is", "not", "None", ":", "logger", ".", "debug", "(", "\"using apriori information\"", ")", "gamma", "=", "self", ".", "segparams", "[", "\"apriori_gamma\"", "]", "a1", "=", "(", "-", "np", ".", "log", "(", "self", ".", "apriori", "*", "0.998", "+", "0.001", ")", ")", "*", "10", "a2", "=", "(", "-", "np", ".", "log", "(", "0.999", "-", "(", "self", ".", "apriori", "*", "0.998", ")", ")", ")", "*", "10", "# logger.debug('max ' + str(np.max(tdata1)) + ' min ' + str(np.min(tdata1)))", "# logger.debug('max ' + str(np.max(tdata2)) + ' min ' + str(np.min(tdata2)))", "# logger.debug('max ' + str(np.max(a1)) + ' min ' + str(np.min(a1)))", "# logger.debug('max ' + str(np.max(a2)) + ' min ' + str(np.min(a2)))", "tdata1u", "=", "(", "(", "(", "1", "-", "gamma", ")", "*", "tdata1", ")", "+", "(", "gamma", "*", "a1", ")", ")", ".", "astype", "(", "dtype", ")", "tdata2u", "=", "(", "(", "(", "1", "-", "gamma", ")", "*", "tdata2", ")", "+", "(", "gamma", "*", "a2", ")", 
")", ".", "astype", "(", "dtype", ")", "tdata1", "=", "tdata1u", "tdata2", "=", "tdata2u", "# logger.debug(' max ' + str(np.max(tdata1)) + ' min ' + str(np.min(tdata1)))", "# logger.debug(' max ' + str(np.max(tdata2)) + ' min ' + str(np.min(tdata2)))", "# logger.debug('gamma ' + str(gamma))", "# import sed3", "# ed = sed3.show_slices(tdata1)", "# ed = sed3.show_slices(tdata2)", "del", "tdata1u", "del", "tdata2u", "del", "a1", "del", "a2", "# if np.any(tdata1 < 0) or np.any(tdata2 <0):", "# logger.error(\"Problem with tlinks. Likelihood is < 0\")", "# if self.debug_images:", "# self.__show_debug_tdata_images(tdata1, tdata2, suptitle=\"likelihood\")", "return", "tdata1", ",", "tdata2" ]
Compute edge values for graph cut tlinks based on image intensity and texture.
[ "Compute", "edge", "values", "for", "graph", "cut", "tlinks", "based", "on", "image", "intensity", "and", "texture", "." ]
train
https://github.com/mjirik/imcut/blob/1b38e7cd18a7a38fe683c1cabe1222fe5fa03aa3/imcut/pycut.py#L1023-L1080
mjirik/imcut
imcut/pycut.py
ImageGraphCut.__create_nlinks
def __create_nlinks(self, data, inds=None, boundary_penalties_fcn=None): """ Compute nlinks grid from data shape information. For boundary penalties are data (intensities) values are used. ins: Default is None. Used for multiscale GC. This are indexes of multiscale pixels. Next example shows one superpixel witn index 2. inds = [ [1 2 2], [3 2 2], [4 5 6]] boundary_penalties_fcn: is function with one argument - axis. It can it can be used for setting penalty weights between neighbooring pixels. """ # use the gerneral graph algorithm # first, we construct the grid graph start = time.time() if inds is None: inds = np.arange(data.size).reshape(data.shape) # if not self.segparams['use_boundary_penalties'] and \ # boundary_penalties_fcn is None : if boundary_penalties_fcn is None: # This is faster for some specific format edgx = np.c_[inds[:, :, :-1].ravel(), inds[:, :, 1:].ravel()] edgy = np.c_[inds[:, :-1, :].ravel(), inds[:, 1:, :].ravel()] edgz = np.c_[inds[:-1, :, :].ravel(), inds[1:, :, :].ravel()] else: logger.info("use_boundary_penalties") bpw = self.segparams["boundary_penalties_weight"] bpa = boundary_penalties_fcn(2) # id1=inds[:, :, :-1].ravel() edgx = np.c_[ inds[:, :, :-1].ravel(), inds[:, :, 1:].ravel(), # cc * np.ones(id1.shape) bpw * bpa[:, :, 1:].ravel(), ] bpa = boundary_penalties_fcn(1) # id1 =inds[:, 1:, :].ravel() edgy = np.c_[ inds[:, :-1, :].ravel(), inds[:, 1:, :].ravel(), # cc * np.ones(id1.shape)] bpw * bpa[:, 1:, :].ravel(), ] bpa = boundary_penalties_fcn(0) # id1 = inds[1:, :, :].ravel() edgz = np.c_[ inds[:-1, :, :].ravel(), inds[1:, :, :].ravel(), # cc * np.ones(id1.shape)] bpw * bpa[1:, :, :].ravel(), ] # import pdb; pdb.set_trace() edges = np.vstack([edgx, edgy, edgz]).astype(np.int32) # edges - seznam indexu hran, kteres spolu sousedi\ elapsed = time.time() - start self.stats["_create_nlinks time"] = elapsed logger.info("__create nlinks time " + str(elapsed)) return edges
python
def __create_nlinks(self, data, inds=None, boundary_penalties_fcn=None): """ Compute nlinks grid from data shape information. For boundary penalties are data (intensities) values are used. ins: Default is None. Used for multiscale GC. This are indexes of multiscale pixels. Next example shows one superpixel witn index 2. inds = [ [1 2 2], [3 2 2], [4 5 6]] boundary_penalties_fcn: is function with one argument - axis. It can it can be used for setting penalty weights between neighbooring pixels. """ # use the gerneral graph algorithm # first, we construct the grid graph start = time.time() if inds is None: inds = np.arange(data.size).reshape(data.shape) # if not self.segparams['use_boundary_penalties'] and \ # boundary_penalties_fcn is None : if boundary_penalties_fcn is None: # This is faster for some specific format edgx = np.c_[inds[:, :, :-1].ravel(), inds[:, :, 1:].ravel()] edgy = np.c_[inds[:, :-1, :].ravel(), inds[:, 1:, :].ravel()] edgz = np.c_[inds[:-1, :, :].ravel(), inds[1:, :, :].ravel()] else: logger.info("use_boundary_penalties") bpw = self.segparams["boundary_penalties_weight"] bpa = boundary_penalties_fcn(2) # id1=inds[:, :, :-1].ravel() edgx = np.c_[ inds[:, :, :-1].ravel(), inds[:, :, 1:].ravel(), # cc * np.ones(id1.shape) bpw * bpa[:, :, 1:].ravel(), ] bpa = boundary_penalties_fcn(1) # id1 =inds[:, 1:, :].ravel() edgy = np.c_[ inds[:, :-1, :].ravel(), inds[:, 1:, :].ravel(), # cc * np.ones(id1.shape)] bpw * bpa[:, 1:, :].ravel(), ] bpa = boundary_penalties_fcn(0) # id1 = inds[1:, :, :].ravel() edgz = np.c_[ inds[:-1, :, :].ravel(), inds[1:, :, :].ravel(), # cc * np.ones(id1.shape)] bpw * bpa[1:, :, :].ravel(), ] # import pdb; pdb.set_trace() edges = np.vstack([edgx, edgy, edgz]).astype(np.int32) # edges - seznam indexu hran, kteres spolu sousedi\ elapsed = time.time() - start self.stats["_create_nlinks time"] = elapsed logger.info("__create nlinks time " + str(elapsed)) return edges
[ "def", "__create_nlinks", "(", "self", ",", "data", ",", "inds", "=", "None", ",", "boundary_penalties_fcn", "=", "None", ")", ":", "# use the gerneral graph algorithm", "# first, we construct the grid graph", "start", "=", "time", ".", "time", "(", ")", "if", "inds", "is", "None", ":", "inds", "=", "np", ".", "arange", "(", "data", ".", "size", ")", ".", "reshape", "(", "data", ".", "shape", ")", "# if not self.segparams['use_boundary_penalties'] and \\", "# boundary_penalties_fcn is None :", "if", "boundary_penalties_fcn", "is", "None", ":", "# This is faster for some specific format", "edgx", "=", "np", ".", "c_", "[", "inds", "[", ":", ",", ":", ",", ":", "-", "1", "]", ".", "ravel", "(", ")", ",", "inds", "[", ":", ",", ":", ",", "1", ":", "]", ".", "ravel", "(", ")", "]", "edgy", "=", "np", ".", "c_", "[", "inds", "[", ":", ",", ":", "-", "1", ",", ":", "]", ".", "ravel", "(", ")", ",", "inds", "[", ":", ",", "1", ":", ",", ":", "]", ".", "ravel", "(", ")", "]", "edgz", "=", "np", ".", "c_", "[", "inds", "[", ":", "-", "1", ",", ":", ",", ":", "]", ".", "ravel", "(", ")", ",", "inds", "[", "1", ":", ",", ":", ",", ":", "]", ".", "ravel", "(", ")", "]", "else", ":", "logger", ".", "info", "(", "\"use_boundary_penalties\"", ")", "bpw", "=", "self", ".", "segparams", "[", "\"boundary_penalties_weight\"", "]", "bpa", "=", "boundary_penalties_fcn", "(", "2", ")", "# id1=inds[:, :, :-1].ravel()", "edgx", "=", "np", ".", "c_", "[", "inds", "[", ":", ",", ":", ",", ":", "-", "1", "]", ".", "ravel", "(", ")", ",", "inds", "[", ":", ",", ":", ",", "1", ":", "]", ".", "ravel", "(", ")", ",", "# cc * np.ones(id1.shape)", "bpw", "*", "bpa", "[", ":", ",", ":", ",", "1", ":", "]", ".", "ravel", "(", ")", ",", "]", "bpa", "=", "boundary_penalties_fcn", "(", "1", ")", "# id1 =inds[:, 1:, :].ravel()", "edgy", "=", "np", ".", "c_", "[", "inds", "[", ":", ",", ":", "-", "1", ",", ":", "]", ".", "ravel", "(", ")", ",", "inds", "[", ":", ",", "1", ":", ",", ":", "]", ".", 
"ravel", "(", ")", ",", "# cc * np.ones(id1.shape)]", "bpw", "*", "bpa", "[", ":", ",", "1", ":", ",", ":", "]", ".", "ravel", "(", ")", ",", "]", "bpa", "=", "boundary_penalties_fcn", "(", "0", ")", "# id1 = inds[1:, :, :].ravel()", "edgz", "=", "np", ".", "c_", "[", "inds", "[", ":", "-", "1", ",", ":", ",", ":", "]", ".", "ravel", "(", ")", ",", "inds", "[", "1", ":", ",", ":", ",", ":", "]", ".", "ravel", "(", ")", ",", "# cc * np.ones(id1.shape)]", "bpw", "*", "bpa", "[", "1", ":", ",", ":", ",", ":", "]", ".", "ravel", "(", ")", ",", "]", "# import pdb; pdb.set_trace()", "edges", "=", "np", ".", "vstack", "(", "[", "edgx", ",", "edgy", ",", "edgz", "]", ")", ".", "astype", "(", "np", ".", "int32", ")", "# edges - seznam indexu hran, kteres spolu sousedi\\", "elapsed", "=", "time", ".", "time", "(", ")", "-", "start", "self", ".", "stats", "[", "\"_create_nlinks time\"", "]", "=", "elapsed", "logger", ".", "info", "(", "\"__create nlinks time \"", "+", "str", "(", "elapsed", ")", ")", "return", "edges" ]
Compute nlinks grid from data shape information. For boundary penalties are data (intensities) values are used. ins: Default is None. Used for multiscale GC. This are indexes of multiscale pixels. Next example shows one superpixel witn index 2. inds = [ [1 2 2], [3 2 2], [4 5 6]] boundary_penalties_fcn: is function with one argument - axis. It can it can be used for setting penalty weights between neighbooring pixels.
[ "Compute", "nlinks", "grid", "from", "data", "shape", "information", ".", "For", "boundary", "penalties", "are", "data", "(", "intensities", ")", "values", "are", "used", "." ]
train
https://github.com/mjirik/imcut/blob/1b38e7cd18a7a38fe683c1cabe1222fe5fa03aa3/imcut/pycut.py#L1152-L1220
mjirik/imcut
imcut/pycut.py
ImageGraphCut.debug_get_reconstructed_similarity
def debug_get_reconstructed_similarity( self, data3d=None, voxelsize=None, seeds=None, area_weight=1, hard_constraints=True, return_unariesalt=False, ): """ Use actual model to calculate similarity. If no input is given the last image is used. :param data3d: :param voxelsize: :param seeds: :param area_weight: :param hard_constraints: :param return_unariesalt: :return: """ if data3d is None: data3d = self.img if voxelsize is None: voxelsize = self.voxelsize if seeds is None: seeds = self.seeds unariesalt = self.__create_tlinks( data3d, voxelsize, # voxels1, voxels2, seeds, area_weight, hard_constraints, ) if return_unariesalt: return unariesalt else: return self._reshape_unariesalt_to_similarity(unariesalt, data3d.shape)
python
def debug_get_reconstructed_similarity( self, data3d=None, voxelsize=None, seeds=None, area_weight=1, hard_constraints=True, return_unariesalt=False, ): """ Use actual model to calculate similarity. If no input is given the last image is used. :param data3d: :param voxelsize: :param seeds: :param area_weight: :param hard_constraints: :param return_unariesalt: :return: """ if data3d is None: data3d = self.img if voxelsize is None: voxelsize = self.voxelsize if seeds is None: seeds = self.seeds unariesalt = self.__create_tlinks( data3d, voxelsize, # voxels1, voxels2, seeds, area_weight, hard_constraints, ) if return_unariesalt: return unariesalt else: return self._reshape_unariesalt_to_similarity(unariesalt, data3d.shape)
[ "def", "debug_get_reconstructed_similarity", "(", "self", ",", "data3d", "=", "None", ",", "voxelsize", "=", "None", ",", "seeds", "=", "None", ",", "area_weight", "=", "1", ",", "hard_constraints", "=", "True", ",", "return_unariesalt", "=", "False", ",", ")", ":", "if", "data3d", "is", "None", ":", "data3d", "=", "self", ".", "img", "if", "voxelsize", "is", "None", ":", "voxelsize", "=", "self", ".", "voxelsize", "if", "seeds", "is", "None", ":", "seeds", "=", "self", ".", "seeds", "unariesalt", "=", "self", ".", "__create_tlinks", "(", "data3d", ",", "voxelsize", ",", "# voxels1, voxels2,", "seeds", ",", "area_weight", ",", "hard_constraints", ",", ")", "if", "return_unariesalt", ":", "return", "unariesalt", "else", ":", "return", "self", ".", "_reshape_unariesalt_to_similarity", "(", "unariesalt", ",", "data3d", ".", "shape", ")" ]
Use actual model to calculate similarity. If no input is given the last image is used. :param data3d: :param voxelsize: :param seeds: :param area_weight: :param hard_constraints: :param return_unariesalt: :return:
[ "Use", "actual", "model", "to", "calculate", "similarity", ".", "If", "no", "input", "is", "given", "the", "last", "image", "is", "used", ".", ":", "param", "data3d", ":", ":", "param", "voxelsize", ":", ":", "param", "seeds", ":", ":", "param", "area_weight", ":", ":", "param", "hard_constraints", ":", ":", "param", "return_unariesalt", ":", ":", "return", ":" ]
train
https://github.com/mjirik/imcut/blob/1b38e7cd18a7a38fe683c1cabe1222fe5fa03aa3/imcut/pycut.py#L1222-L1259
mjirik/imcut
imcut/pycut.py
ImageGraphCut.debug_show_reconstructed_similarity
def debug_show_reconstructed_similarity( self, data3d=None, voxelsize=None, seeds=None, area_weight=1, hard_constraints=True, show=True, bins=20, slice_number=None, ): """ Show tlinks. :param data3d: ndarray with input data :param voxelsize: :param seeds: :param area_weight: :param hard_constraints: :param show: :param bins: histogram bins number :param slice_number: :return: """ unariesalt = self.debug_get_reconstructed_similarity( data3d, voxelsize=voxelsize, seeds=seeds, area_weight=area_weight, hard_constraints=hard_constraints, return_unariesalt=True, ) self._debug_show_unariesalt( unariesalt, show=show, bins=bins, slice_number=slice_number )
python
def debug_show_reconstructed_similarity( self, data3d=None, voxelsize=None, seeds=None, area_weight=1, hard_constraints=True, show=True, bins=20, slice_number=None, ): """ Show tlinks. :param data3d: ndarray with input data :param voxelsize: :param seeds: :param area_weight: :param hard_constraints: :param show: :param bins: histogram bins number :param slice_number: :return: """ unariesalt = self.debug_get_reconstructed_similarity( data3d, voxelsize=voxelsize, seeds=seeds, area_weight=area_weight, hard_constraints=hard_constraints, return_unariesalt=True, ) self._debug_show_unariesalt( unariesalt, show=show, bins=bins, slice_number=slice_number )
[ "def", "debug_show_reconstructed_similarity", "(", "self", ",", "data3d", "=", "None", ",", "voxelsize", "=", "None", ",", "seeds", "=", "None", ",", "area_weight", "=", "1", ",", "hard_constraints", "=", "True", ",", "show", "=", "True", ",", "bins", "=", "20", ",", "slice_number", "=", "None", ",", ")", ":", "unariesalt", "=", "self", ".", "debug_get_reconstructed_similarity", "(", "data3d", ",", "voxelsize", "=", "voxelsize", ",", "seeds", "=", "seeds", ",", "area_weight", "=", "area_weight", ",", "hard_constraints", "=", "hard_constraints", ",", "return_unariesalt", "=", "True", ",", ")", "self", ".", "_debug_show_unariesalt", "(", "unariesalt", ",", "show", "=", "show", ",", "bins", "=", "bins", ",", "slice_number", "=", "slice_number", ")" ]
Show tlinks. :param data3d: ndarray with input data :param voxelsize: :param seeds: :param area_weight: :param hard_constraints: :param show: :param bins: histogram bins number :param slice_number: :return:
[ "Show", "tlinks", ".", ":", "param", "data3d", ":", "ndarray", "with", "input", "data", ":", "param", "voxelsize", ":", ":", "param", "seeds", ":", ":", "param", "area_weight", ":", ":", "param", "hard_constraints", ":", ":", "param", "show", ":", ":", "param", "bins", ":", "histogram", "bins", "number", ":", "param", "slice_number", ":", ":", "return", ":" ]
train
https://github.com/mjirik/imcut/blob/1b38e7cd18a7a38fe683c1cabe1222fe5fa03aa3/imcut/pycut.py#L1261-L1296
mjirik/imcut
imcut/pycut.py
ImageGraphCut.debug_inspect_node
def debug_inspect_node(self, node_msindex): """ Get info about the node. See pycut.inspect_node() for details. Processing is done in temporary shape. :param node_seed: :return: node_unariesalt, node_neighboor_edges_and_weights, node_neighboor_seeds """ return inspect_node(self.nlinks, self.unariesalt2, self.msinds, node_msindex)
python
def debug_inspect_node(self, node_msindex): """ Get info about the node. See pycut.inspect_node() for details. Processing is done in temporary shape. :param node_seed: :return: node_unariesalt, node_neighboor_edges_and_weights, node_neighboor_seeds """ return inspect_node(self.nlinks, self.unariesalt2, self.msinds, node_msindex)
[ "def", "debug_inspect_node", "(", "self", ",", "node_msindex", ")", ":", "return", "inspect_node", "(", "self", ".", "nlinks", ",", "self", ".", "unariesalt2", ",", "self", ".", "msinds", ",", "node_msindex", ")" ]
Get info about the node. See pycut.inspect_node() for details. Processing is done in temporary shape. :param node_seed: :return: node_unariesalt, node_neighboor_edges_and_weights, node_neighboor_seeds
[ "Get", "info", "about", "the", "node", ".", "See", "pycut", ".", "inspect_node", "()", "for", "details", ".", "Processing", "is", "done", "in", "temporary", "shape", "." ]
train
https://github.com/mjirik/imcut/blob/1b38e7cd18a7a38fe683c1cabe1222fe5fa03aa3/imcut/pycut.py#L1298-L1306
mjirik/imcut
imcut/pycut.py
ImageGraphCut.debug_interactive_inspect_node
def debug_interactive_inspect_node(self): """ Call after segmentation to see selected node neighborhood. User have to select one node by click. :return: """ if ( np.sum( np.abs( np.asarray(self.msinds.shape) - np.asarray(self.segmentation.shape) ) ) == 0 ): segmentation = self.segmentation else: segmentation = self.temp_msgc_resized_segmentation logger.info("Click to select one voxel of interest") import sed3 ed = sed3.sed3(self.msinds, contour=segmentation == 0) ed.show() edseeds = ed.seeds node_msindex = get_node_msindex(self.msinds, edseeds) node_unariesalt, node_neighboor_edges_and_weights, node_neighboor_seeds = self.debug_inspect_node( node_msindex ) import sed3 ed = sed3.sed3( self.msinds, contour=segmentation == 0, seeds=node_neighboor_seeds ) ed.show() return ( node_unariesalt, node_neighboor_edges_and_weights, node_neighboor_seeds, node_msindex, )
python
def debug_interactive_inspect_node(self): """ Call after segmentation to see selected node neighborhood. User have to select one node by click. :return: """ if ( np.sum( np.abs( np.asarray(self.msinds.shape) - np.asarray(self.segmentation.shape) ) ) == 0 ): segmentation = self.segmentation else: segmentation = self.temp_msgc_resized_segmentation logger.info("Click to select one voxel of interest") import sed3 ed = sed3.sed3(self.msinds, contour=segmentation == 0) ed.show() edseeds = ed.seeds node_msindex = get_node_msindex(self.msinds, edseeds) node_unariesalt, node_neighboor_edges_and_weights, node_neighboor_seeds = self.debug_inspect_node( node_msindex ) import sed3 ed = sed3.sed3( self.msinds, contour=segmentation == 0, seeds=node_neighboor_seeds ) ed.show() return ( node_unariesalt, node_neighboor_edges_and_weights, node_neighboor_seeds, node_msindex, )
[ "def", "debug_interactive_inspect_node", "(", "self", ")", ":", "if", "(", "np", ".", "sum", "(", "np", ".", "abs", "(", "np", ".", "asarray", "(", "self", ".", "msinds", ".", "shape", ")", "-", "np", ".", "asarray", "(", "self", ".", "segmentation", ".", "shape", ")", ")", ")", "==", "0", ")", ":", "segmentation", "=", "self", ".", "segmentation", "else", ":", "segmentation", "=", "self", ".", "temp_msgc_resized_segmentation", "logger", ".", "info", "(", "\"Click to select one voxel of interest\"", ")", "import", "sed3", "ed", "=", "sed3", ".", "sed3", "(", "self", ".", "msinds", ",", "contour", "=", "segmentation", "==", "0", ")", "ed", ".", "show", "(", ")", "edseeds", "=", "ed", ".", "seeds", "node_msindex", "=", "get_node_msindex", "(", "self", ".", "msinds", ",", "edseeds", ")", "node_unariesalt", ",", "node_neighboor_edges_and_weights", ",", "node_neighboor_seeds", "=", "self", ".", "debug_inspect_node", "(", "node_msindex", ")", "import", "sed3", "ed", "=", "sed3", ".", "sed3", "(", "self", ".", "msinds", ",", "contour", "=", "segmentation", "==", "0", ",", "seeds", "=", "node_neighboor_seeds", ")", "ed", ".", "show", "(", ")", "return", "(", "node_unariesalt", ",", "node_neighboor_edges_and_weights", ",", "node_neighboor_seeds", ",", "node_msindex", ",", ")" ]
Call after segmentation to see selected node neighborhood. User have to select one node by click. :return:
[ "Call", "after", "segmentation", "to", "see", "selected", "node", "neighborhood", ".", "User", "have", "to", "select", "one", "node", "by", "click", ".", ":", "return", ":" ]
train
https://github.com/mjirik/imcut/blob/1b38e7cd18a7a38fe683c1cabe1222fe5fa03aa3/imcut/pycut.py#L1318-L1359
mjirik/imcut
imcut/pycut.py
ImageGraphCut._ssgc_prepare_data_and_run_computation
def _ssgc_prepare_data_and_run_computation( self, # voxels1, voxels2, hard_constraints=True, area_weight=1, ): """ Setting of data. You need set seeds if you want use hard_constraints. """ # from PyQt4.QtCore import pyqtRemoveInputHook # pyqtRemoveInputHook() # import pdb; pdb.set_trace() # BREAKPOINT unariesalt = self.__create_tlinks( self.img, self.voxelsize, # voxels1, voxels2, self.seeds, area_weight, hard_constraints, ) # některém testu organ semgmentation dosahují unaries -15. což je podiné # stačí vyhodit print před if a je to vidět logger.debug("unaries %.3g , %.3g" % (np.max(unariesalt), np.min(unariesalt))) # create potts pairwise # pairwiseAlpha = -10 pairwise = -(np.eye(2) - 1) pairwise = (self.segparams["pairwise_alpha"] * pairwise).astype(np.int32) # pairwise = np.array([[0,30],[30,0]]).astype(np.int32) # print pairwise self.iparams = {} if self.segparams["use_boundary_penalties"]: sigma = self.segparams["boundary_penalties_sigma"] # set boundary penalties function # Default are penalties based on intensity differences boundary_penalties_fcn = lambda ax: self._boundary_penalties_array( axis=ax, sigma=sigma ) else: boundary_penalties_fcn = None nlinks = self.__create_nlinks( self.img, boundary_penalties_fcn=boundary_penalties_fcn ) self.stats["tlinks shape"].append(unariesalt.reshape(-1, 2).shape) self.stats["nlinks shape"].append(nlinks.shape) # we flatten the unaries # result_graph = cut_from_graph(nlinks, unaries.reshape(-1, 2), # pairwise) start = time.time() if self.debug_images: self._debug_show_unariesalt(unariesalt) result_graph = pygco.cut_from_graph(nlinks, unariesalt.reshape(-1, 2), pairwise) elapsed = time.time() - start self.stats["gc time"] = elapsed result_labeling = result_graph.reshape(self.img.shape) return result_labeling
python
def _ssgc_prepare_data_and_run_computation( self, # voxels1, voxels2, hard_constraints=True, area_weight=1, ): """ Setting of data. You need set seeds if you want use hard_constraints. """ # from PyQt4.QtCore import pyqtRemoveInputHook # pyqtRemoveInputHook() # import pdb; pdb.set_trace() # BREAKPOINT unariesalt = self.__create_tlinks( self.img, self.voxelsize, # voxels1, voxels2, self.seeds, area_weight, hard_constraints, ) # některém testu organ semgmentation dosahují unaries -15. což je podiné # stačí vyhodit print před if a je to vidět logger.debug("unaries %.3g , %.3g" % (np.max(unariesalt), np.min(unariesalt))) # create potts pairwise # pairwiseAlpha = -10 pairwise = -(np.eye(2) - 1) pairwise = (self.segparams["pairwise_alpha"] * pairwise).astype(np.int32) # pairwise = np.array([[0,30],[30,0]]).astype(np.int32) # print pairwise self.iparams = {} if self.segparams["use_boundary_penalties"]: sigma = self.segparams["boundary_penalties_sigma"] # set boundary penalties function # Default are penalties based on intensity differences boundary_penalties_fcn = lambda ax: self._boundary_penalties_array( axis=ax, sigma=sigma ) else: boundary_penalties_fcn = None nlinks = self.__create_nlinks( self.img, boundary_penalties_fcn=boundary_penalties_fcn ) self.stats["tlinks shape"].append(unariesalt.reshape(-1, 2).shape) self.stats["nlinks shape"].append(nlinks.shape) # we flatten the unaries # result_graph = cut_from_graph(nlinks, unaries.reshape(-1, 2), # pairwise) start = time.time() if self.debug_images: self._debug_show_unariesalt(unariesalt) result_graph = pygco.cut_from_graph(nlinks, unariesalt.reshape(-1, 2), pairwise) elapsed = time.time() - start self.stats["gc time"] = elapsed result_labeling = result_graph.reshape(self.img.shape) return result_labeling
[ "def", "_ssgc_prepare_data_and_run_computation", "(", "self", ",", "# voxels1, voxels2,", "hard_constraints", "=", "True", ",", "area_weight", "=", "1", ",", ")", ":", "# from PyQt4.QtCore import pyqtRemoveInputHook", "# pyqtRemoveInputHook()", "# import pdb; pdb.set_trace() # BREAKPOINT", "unariesalt", "=", "self", ".", "__create_tlinks", "(", "self", ".", "img", ",", "self", ".", "voxelsize", ",", "# voxels1, voxels2,", "self", ".", "seeds", ",", "area_weight", ",", "hard_constraints", ",", ")", "# některém testu organ semgmentation dosahují unaries -15. což je podiné", "# stačí vyhodit print před if a je to vidět", "logger", ".", "debug", "(", "\"unaries %.3g , %.3g\"", "%", "(", "np", ".", "max", "(", "unariesalt", ")", ",", "np", ".", "min", "(", "unariesalt", ")", ")", ")", "# create potts pairwise", "# pairwiseAlpha = -10", "pairwise", "=", "-", "(", "np", ".", "eye", "(", "2", ")", "-", "1", ")", "pairwise", "=", "(", "self", ".", "segparams", "[", "\"pairwise_alpha\"", "]", "*", "pairwise", ")", ".", "astype", "(", "np", ".", "int32", ")", "# pairwise = np.array([[0,30],[30,0]]).astype(np.int32)", "# print pairwise", "self", ".", "iparams", "=", "{", "}", "if", "self", ".", "segparams", "[", "\"use_boundary_penalties\"", "]", ":", "sigma", "=", "self", ".", "segparams", "[", "\"boundary_penalties_sigma\"", "]", "# set boundary penalties function", "# Default are penalties based on intensity differences", "boundary_penalties_fcn", "=", "lambda", "ax", ":", "self", ".", "_boundary_penalties_array", "(", "axis", "=", "ax", ",", "sigma", "=", "sigma", ")", "else", ":", "boundary_penalties_fcn", "=", "None", "nlinks", "=", "self", ".", "__create_nlinks", "(", "self", ".", "img", ",", "boundary_penalties_fcn", "=", "boundary_penalties_fcn", ")", "self", ".", "stats", "[", "\"tlinks shape\"", "]", ".", "append", "(", "unariesalt", ".", "reshape", "(", "-", "1", ",", "2", ")", ".", "shape", ")", "self", ".", "stats", "[", "\"nlinks shape\"", "]", ".", "append", 
"(", "nlinks", ".", "shape", ")", "# we flatten the unaries", "# result_graph = cut_from_graph(nlinks, unaries.reshape(-1, 2),", "# pairwise)", "start", "=", "time", ".", "time", "(", ")", "if", "self", ".", "debug_images", ":", "self", ".", "_debug_show_unariesalt", "(", "unariesalt", ")", "result_graph", "=", "pygco", ".", "cut_from_graph", "(", "nlinks", ",", "unariesalt", ".", "reshape", "(", "-", "1", ",", "2", ")", ",", "pairwise", ")", "elapsed", "=", "time", ".", "time", "(", ")", "-", "start", "self", ".", "stats", "[", "\"gc time\"", "]", "=", "elapsed", "result_labeling", "=", "result_graph", ".", "reshape", "(", "self", ".", "img", ".", "shape", ")", "return", "result_labeling" ]
Setting of data. You need set seeds if you want use hard_constraints.
[ "Setting", "of", "data", ".", "You", "need", "set", "seeds", "if", "you", "want", "use", "hard_constraints", "." ]
train
https://github.com/mjirik/imcut/blob/1b38e7cd18a7a38fe683c1cabe1222fe5fa03aa3/imcut/pycut.py#L1370-L1430
mjirik/imcut
imcut/image_manipulation.py
resize_to_shape
def resize_to_shape(data, shape, zoom=None, mode="nearest", order=0): """ Function resize input data to specific shape. :param data: input 3d array-like data :param shape: shape of output data :param zoom: zoom is used for back compatibility :mode: default is 'nearest' """ # @TODO remove old code in except part # TODO use function from library in future try: # rint 'pred vyjimkou' # aise Exception ('test without skimage') # rint 'za vyjimkou' import skimage import skimage.transform # Now we need reshape seeds and segmentation to original size # with warnings.catch_warnings(): # warnings.filterwarnings("ignore", ".*'constant', will be changed to.*") segm_orig_scale = skimage.transform.resize( data, shape, order=0, preserve_range=True, mode="reflect" ) segmentation = segm_orig_scale logger.debug("resize to orig with skimage") except: if zoom is None: zoom = shape / np.asarray(data.shape).astype(np.double) segmentation = resize_to_shape_with_zoom( data, zoom=zoom, mode=mode, order=order ) return segmentation
python
def resize_to_shape(data, shape, zoom=None, mode="nearest", order=0): """ Function resize input data to specific shape. :param data: input 3d array-like data :param shape: shape of output data :param zoom: zoom is used for back compatibility :mode: default is 'nearest' """ # @TODO remove old code in except part # TODO use function from library in future try: # rint 'pred vyjimkou' # aise Exception ('test without skimage') # rint 'za vyjimkou' import skimage import skimage.transform # Now we need reshape seeds and segmentation to original size # with warnings.catch_warnings(): # warnings.filterwarnings("ignore", ".*'constant', will be changed to.*") segm_orig_scale = skimage.transform.resize( data, shape, order=0, preserve_range=True, mode="reflect" ) segmentation = segm_orig_scale logger.debug("resize to orig with skimage") except: if zoom is None: zoom = shape / np.asarray(data.shape).astype(np.double) segmentation = resize_to_shape_with_zoom( data, zoom=zoom, mode=mode, order=order ) return segmentation
[ "def", "resize_to_shape", "(", "data", ",", "shape", ",", "zoom", "=", "None", ",", "mode", "=", "\"nearest\"", ",", "order", "=", "0", ")", ":", "# @TODO remove old code in except part", "# TODO use function from library in future", "try", ":", "# rint 'pred vyjimkou'", "# aise Exception ('test without skimage')", "# rint 'za vyjimkou'", "import", "skimage", "import", "skimage", ".", "transform", "# Now we need reshape seeds and segmentation to original size", "# with warnings.catch_warnings():", "# warnings.filterwarnings(\"ignore\", \".*'constant', will be changed to.*\")", "segm_orig_scale", "=", "skimage", ".", "transform", ".", "resize", "(", "data", ",", "shape", ",", "order", "=", "0", ",", "preserve_range", "=", "True", ",", "mode", "=", "\"reflect\"", ")", "segmentation", "=", "segm_orig_scale", "logger", ".", "debug", "(", "\"resize to orig with skimage\"", ")", "except", ":", "if", "zoom", "is", "None", ":", "zoom", "=", "shape", "/", "np", ".", "asarray", "(", "data", ".", "shape", ")", ".", "astype", "(", "np", ".", "double", ")", "segmentation", "=", "resize_to_shape_with_zoom", "(", "data", ",", "zoom", "=", "zoom", ",", "mode", "=", "mode", ",", "order", "=", "order", ")", "return", "segmentation" ]
Function resize input data to specific shape. :param data: input 3d array-like data :param shape: shape of output data :param zoom: zoom is used for back compatibility :mode: default is 'nearest'
[ "Function", "resize", "input", "data", "to", "specific", "shape", ".", ":", "param", "data", ":", "input", "3d", "array", "-", "like", "data", ":", "param", "shape", ":", "shape", "of", "output", "data", ":", "param", "zoom", ":", "zoom", "is", "used", "for", "back", "compatibility", ":", "mode", ":", "default", "is", "nearest" ]
train
https://github.com/mjirik/imcut/blob/1b38e7cd18a7a38fe683c1cabe1222fe5fa03aa3/imcut/image_manipulation.py#L14-L49
mjirik/imcut
imcut/image_manipulation.py
seed_zoom
def seed_zoom(seeds, zoom): """ Smart zoom for sparse matrix. If there is resize to bigger resolution thin line of label could be lost. This function prefers labels larger then zero. If there is only one small voxel in larger volume with zeros it is selected. """ # import scipy # loseeds=seeds labels = np.unique(seeds) # remove first label - 0 labels = np.delete(labels, 0) # @TODO smart interpolation for seeds in one block # loseeds = scipy.ndimage.interpolation.zoom( # seeds, zoom, order=0) loshape = np.ceil(np.array(seeds.shape) * 1.0 / zoom).astype(np.int) loseeds = np.zeros(loshape, dtype=np.int8) loseeds = loseeds.astype(np.int8) for label in labels: a, b, c = np.where(seeds == label) loa = np.round(a // zoom) lob = np.round(b // zoom) loc = np.round(c // zoom) # loseeds = np.zeros(loshape) loseeds[loa, lob, loc] += label # this is to detect conflict seeds loseeds[loseeds > label] = 100 # remove conflict seeds loseeds[loseeds > 99] = 0 # import py3DSeedEditor # ped = py3DSeedEditor.py3DSeedEditor(loseeds) # ped.show() return loseeds
python
def seed_zoom(seeds, zoom): """ Smart zoom for sparse matrix. If there is resize to bigger resolution thin line of label could be lost. This function prefers labels larger then zero. If there is only one small voxel in larger volume with zeros it is selected. """ # import scipy # loseeds=seeds labels = np.unique(seeds) # remove first label - 0 labels = np.delete(labels, 0) # @TODO smart interpolation for seeds in one block # loseeds = scipy.ndimage.interpolation.zoom( # seeds, zoom, order=0) loshape = np.ceil(np.array(seeds.shape) * 1.0 / zoom).astype(np.int) loseeds = np.zeros(loshape, dtype=np.int8) loseeds = loseeds.astype(np.int8) for label in labels: a, b, c = np.where(seeds == label) loa = np.round(a // zoom) lob = np.round(b // zoom) loc = np.round(c // zoom) # loseeds = np.zeros(loshape) loseeds[loa, lob, loc] += label # this is to detect conflict seeds loseeds[loseeds > label] = 100 # remove conflict seeds loseeds[loseeds > 99] = 0 # import py3DSeedEditor # ped = py3DSeedEditor.py3DSeedEditor(loseeds) # ped.show() return loseeds
[ "def", "seed_zoom", "(", "seeds", ",", "zoom", ")", ":", "# import scipy", "# loseeds=seeds", "labels", "=", "np", ".", "unique", "(", "seeds", ")", "# remove first label - 0", "labels", "=", "np", ".", "delete", "(", "labels", ",", "0", ")", "# @TODO smart interpolation for seeds in one block", "# loseeds = scipy.ndimage.interpolation.zoom(", "# seeds, zoom, order=0)", "loshape", "=", "np", ".", "ceil", "(", "np", ".", "array", "(", "seeds", ".", "shape", ")", "*", "1.0", "/", "zoom", ")", ".", "astype", "(", "np", ".", "int", ")", "loseeds", "=", "np", ".", "zeros", "(", "loshape", ",", "dtype", "=", "np", ".", "int8", ")", "loseeds", "=", "loseeds", ".", "astype", "(", "np", ".", "int8", ")", "for", "label", "in", "labels", ":", "a", ",", "b", ",", "c", "=", "np", ".", "where", "(", "seeds", "==", "label", ")", "loa", "=", "np", ".", "round", "(", "a", "//", "zoom", ")", "lob", "=", "np", ".", "round", "(", "b", "//", "zoom", ")", "loc", "=", "np", ".", "round", "(", "c", "//", "zoom", ")", "# loseeds = np.zeros(loshape)", "loseeds", "[", "loa", ",", "lob", ",", "loc", "]", "+=", "label", "# this is to detect conflict seeds", "loseeds", "[", "loseeds", ">", "label", "]", "=", "100", "# remove conflict seeds", "loseeds", "[", "loseeds", ">", "99", "]", "=", "0", "# import py3DSeedEditor", "# ped = py3DSeedEditor.py3DSeedEditor(loseeds)", "# ped.show()", "return", "loseeds" ]
Smart zoom for sparse matrix. If there is resize to bigger resolution thin line of label could be lost. This function prefers labels larger then zero. If there is only one small voxel in larger volume with zeros it is selected.
[ "Smart", "zoom", "for", "sparse", "matrix", ".", "If", "there", "is", "resize", "to", "bigger", "resolution", "thin", "line", "of", "label", "could", "be", "lost", ".", "This", "function", "prefers", "labels", "larger", "then", "zero", ".", "If", "there", "is", "only", "one", "small", "voxel", "in", "larger", "volume", "with", "zeros", "it", "is", "selected", "." ]
train
https://github.com/mjirik/imcut/blob/1b38e7cd18a7a38fe683c1cabe1222fe5fa03aa3/imcut/image_manipulation.py#L85-L121
mjirik/imcut
imcut/image_manipulation.py
zoom_to_shape
def zoom_to_shape(data, shape, dtype=None): """ Zoom data to specific shape. """ import scipy import scipy.ndimage zoomd = np.array(shape) / np.array(data.shape, dtype=np.double) import warnings datares = scipy.ndimage.interpolation.zoom(data, zoomd, order=0, mode="reflect") if datares.shape != shape: logger.warning("Zoom with different output shape") dataout = np.zeros(shape, dtype=dtype) shpmin = np.minimum(dataout.shape, shape) dataout[: shpmin[0], : shpmin[1], : shpmin[2]] = datares[ : shpmin[0], : shpmin[1], : shpmin[2] ] return datares
python
def zoom_to_shape(data, shape, dtype=None): """ Zoom data to specific shape. """ import scipy import scipy.ndimage zoomd = np.array(shape) / np.array(data.shape, dtype=np.double) import warnings datares = scipy.ndimage.interpolation.zoom(data, zoomd, order=0, mode="reflect") if datares.shape != shape: logger.warning("Zoom with different output shape") dataout = np.zeros(shape, dtype=dtype) shpmin = np.minimum(dataout.shape, shape) dataout[: shpmin[0], : shpmin[1], : shpmin[2]] = datares[ : shpmin[0], : shpmin[1], : shpmin[2] ] return datares
[ "def", "zoom_to_shape", "(", "data", ",", "shape", ",", "dtype", "=", "None", ")", ":", "import", "scipy", "import", "scipy", ".", "ndimage", "zoomd", "=", "np", ".", "array", "(", "shape", ")", "/", "np", ".", "array", "(", "data", ".", "shape", ",", "dtype", "=", "np", ".", "double", ")", "import", "warnings", "datares", "=", "scipy", ".", "ndimage", ".", "interpolation", ".", "zoom", "(", "data", ",", "zoomd", ",", "order", "=", "0", ",", "mode", "=", "\"reflect\"", ")", "if", "datares", ".", "shape", "!=", "shape", ":", "logger", ".", "warning", "(", "\"Zoom with different output shape\"", ")", "dataout", "=", "np", ".", "zeros", "(", "shape", ",", "dtype", "=", "dtype", ")", "shpmin", "=", "np", ".", "minimum", "(", "dataout", ".", "shape", ",", "shape", ")", "dataout", "[", ":", "shpmin", "[", "0", "]", ",", ":", "shpmin", "[", "1", "]", ",", ":", "shpmin", "[", "2", "]", "]", "=", "datares", "[", ":", "shpmin", "[", "0", "]", ",", ":", "shpmin", "[", "1", "]", ",", ":", "shpmin", "[", "2", "]", "]", "return", "datares" ]
Zoom data to specific shape.
[ "Zoom", "data", "to", "specific", "shape", "." ]
train
https://github.com/mjirik/imcut/blob/1b38e7cd18a7a38fe683c1cabe1222fe5fa03aa3/imcut/image_manipulation.py#L124-L144
mjirik/imcut
imcut/image_manipulation.py
crop
def crop(data, crinfo): """ Crop the data. crop(data, crinfo) :param crinfo: min and max for each axis - [[minX, maxX], [minY, maxY], [minZ, maxZ]] """ crinfo = fix_crinfo(crinfo) return data[ __int_or_none(crinfo[0][0]) : __int_or_none(crinfo[0][1]), __int_or_none(crinfo[1][0]) : __int_or_none(crinfo[1][1]), __int_or_none(crinfo[2][0]) : __int_or_none(crinfo[2][1]), ]
python
def crop(data, crinfo): """ Crop the data. crop(data, crinfo) :param crinfo: min and max for each axis - [[minX, maxX], [minY, maxY], [minZ, maxZ]] """ crinfo = fix_crinfo(crinfo) return data[ __int_or_none(crinfo[0][0]) : __int_or_none(crinfo[0][1]), __int_or_none(crinfo[1][0]) : __int_or_none(crinfo[1][1]), __int_or_none(crinfo[2][0]) : __int_or_none(crinfo[2][1]), ]
[ "def", "crop", "(", "data", ",", "crinfo", ")", ":", "crinfo", "=", "fix_crinfo", "(", "crinfo", ")", "return", "data", "[", "__int_or_none", "(", "crinfo", "[", "0", "]", "[", "0", "]", ")", ":", "__int_or_none", "(", "crinfo", "[", "0", "]", "[", "1", "]", ")", ",", "__int_or_none", "(", "crinfo", "[", "1", "]", "[", "0", "]", ")", ":", "__int_or_none", "(", "crinfo", "[", "1", "]", "[", "1", "]", ")", ",", "__int_or_none", "(", "crinfo", "[", "2", "]", "[", "0", "]", ")", ":", "__int_or_none", "(", "crinfo", "[", "2", "]", "[", "1", "]", ")", ",", "]" ]
Crop the data. crop(data, crinfo) :param crinfo: min and max for each axis - [[minX, maxX], [minY, maxY], [minZ, maxZ]]
[ "Crop", "the", "data", "." ]
train
https://github.com/mjirik/imcut/blob/1b38e7cd18a7a38fe683c1cabe1222fe5fa03aa3/imcut/image_manipulation.py#L361-L375
mjirik/imcut
imcut/image_manipulation.py
combinecrinfo
def combinecrinfo(crinfo1, crinfo2): """ Combine two crinfos. First used is crinfo1, second used is crinfo2. """ crinfo1 = fix_crinfo(crinfo1) crinfo2 = fix_crinfo(crinfo2) crinfo = [ [crinfo1[0][0] + crinfo2[0][0], crinfo1[0][0] + crinfo2[0][1]], [crinfo1[1][0] + crinfo2[1][0], crinfo1[1][0] + crinfo2[1][1]], [crinfo1[2][0] + crinfo2[2][0], crinfo1[2][0] + crinfo2[2][1]], ] return crinfo
python
def combinecrinfo(crinfo1, crinfo2): """ Combine two crinfos. First used is crinfo1, second used is crinfo2. """ crinfo1 = fix_crinfo(crinfo1) crinfo2 = fix_crinfo(crinfo2) crinfo = [ [crinfo1[0][0] + crinfo2[0][0], crinfo1[0][0] + crinfo2[0][1]], [crinfo1[1][0] + crinfo2[1][0], crinfo1[1][0] + crinfo2[1][1]], [crinfo1[2][0] + crinfo2[2][0], crinfo1[2][0] + crinfo2[2][1]], ] return crinfo
[ "def", "combinecrinfo", "(", "crinfo1", ",", "crinfo2", ")", ":", "crinfo1", "=", "fix_crinfo", "(", "crinfo1", ")", "crinfo2", "=", "fix_crinfo", "(", "crinfo2", ")", "crinfo", "=", "[", "[", "crinfo1", "[", "0", "]", "[", "0", "]", "+", "crinfo2", "[", "0", "]", "[", "0", "]", ",", "crinfo1", "[", "0", "]", "[", "0", "]", "+", "crinfo2", "[", "0", "]", "[", "1", "]", "]", ",", "[", "crinfo1", "[", "1", "]", "[", "0", "]", "+", "crinfo2", "[", "1", "]", "[", "0", "]", ",", "crinfo1", "[", "1", "]", "[", "0", "]", "+", "crinfo2", "[", "1", "]", "[", "1", "]", "]", ",", "[", "crinfo1", "[", "2", "]", "[", "0", "]", "+", "crinfo2", "[", "2", "]", "[", "0", "]", ",", "crinfo1", "[", "2", "]", "[", "0", "]", "+", "crinfo2", "[", "2", "]", "[", "1", "]", "]", ",", "]", "return", "crinfo" ]
Combine two crinfos. First used is crinfo1, second used is crinfo2.
[ "Combine", "two", "crinfos", ".", "First", "used", "is", "crinfo1", "second", "used", "is", "crinfo2", "." ]
train
https://github.com/mjirik/imcut/blob/1b38e7cd18a7a38fe683c1cabe1222fe5fa03aa3/imcut/image_manipulation.py#L384-L397
mjirik/imcut
imcut/image_manipulation.py
crinfo_from_specific_data
def crinfo_from_specific_data(data, margin=0): """ Create crinfo of minimum orthogonal nonzero block in input data. :param data: input data :param margin: add margin to minimum block :return: """ # hledáme automatický ořez, nonzero dá indexy logger.debug("crinfo") logger.debug(str(margin)) nzi = np.nonzero(data) logger.debug(str(nzi)) if np.isscalar(margin): margin = [margin] * 3 x1 = np.min(nzi[0]) - margin[0] x2 = np.max(nzi[0]) + margin[0] + 1 y1 = np.min(nzi[1]) - margin[0] y2 = np.max(nzi[1]) + margin[0] + 1 z1 = np.min(nzi[2]) - margin[0] z2 = np.max(nzi[2]) + margin[0] + 1 # ošetření mezí polí if x1 < 0: x1 = 0 if y1 < 0: y1 = 0 if z1 < 0: z1 = 0 if x2 > data.shape[0]: x2 = data.shape[0] - 1 if y2 > data.shape[1]: y2 = data.shape[1] - 1 if z2 > data.shape[2]: z2 = data.shape[2] - 1 # ořez crinfo = [[x1, x2], [y1, y2], [z1, z2]] return crinfo
python
def crinfo_from_specific_data(data, margin=0): """ Create crinfo of minimum orthogonal nonzero block in input data. :param data: input data :param margin: add margin to minimum block :return: """ # hledáme automatický ořez, nonzero dá indexy logger.debug("crinfo") logger.debug(str(margin)) nzi = np.nonzero(data) logger.debug(str(nzi)) if np.isscalar(margin): margin = [margin] * 3 x1 = np.min(nzi[0]) - margin[0] x2 = np.max(nzi[0]) + margin[0] + 1 y1 = np.min(nzi[1]) - margin[0] y2 = np.max(nzi[1]) + margin[0] + 1 z1 = np.min(nzi[2]) - margin[0] z2 = np.max(nzi[2]) + margin[0] + 1 # ošetření mezí polí if x1 < 0: x1 = 0 if y1 < 0: y1 = 0 if z1 < 0: z1 = 0 if x2 > data.shape[0]: x2 = data.shape[0] - 1 if y2 > data.shape[1]: y2 = data.shape[1] - 1 if z2 > data.shape[2]: z2 = data.shape[2] - 1 # ořez crinfo = [[x1, x2], [y1, y2], [z1, z2]] return crinfo
[ "def", "crinfo_from_specific_data", "(", "data", ",", "margin", "=", "0", ")", ":", "# hledáme automatický ořez, nonzero dá indexy", "logger", ".", "debug", "(", "\"crinfo\"", ")", "logger", ".", "debug", "(", "str", "(", "margin", ")", ")", "nzi", "=", "np", ".", "nonzero", "(", "data", ")", "logger", ".", "debug", "(", "str", "(", "nzi", ")", ")", "if", "np", ".", "isscalar", "(", "margin", ")", ":", "margin", "=", "[", "margin", "]", "*", "3", "x1", "=", "np", ".", "min", "(", "nzi", "[", "0", "]", ")", "-", "margin", "[", "0", "]", "x2", "=", "np", ".", "max", "(", "nzi", "[", "0", "]", ")", "+", "margin", "[", "0", "]", "+", "1", "y1", "=", "np", ".", "min", "(", "nzi", "[", "1", "]", ")", "-", "margin", "[", "0", "]", "y2", "=", "np", ".", "max", "(", "nzi", "[", "1", "]", ")", "+", "margin", "[", "0", "]", "+", "1", "z1", "=", "np", ".", "min", "(", "nzi", "[", "2", "]", ")", "-", "margin", "[", "0", "]", "z2", "=", "np", ".", "max", "(", "nzi", "[", "2", "]", ")", "+", "margin", "[", "0", "]", "+", "1", "# ošetření mezí polí", "if", "x1", "<", "0", ":", "x1", "=", "0", "if", "y1", "<", "0", ":", "y1", "=", "0", "if", "z1", "<", "0", ":", "z1", "=", "0", "if", "x2", ">", "data", ".", "shape", "[", "0", "]", ":", "x2", "=", "data", ".", "shape", "[", "0", "]", "-", "1", "if", "y2", ">", "data", ".", "shape", "[", "1", "]", ":", "y2", "=", "data", ".", "shape", "[", "1", "]", "-", "1", "if", "z2", ">", "data", ".", "shape", "[", "2", "]", ":", "z2", "=", "data", ".", "shape", "[", "2", "]", "-", "1", "# ořez", "crinfo", "=", "[", "[", "x1", ",", "x2", "]", ",", "[", "y1", ",", "y2", "]", ",", "[", "z1", ",", "z2", "]", "]", "return", "crinfo" ]
Create crinfo of minimum orthogonal nonzero block in input data. :param data: input data :param margin: add margin to minimum block :return:
[ "Create", "crinfo", "of", "minimum", "orthogonal", "nonzero", "block", "in", "input", "data", "." ]
train
https://github.com/mjirik/imcut/blob/1b38e7cd18a7a38fe683c1cabe1222fe5fa03aa3/imcut/image_manipulation.py#L400-L441
mjirik/imcut
imcut/image_manipulation.py
uncrop
def uncrop(data, crinfo, orig_shape, resize=False, outside_mode="constant", cval=0): """ Put some boundary to input image. :param data: input data :param crinfo: array with minimum and maximum index along each axis [[minX, maxX],[minY, maxY],[minZ, maxZ]]. If crinfo is None, the whole input image is placed into [0, 0, 0]. If crinfo is just series of three numbers, it is used as an initial point for input image placement. :param orig_shape: shape of uncropped image :param resize: True or False (default). Usefull if the data.shape does not fit to crinfo shape. :param outside_mode: 'constant', 'nearest' :return: """ if crinfo is None: crinfo = list(zip([0] * data.ndim, orig_shape)) elif np.asarray(crinfo).size == data.ndim: crinfo = list(zip(crinfo, np.asarray(crinfo) + data.shape)) crinfo = fix_crinfo(crinfo) data_out = np.ones(orig_shape, dtype=data.dtype) * cval # print 'uncrop ', crinfo # print orig_shape # print data.shape if resize: data = resize_to_shape(data, crinfo[:, 1] - crinfo[:, 0]) startx = np.round(crinfo[0][0]).astype(int) starty = np.round(crinfo[1][0]).astype(int) startz = np.round(crinfo[2][0]).astype(int) data_out[ # np.round(crinfo[0][0]).astype(int):np.round(crinfo[0][1]).astype(int)+1, # np.round(crinfo[1][0]).astype(int):np.round(crinfo[1][1]).astype(int)+1, # np.round(crinfo[2][0]).astype(int):np.round(crinfo[2][1]).astype(int)+1 startx : startx + data.shape[0], starty : starty + data.shape[1], startz : startz + data.shape[2], ] = data if outside_mode == "nearest": # for ax in range(data.ndims): # ax = 0 # copy border slice to pixels out of boundary - the higher part for ax in range(data.ndim): # the part under the crop start = np.round(crinfo[ax][0]).astype(int) slices = [slice(None), slice(None), slice(None)] slices[ax] = start repeated_slice = np.expand_dims(data_out[slices], ax) append_sz = start if append_sz > 0: tile0 = np.repeat(repeated_slice, append_sz, axis=ax) slices = [slice(None), slice(None), slice(None)] slices[ax] = slice(None, 
start) # data_out[start + data.shape[ax] : , :, :] = tile0 data_out[slices] = tile0 # plt.imshow(np.squeeze(repeated_slice)) # plt.show() # the part over the crop start = np.round(crinfo[ax][0]).astype(int) slices = [slice(None), slice(None), slice(None)] slices[ax] = start + data.shape[ax] - 1 repeated_slice = np.expand_dims(data_out[slices], ax) append_sz = data_out.shape[ax] - (start + data.shape[ax]) if append_sz > 0: tile0 = np.repeat(repeated_slice, append_sz, axis=ax) slices = [slice(None), slice(None), slice(None)] slices[ax] = slice(start + data.shape[ax], None) # data_out[start + data.shape[ax] : , :, :] = tile0 data_out[slices] = tile0 # plt.imshow(np.squeeze(repeated_slice)) # plt.show() return data_out
python
def uncrop(data, crinfo, orig_shape, resize=False, outside_mode="constant", cval=0): """ Put some boundary to input image. :param data: input data :param crinfo: array with minimum and maximum index along each axis [[minX, maxX],[minY, maxY],[minZ, maxZ]]. If crinfo is None, the whole input image is placed into [0, 0, 0]. If crinfo is just series of three numbers, it is used as an initial point for input image placement. :param orig_shape: shape of uncropped image :param resize: True or False (default). Usefull if the data.shape does not fit to crinfo shape. :param outside_mode: 'constant', 'nearest' :return: """ if crinfo is None: crinfo = list(zip([0] * data.ndim, orig_shape)) elif np.asarray(crinfo).size == data.ndim: crinfo = list(zip(crinfo, np.asarray(crinfo) + data.shape)) crinfo = fix_crinfo(crinfo) data_out = np.ones(orig_shape, dtype=data.dtype) * cval # print 'uncrop ', crinfo # print orig_shape # print data.shape if resize: data = resize_to_shape(data, crinfo[:, 1] - crinfo[:, 0]) startx = np.round(crinfo[0][0]).astype(int) starty = np.round(crinfo[1][0]).astype(int) startz = np.round(crinfo[2][0]).astype(int) data_out[ # np.round(crinfo[0][0]).astype(int):np.round(crinfo[0][1]).astype(int)+1, # np.round(crinfo[1][0]).astype(int):np.round(crinfo[1][1]).astype(int)+1, # np.round(crinfo[2][0]).astype(int):np.round(crinfo[2][1]).astype(int)+1 startx : startx + data.shape[0], starty : starty + data.shape[1], startz : startz + data.shape[2], ] = data if outside_mode == "nearest": # for ax in range(data.ndims): # ax = 0 # copy border slice to pixels out of boundary - the higher part for ax in range(data.ndim): # the part under the crop start = np.round(crinfo[ax][0]).astype(int) slices = [slice(None), slice(None), slice(None)] slices[ax] = start repeated_slice = np.expand_dims(data_out[slices], ax) append_sz = start if append_sz > 0: tile0 = np.repeat(repeated_slice, append_sz, axis=ax) slices = [slice(None), slice(None), slice(None)] slices[ax] = slice(None, 
start) # data_out[start + data.shape[ax] : , :, :] = tile0 data_out[slices] = tile0 # plt.imshow(np.squeeze(repeated_slice)) # plt.show() # the part over the crop start = np.round(crinfo[ax][0]).astype(int) slices = [slice(None), slice(None), slice(None)] slices[ax] = start + data.shape[ax] - 1 repeated_slice = np.expand_dims(data_out[slices], ax) append_sz = data_out.shape[ax] - (start + data.shape[ax]) if append_sz > 0: tile0 = np.repeat(repeated_slice, append_sz, axis=ax) slices = [slice(None), slice(None), slice(None)] slices[ax] = slice(start + data.shape[ax], None) # data_out[start + data.shape[ax] : , :, :] = tile0 data_out[slices] = tile0 # plt.imshow(np.squeeze(repeated_slice)) # plt.show() return data_out
[ "def", "uncrop", "(", "data", ",", "crinfo", ",", "orig_shape", ",", "resize", "=", "False", ",", "outside_mode", "=", "\"constant\"", ",", "cval", "=", "0", ")", ":", "if", "crinfo", "is", "None", ":", "crinfo", "=", "list", "(", "zip", "(", "[", "0", "]", "*", "data", ".", "ndim", ",", "orig_shape", ")", ")", "elif", "np", ".", "asarray", "(", "crinfo", ")", ".", "size", "==", "data", ".", "ndim", ":", "crinfo", "=", "list", "(", "zip", "(", "crinfo", ",", "np", ".", "asarray", "(", "crinfo", ")", "+", "data", ".", "shape", ")", ")", "crinfo", "=", "fix_crinfo", "(", "crinfo", ")", "data_out", "=", "np", ".", "ones", "(", "orig_shape", ",", "dtype", "=", "data", ".", "dtype", ")", "*", "cval", "# print 'uncrop ', crinfo", "# print orig_shape", "# print data.shape", "if", "resize", ":", "data", "=", "resize_to_shape", "(", "data", ",", "crinfo", "[", ":", ",", "1", "]", "-", "crinfo", "[", ":", ",", "0", "]", ")", "startx", "=", "np", ".", "round", "(", "crinfo", "[", "0", "]", "[", "0", "]", ")", ".", "astype", "(", "int", ")", "starty", "=", "np", ".", "round", "(", "crinfo", "[", "1", "]", "[", "0", "]", ")", ".", "astype", "(", "int", ")", "startz", "=", "np", ".", "round", "(", "crinfo", "[", "2", "]", "[", "0", "]", ")", ".", "astype", "(", "int", ")", "data_out", "[", "# np.round(crinfo[0][0]).astype(int):np.round(crinfo[0][1]).astype(int)+1,", "# np.round(crinfo[1][0]).astype(int):np.round(crinfo[1][1]).astype(int)+1,", "# np.round(crinfo[2][0]).astype(int):np.round(crinfo[2][1]).astype(int)+1", "startx", ":", "startx", "+", "data", ".", "shape", "[", "0", "]", ",", "starty", ":", "starty", "+", "data", ".", "shape", "[", "1", "]", ",", "startz", ":", "startz", "+", "data", ".", "shape", "[", "2", "]", ",", "]", "=", "data", "if", "outside_mode", "==", "\"nearest\"", ":", "# for ax in range(data.ndims):", "# ax = 0", "# copy border slice to pixels out of boundary - the higher part", "for", "ax", "in", "range", "(", "data", ".", "ndim", ")", ":", "# the 
part under the crop", "start", "=", "np", ".", "round", "(", "crinfo", "[", "ax", "]", "[", "0", "]", ")", ".", "astype", "(", "int", ")", "slices", "=", "[", "slice", "(", "None", ")", ",", "slice", "(", "None", ")", ",", "slice", "(", "None", ")", "]", "slices", "[", "ax", "]", "=", "start", "repeated_slice", "=", "np", ".", "expand_dims", "(", "data_out", "[", "slices", "]", ",", "ax", ")", "append_sz", "=", "start", "if", "append_sz", ">", "0", ":", "tile0", "=", "np", ".", "repeat", "(", "repeated_slice", ",", "append_sz", ",", "axis", "=", "ax", ")", "slices", "=", "[", "slice", "(", "None", ")", ",", "slice", "(", "None", ")", ",", "slice", "(", "None", ")", "]", "slices", "[", "ax", "]", "=", "slice", "(", "None", ",", "start", ")", "# data_out[start + data.shape[ax] : , :, :] = tile0", "data_out", "[", "slices", "]", "=", "tile0", "# plt.imshow(np.squeeze(repeated_slice))", "# plt.show()", "# the part over the crop", "start", "=", "np", ".", "round", "(", "crinfo", "[", "ax", "]", "[", "0", "]", ")", ".", "astype", "(", "int", ")", "slices", "=", "[", "slice", "(", "None", ")", ",", "slice", "(", "None", ")", ",", "slice", "(", "None", ")", "]", "slices", "[", "ax", "]", "=", "start", "+", "data", ".", "shape", "[", "ax", "]", "-", "1", "repeated_slice", "=", "np", ".", "expand_dims", "(", "data_out", "[", "slices", "]", ",", "ax", ")", "append_sz", "=", "data_out", ".", "shape", "[", "ax", "]", "-", "(", "start", "+", "data", ".", "shape", "[", "ax", "]", ")", "if", "append_sz", ">", "0", ":", "tile0", "=", "np", ".", "repeat", "(", "repeated_slice", ",", "append_sz", ",", "axis", "=", "ax", ")", "slices", "=", "[", "slice", "(", "None", ")", ",", "slice", "(", "None", ")", ",", "slice", "(", "None", ")", "]", "slices", "[", "ax", "]", "=", "slice", "(", "start", "+", "data", ".", "shape", "[", "ax", "]", ",", "None", ")", "# data_out[start + data.shape[ax] : , :, :] = tile0", "data_out", "[", "slices", "]", "=", "tile0", "# 
plt.imshow(np.squeeze(repeated_slice))", "# plt.show()", "return", "data_out" ]
Put some boundary to input image. :param data: input data :param crinfo: array with minimum and maximum index along each axis [[minX, maxX],[minY, maxY],[minZ, maxZ]]. If crinfo is None, the whole input image is placed into [0, 0, 0]. If crinfo is just series of three numbers, it is used as an initial point for input image placement. :param orig_shape: shape of uncropped image :param resize: True or False (default). Usefull if the data.shape does not fit to crinfo shape. :param outside_mode: 'constant', 'nearest' :return:
[ "Put", "some", "boundary", "to", "input", "image", "." ]
train
https://github.com/mjirik/imcut/blob/1b38e7cd18a7a38fe683c1cabe1222fe5fa03aa3/imcut/image_manipulation.py#L444-L522
mjirik/imcut
imcut/image_manipulation.py
fix_crinfo
def fix_crinfo(crinfo, to="axis"): """ Function recognize order of crinfo and convert it to proper format. """ crinfo = np.asarray(crinfo) if crinfo.shape[0] == 2: crinfo = crinfo.T return crinfo
python
def fix_crinfo(crinfo, to="axis"): """ Function recognize order of crinfo and convert it to proper format. """ crinfo = np.asarray(crinfo) if crinfo.shape[0] == 2: crinfo = crinfo.T return crinfo
[ "def", "fix_crinfo", "(", "crinfo", ",", "to", "=", "\"axis\"", ")", ":", "crinfo", "=", "np", ".", "asarray", "(", "crinfo", ")", "if", "crinfo", ".", "shape", "[", "0", "]", "==", "2", ":", "crinfo", "=", "crinfo", ".", "T", "return", "crinfo" ]
Function recognize order of crinfo and convert it to proper format.
[ "Function", "recognize", "order", "of", "crinfo", "and", "convert", "it", "to", "proper", "format", "." ]
train
https://github.com/mjirik/imcut/blob/1b38e7cd18a7a38fe683c1cabe1222fe5fa03aa3/imcut/image_manipulation.py#L525-L534
mjirik/imcut
imcut/graph.py
grid_edges
def grid_edges(shape, inds=None, return_directions=True): """ Get list of grid edges :param shape: :param inds: :param return_directions: :return: """ if inds is None: inds = np.arange(np.prod(shape)).reshape(shape) # if not self.segparams['use_boundary_penalties'] and \ # boundary_penalties_fcn is None : if len(shape) == 2: edgx = np.c_[inds[:, :-1].ravel(), inds[:, 1:].ravel()] edgy = np.c_[inds[:-1, :].ravel(), inds[1:, :].ravel()] edges = [edgx, edgy] directions = [ np.ones([edgx.shape[0]], dtype=np.int8) * 0, np.ones([edgy.shape[0]], dtype=np.int8) * 1, ] elif len(shape) == 3: # This is faster for some specific format edgx = np.c_[inds[:, :, :-1].ravel(), inds[:, :, 1:].ravel()] edgy = np.c_[inds[:, :-1, :].ravel(), inds[:, 1:, :].ravel()] edgz = np.c_[inds[:-1, :, :].ravel(), inds[1:, :, :].ravel()] edges = [edgx, edgy, edgz] else: logger.error("Expected 2D or 3D data") # for all edges along first direction put 0, for second direction put 1, for third direction put 3 if return_directions: directions = [] for idirection in range(len(shape)): directions.append( np.ones([edges[idirection].shape[0]], dtype=np.int8) * idirection ) edges = np.concatenate(edges) if return_directions: edge_dir = np.concatenate(directions) return edges, edge_dir else: return edges
python
def grid_edges(shape, inds=None, return_directions=True): """ Get list of grid edges :param shape: :param inds: :param return_directions: :return: """ if inds is None: inds = np.arange(np.prod(shape)).reshape(shape) # if not self.segparams['use_boundary_penalties'] and \ # boundary_penalties_fcn is None : if len(shape) == 2: edgx = np.c_[inds[:, :-1].ravel(), inds[:, 1:].ravel()] edgy = np.c_[inds[:-1, :].ravel(), inds[1:, :].ravel()] edges = [edgx, edgy] directions = [ np.ones([edgx.shape[0]], dtype=np.int8) * 0, np.ones([edgy.shape[0]], dtype=np.int8) * 1, ] elif len(shape) == 3: # This is faster for some specific format edgx = np.c_[inds[:, :, :-1].ravel(), inds[:, :, 1:].ravel()] edgy = np.c_[inds[:, :-1, :].ravel(), inds[:, 1:, :].ravel()] edgz = np.c_[inds[:-1, :, :].ravel(), inds[1:, :, :].ravel()] edges = [edgx, edgy, edgz] else: logger.error("Expected 2D or 3D data") # for all edges along first direction put 0, for second direction put 1, for third direction put 3 if return_directions: directions = [] for idirection in range(len(shape)): directions.append( np.ones([edges[idirection].shape[0]], dtype=np.int8) * idirection ) edges = np.concatenate(edges) if return_directions: edge_dir = np.concatenate(directions) return edges, edge_dir else: return edges
[ "def", "grid_edges", "(", "shape", ",", "inds", "=", "None", ",", "return_directions", "=", "True", ")", ":", "if", "inds", "is", "None", ":", "inds", "=", "np", ".", "arange", "(", "np", ".", "prod", "(", "shape", ")", ")", ".", "reshape", "(", "shape", ")", "# if not self.segparams['use_boundary_penalties'] and \\", "# boundary_penalties_fcn is None :", "if", "len", "(", "shape", ")", "==", "2", ":", "edgx", "=", "np", ".", "c_", "[", "inds", "[", ":", ",", ":", "-", "1", "]", ".", "ravel", "(", ")", ",", "inds", "[", ":", ",", "1", ":", "]", ".", "ravel", "(", ")", "]", "edgy", "=", "np", ".", "c_", "[", "inds", "[", ":", "-", "1", ",", ":", "]", ".", "ravel", "(", ")", ",", "inds", "[", "1", ":", ",", ":", "]", ".", "ravel", "(", ")", "]", "edges", "=", "[", "edgx", ",", "edgy", "]", "directions", "=", "[", "np", ".", "ones", "(", "[", "edgx", ".", "shape", "[", "0", "]", "]", ",", "dtype", "=", "np", ".", "int8", ")", "*", "0", ",", "np", ".", "ones", "(", "[", "edgy", ".", "shape", "[", "0", "]", "]", ",", "dtype", "=", "np", ".", "int8", ")", "*", "1", ",", "]", "elif", "len", "(", "shape", ")", "==", "3", ":", "# This is faster for some specific format", "edgx", "=", "np", ".", "c_", "[", "inds", "[", ":", ",", ":", ",", ":", "-", "1", "]", ".", "ravel", "(", ")", ",", "inds", "[", ":", ",", ":", ",", "1", ":", "]", ".", "ravel", "(", ")", "]", "edgy", "=", "np", ".", "c_", "[", "inds", "[", ":", ",", ":", "-", "1", ",", ":", "]", ".", "ravel", "(", ")", ",", "inds", "[", ":", ",", "1", ":", ",", ":", "]", ".", "ravel", "(", ")", "]", "edgz", "=", "np", ".", "c_", "[", "inds", "[", ":", "-", "1", ",", ":", ",", ":", "]", ".", "ravel", "(", ")", ",", "inds", "[", "1", ":", ",", ":", ",", ":", "]", ".", "ravel", "(", ")", "]", "edges", "=", "[", "edgx", ",", "edgy", ",", "edgz", "]", "else", ":", "logger", ".", "error", "(", "\"Expected 2D or 3D data\"", ")", "# for all edges along first direction put 0, for second direction put 1, for third direction put 
3", "if", "return_directions", ":", "directions", "=", "[", "]", "for", "idirection", "in", "range", "(", "len", "(", "shape", ")", ")", ":", "directions", ".", "append", "(", "np", ".", "ones", "(", "[", "edges", "[", "idirection", "]", ".", "shape", "[", "0", "]", "]", ",", "dtype", "=", "np", ".", "int8", ")", "*", "idirection", ")", "edges", "=", "np", ".", "concatenate", "(", "edges", ")", "if", "return_directions", ":", "edge_dir", "=", "np", ".", "concatenate", "(", "directions", ")", "return", "edges", ",", "edge_dir", "else", ":", "return", "edges" ]
Get list of grid edges :param shape: :param inds: :param return_directions: :return:
[ "Get", "list", "of", "grid", "edges", ":", "param", "shape", ":", ":", "param", "inds", ":", ":", "param", "return_directions", ":", ":", "return", ":" ]
train
https://github.com/mjirik/imcut/blob/1b38e7cd18a7a38fe683c1cabe1222fe5fa03aa3/imcut/graph.py#L524-L568
mjirik/imcut
imcut/graph.py
gen_grid_2d
def gen_grid_2d(shape, voxelsize): """ Generate list of edges for a base grid. """ nr, nc = shape nrm1, ncm1 = nr - 1, nc - 1 # sh = nm.asarray(shape) # calculate number of edges, in 2D: (nrows * (ncols - 1)) + ((nrows - 1) * ncols) nedges = 0 for direction in range(len(shape)): sh = copy.copy(list(shape)) sh[direction] += -1 nedges += nm.prod(sh) nedges_old = ncm1 * nr + nrm1 * nc edges = nm.zeros((nedges, 2), dtype=nm.int16) edge_dir = nm.zeros((ncm1 * nr + nrm1 * nc,), dtype=nm.bool) nodes = nm.zeros((nm.prod(shape), 3), dtype=nm.float32) # edges idx = 0 row = nm.zeros((ncm1, 2), dtype=nm.int16) row[:, 0] = nm.arange(ncm1) row[:, 1] = nm.arange(ncm1) + 1 for ii in range(nr): edges[slice(idx, idx + ncm1), :] = row + nc * ii idx += ncm1 edge_dir[slice(0, idx)] = 0 # horizontal dir idx0 = idx col = nm.zeros((nrm1, 2), dtype=nm.int16) col[:, 0] = nm.arange(nrm1) * nc col[:, 1] = nm.arange(nrm1) * nc + nc for ii in range(nc): edges[slice(idx, idx + nrm1), :] = col + ii idx += nrm1 edge_dir[slice(idx0, idx)] = 1 # vertical dir # nodes idx = 0 row = nm.zeros((nc, 3), dtype=nm.float32) row[:, 0] = voxelsize[0] * (nm.arange(nc) + 0.5) row[:, 1] = voxelsize[1] * 0.5 for ii in range(nr): nodes[slice(idx, idx + nc), :] = row row[:, 1] += voxelsize[1] idx += nc return nodes, edges, edge_dir
python
def gen_grid_2d(shape, voxelsize): """ Generate list of edges for a base grid. """ nr, nc = shape nrm1, ncm1 = nr - 1, nc - 1 # sh = nm.asarray(shape) # calculate number of edges, in 2D: (nrows * (ncols - 1)) + ((nrows - 1) * ncols) nedges = 0 for direction in range(len(shape)): sh = copy.copy(list(shape)) sh[direction] += -1 nedges += nm.prod(sh) nedges_old = ncm1 * nr + nrm1 * nc edges = nm.zeros((nedges, 2), dtype=nm.int16) edge_dir = nm.zeros((ncm1 * nr + nrm1 * nc,), dtype=nm.bool) nodes = nm.zeros((nm.prod(shape), 3), dtype=nm.float32) # edges idx = 0 row = nm.zeros((ncm1, 2), dtype=nm.int16) row[:, 0] = nm.arange(ncm1) row[:, 1] = nm.arange(ncm1) + 1 for ii in range(nr): edges[slice(idx, idx + ncm1), :] = row + nc * ii idx += ncm1 edge_dir[slice(0, idx)] = 0 # horizontal dir idx0 = idx col = nm.zeros((nrm1, 2), dtype=nm.int16) col[:, 0] = nm.arange(nrm1) * nc col[:, 1] = nm.arange(nrm1) * nc + nc for ii in range(nc): edges[slice(idx, idx + nrm1), :] = col + ii idx += nrm1 edge_dir[slice(idx0, idx)] = 1 # vertical dir # nodes idx = 0 row = nm.zeros((nc, 3), dtype=nm.float32) row[:, 0] = voxelsize[0] * (nm.arange(nc) + 0.5) row[:, 1] = voxelsize[1] * 0.5 for ii in range(nr): nodes[slice(idx, idx + nc), :] = row row[:, 1] += voxelsize[1] idx += nc return nodes, edges, edge_dir
[ "def", "gen_grid_2d", "(", "shape", ",", "voxelsize", ")", ":", "nr", ",", "nc", "=", "shape", "nrm1", ",", "ncm1", "=", "nr", "-", "1", ",", "nc", "-", "1", "# sh = nm.asarray(shape)", "# calculate number of edges, in 2D: (nrows * (ncols - 1)) + ((nrows - 1) * ncols)", "nedges", "=", "0", "for", "direction", "in", "range", "(", "len", "(", "shape", ")", ")", ":", "sh", "=", "copy", ".", "copy", "(", "list", "(", "shape", ")", ")", "sh", "[", "direction", "]", "+=", "-", "1", "nedges", "+=", "nm", ".", "prod", "(", "sh", ")", "nedges_old", "=", "ncm1", "*", "nr", "+", "nrm1", "*", "nc", "edges", "=", "nm", ".", "zeros", "(", "(", "nedges", ",", "2", ")", ",", "dtype", "=", "nm", ".", "int16", ")", "edge_dir", "=", "nm", ".", "zeros", "(", "(", "ncm1", "*", "nr", "+", "nrm1", "*", "nc", ",", ")", ",", "dtype", "=", "nm", ".", "bool", ")", "nodes", "=", "nm", ".", "zeros", "(", "(", "nm", ".", "prod", "(", "shape", ")", ",", "3", ")", ",", "dtype", "=", "nm", ".", "float32", ")", "# edges", "idx", "=", "0", "row", "=", "nm", ".", "zeros", "(", "(", "ncm1", ",", "2", ")", ",", "dtype", "=", "nm", ".", "int16", ")", "row", "[", ":", ",", "0", "]", "=", "nm", ".", "arange", "(", "ncm1", ")", "row", "[", ":", ",", "1", "]", "=", "nm", ".", "arange", "(", "ncm1", ")", "+", "1", "for", "ii", "in", "range", "(", "nr", ")", ":", "edges", "[", "slice", "(", "idx", ",", "idx", "+", "ncm1", ")", ",", ":", "]", "=", "row", "+", "nc", "*", "ii", "idx", "+=", "ncm1", "edge_dir", "[", "slice", "(", "0", ",", "idx", ")", "]", "=", "0", "# horizontal dir", "idx0", "=", "idx", "col", "=", "nm", ".", "zeros", "(", "(", "nrm1", ",", "2", ")", ",", "dtype", "=", "nm", ".", "int16", ")", "col", "[", ":", ",", "0", "]", "=", "nm", ".", "arange", "(", "nrm1", ")", "*", "nc", "col", "[", ":", ",", "1", "]", "=", "nm", ".", "arange", "(", "nrm1", ")", "*", "nc", "+", "nc", "for", "ii", "in", "range", "(", "nc", ")", ":", "edges", "[", "slice", "(", "idx", ",", "idx", "+", "nrm1", ")", ",", 
":", "]", "=", "col", "+", "ii", "idx", "+=", "nrm1", "edge_dir", "[", "slice", "(", "idx0", ",", "idx", ")", "]", "=", "1", "# vertical dir", "# nodes", "idx", "=", "0", "row", "=", "nm", ".", "zeros", "(", "(", "nc", ",", "3", ")", ",", "dtype", "=", "nm", ".", "float32", ")", "row", "[", ":", ",", "0", "]", "=", "voxelsize", "[", "0", "]", "*", "(", "nm", ".", "arange", "(", "nc", ")", "+", "0.5", ")", "row", "[", ":", ",", "1", "]", "=", "voxelsize", "[", "1", "]", "*", "0.5", "for", "ii", "in", "range", "(", "nr", ")", ":", "nodes", "[", "slice", "(", "idx", ",", "idx", "+", "nc", ")", ",", ":", "]", "=", "row", "row", "[", ":", ",", "1", "]", "+=", "voxelsize", "[", "1", "]", "idx", "+=", "nc", "return", "nodes", ",", "edges", ",", "edge_dir" ]
Generate list of edges for a base grid.
[ "Generate", "list", "of", "edges", "for", "a", "base", "grid", "." ]
train
https://github.com/mjirik/imcut/blob/1b38e7cd18a7a38fe683c1cabe1222fe5fa03aa3/imcut/graph.py#L586-L636
mjirik/imcut
imcut/graph.py
write_grid_to_vtk
def write_grid_to_vtk(fname, nodes, edges, node_flag=None, edge_flag=None): """ Write nodes and edges to VTK file :param fname: VTK filename :param nodes: :param edges: :param node_flag: set if this node is really used in output :param edge_flag: set if this flag is used in output :return: """ if node_flag is None: node_flag = np.ones([nodes.shape[0]], dtype=np.bool) if edge_flag is None: edge_flag = np.ones([edges.shape[0]], dtype=np.bool) nodes = make_nodes_3d(nodes) f = open(fname, "w") f.write("# vtk DataFile Version 2.6\n") f.write("output file\nASCII\nDATASET UNSTRUCTURED_GRID\n") idxs = nm.where(node_flag > 0)[0] nnd = len(idxs) aux = -nm.ones(node_flag.shape, dtype=nm.int32) aux[idxs] = nm.arange(nnd, dtype=nm.int32) f.write("\nPOINTS %d float\n" % nnd) for ndi in idxs: f.write("%.6f %.6f %.6f\n" % tuple(nodes[ndi, :])) idxs = nm.where(edge_flag > 0)[0] ned = len(idxs) f.write("\nCELLS %d %d\n" % (ned, ned * 3)) for edi in idxs: f.write("2 %d %d\n" % tuple(aux[edges[edi, :]])) f.write("\nCELL_TYPES %d\n" % ned) for edi in idxs: f.write("3\n")
python
def write_grid_to_vtk(fname, nodes, edges, node_flag=None, edge_flag=None): """ Write nodes and edges to VTK file :param fname: VTK filename :param nodes: :param edges: :param node_flag: set if this node is really used in output :param edge_flag: set if this flag is used in output :return: """ if node_flag is None: node_flag = np.ones([nodes.shape[0]], dtype=np.bool) if edge_flag is None: edge_flag = np.ones([edges.shape[0]], dtype=np.bool) nodes = make_nodes_3d(nodes) f = open(fname, "w") f.write("# vtk DataFile Version 2.6\n") f.write("output file\nASCII\nDATASET UNSTRUCTURED_GRID\n") idxs = nm.where(node_flag > 0)[0] nnd = len(idxs) aux = -nm.ones(node_flag.shape, dtype=nm.int32) aux[idxs] = nm.arange(nnd, dtype=nm.int32) f.write("\nPOINTS %d float\n" % nnd) for ndi in idxs: f.write("%.6f %.6f %.6f\n" % tuple(nodes[ndi, :])) idxs = nm.where(edge_flag > 0)[0] ned = len(idxs) f.write("\nCELLS %d %d\n" % (ned, ned * 3)) for edi in idxs: f.write("2 %d %d\n" % tuple(aux[edges[edi, :]])) f.write("\nCELL_TYPES %d\n" % ned) for edi in idxs: f.write("3\n")
[ "def", "write_grid_to_vtk", "(", "fname", ",", "nodes", ",", "edges", ",", "node_flag", "=", "None", ",", "edge_flag", "=", "None", ")", ":", "if", "node_flag", "is", "None", ":", "node_flag", "=", "np", ".", "ones", "(", "[", "nodes", ".", "shape", "[", "0", "]", "]", ",", "dtype", "=", "np", ".", "bool", ")", "if", "edge_flag", "is", "None", ":", "edge_flag", "=", "np", ".", "ones", "(", "[", "edges", ".", "shape", "[", "0", "]", "]", ",", "dtype", "=", "np", ".", "bool", ")", "nodes", "=", "make_nodes_3d", "(", "nodes", ")", "f", "=", "open", "(", "fname", ",", "\"w\"", ")", "f", ".", "write", "(", "\"# vtk DataFile Version 2.6\\n\"", ")", "f", ".", "write", "(", "\"output file\\nASCII\\nDATASET UNSTRUCTURED_GRID\\n\"", ")", "idxs", "=", "nm", ".", "where", "(", "node_flag", ">", "0", ")", "[", "0", "]", "nnd", "=", "len", "(", "idxs", ")", "aux", "=", "-", "nm", ".", "ones", "(", "node_flag", ".", "shape", ",", "dtype", "=", "nm", ".", "int32", ")", "aux", "[", "idxs", "]", "=", "nm", ".", "arange", "(", "nnd", ",", "dtype", "=", "nm", ".", "int32", ")", "f", ".", "write", "(", "\"\\nPOINTS %d float\\n\"", "%", "nnd", ")", "for", "ndi", "in", "idxs", ":", "f", ".", "write", "(", "\"%.6f %.6f %.6f\\n\"", "%", "tuple", "(", "nodes", "[", "ndi", ",", ":", "]", ")", ")", "idxs", "=", "nm", ".", "where", "(", "edge_flag", ">", "0", ")", "[", "0", "]", "ned", "=", "len", "(", "idxs", ")", "f", ".", "write", "(", "\"\\nCELLS %d %d\\n\"", "%", "(", "ned", ",", "ned", "*", "3", ")", ")", "for", "edi", "in", "idxs", ":", "f", ".", "write", "(", "\"2 %d %d\\n\"", "%", "tuple", "(", "aux", "[", "edges", "[", "edi", ",", ":", "]", "]", ")", ")", "f", ".", "write", "(", "\"\\nCELL_TYPES %d\\n\"", "%", "ned", ")", "for", "edi", "in", "idxs", ":", "f", ".", "write", "(", "\"3\\n\"", ")" ]
Write nodes and edges to VTK file :param fname: VTK filename :param nodes: :param edges: :param node_flag: set if this node is really used in output :param edge_flag: set if this flag is used in output :return:
[ "Write", "nodes", "and", "edges", "to", "VTK", "file", ":", "param", "fname", ":", "VTK", "filename", ":", "param", "nodes", ":", ":", "param", "edges", ":", ":", "param", "node_flag", ":", "set", "if", "this", "node", "is", "really", "used", "in", "output", ":", "param", "edge_flag", ":", "set", "if", "this", "flag", "is", "used", "in", "output", ":", "return", ":" ]
train
https://github.com/mjirik/imcut/blob/1b38e7cd18a7a38fe683c1cabe1222fe5fa03aa3/imcut/graph.py#L646-L683
mjirik/imcut
imcut/graph.py
Graph.add_nodes
def add_nodes(self, coors, node_low_or_high=None): """ Add new nodes at the end of the list. """ last = self.lastnode if type(coors) is nm.ndarray: if len(coors.shape) == 1: coors = coors.reshape((1, coors.size)) nadd = coors.shape[0] idx = slice(last, last + nadd) else: nadd = 1 idx = self.lastnode right_dimension = coors.shape[1] self.nodes[idx, :right_dimension] = coors self.node_flag[idx] = True self.lastnode += nadd self.nnodes += nadd
python
def add_nodes(self, coors, node_low_or_high=None): """ Add new nodes at the end of the list. """ last = self.lastnode if type(coors) is nm.ndarray: if len(coors.shape) == 1: coors = coors.reshape((1, coors.size)) nadd = coors.shape[0] idx = slice(last, last + nadd) else: nadd = 1 idx = self.lastnode right_dimension = coors.shape[1] self.nodes[idx, :right_dimension] = coors self.node_flag[idx] = True self.lastnode += nadd self.nnodes += nadd
[ "def", "add_nodes", "(", "self", ",", "coors", ",", "node_low_or_high", "=", "None", ")", ":", "last", "=", "self", ".", "lastnode", "if", "type", "(", "coors", ")", "is", "nm", ".", "ndarray", ":", "if", "len", "(", "coors", ".", "shape", ")", "==", "1", ":", "coors", "=", "coors", ".", "reshape", "(", "(", "1", ",", "coors", ".", "size", ")", ")", "nadd", "=", "coors", ".", "shape", "[", "0", "]", "idx", "=", "slice", "(", "last", ",", "last", "+", "nadd", ")", "else", ":", "nadd", "=", "1", "idx", "=", "self", ".", "lastnode", "right_dimension", "=", "coors", ".", "shape", "[", "1", "]", "self", ".", "nodes", "[", "idx", ",", ":", "right_dimension", "]", "=", "coors", "self", ".", "node_flag", "[", "idx", "]", "=", "True", "self", ".", "lastnode", "+=", "nadd", "self", ".", "nnodes", "+=", "nadd" ]
Add new nodes at the end of the list.
[ "Add", "new", "nodes", "at", "the", "end", "of", "the", "list", "." ]
train
https://github.com/mjirik/imcut/blob/1b38e7cd18a7a38fe683c1cabe1222fe5fa03aa3/imcut/graph.py#L153-L171
mjirik/imcut
imcut/graph.py
Graph.add_edges
def add_edges(self, conn, edge_direction, edge_group=None, edge_low_or_high=None): """ Add new edges at the end of the list. :param edge_direction: direction flag :param edge_group: describes group of edges from same low super node and same direction :param edge_low_or_high: zero for low to low resolution, one for high to high or high to low resolution. It is used to set weight from weight table. """ last = self.lastedge if type(conn) is nm.ndarray: nadd = conn.shape[0] idx = slice(last, last + nadd) if edge_group is None: edge_group = nm.arange(nadd) + last else: nadd = 1 idx = nm.array([last]) conn = nm.array(conn).reshape((1, 2)) if edge_group is None: edge_group = idx self.edges[idx, :] = conn self.edge_flag[idx] = True # t_start0 = time.time() # self.edge_flag_idx.extend(list(range(idx.start, idx.stop))) # self.stats["t split 082"] += time.time() - t_start0 self.edge_dir[idx] = edge_direction self.edge_group[idx] = edge_group # TODO change this just to array of low_or_high_resolution if edge_low_or_high is not None and self._edge_weight_table is not None: self.edges_weights[idx] = self._edge_weight_table[ edge_low_or_high, edge_direction ] self.lastedge += nadd self.nedges += nadd
python
def add_edges(self, conn, edge_direction, edge_group=None, edge_low_or_high=None): """ Add new edges at the end of the list. :param edge_direction: direction flag :param edge_group: describes group of edges from same low super node and same direction :param edge_low_or_high: zero for low to low resolution, one for high to high or high to low resolution. It is used to set weight from weight table. """ last = self.lastedge if type(conn) is nm.ndarray: nadd = conn.shape[0] idx = slice(last, last + nadd) if edge_group is None: edge_group = nm.arange(nadd) + last else: nadd = 1 idx = nm.array([last]) conn = nm.array(conn).reshape((1, 2)) if edge_group is None: edge_group = idx self.edges[idx, :] = conn self.edge_flag[idx] = True # t_start0 = time.time() # self.edge_flag_idx.extend(list(range(idx.start, idx.stop))) # self.stats["t split 082"] += time.time() - t_start0 self.edge_dir[idx] = edge_direction self.edge_group[idx] = edge_group # TODO change this just to array of low_or_high_resolution if edge_low_or_high is not None and self._edge_weight_table is not None: self.edges_weights[idx] = self._edge_weight_table[ edge_low_or_high, edge_direction ] self.lastedge += nadd self.nedges += nadd
[ "def", "add_edges", "(", "self", ",", "conn", ",", "edge_direction", ",", "edge_group", "=", "None", ",", "edge_low_or_high", "=", "None", ")", ":", "last", "=", "self", ".", "lastedge", "if", "type", "(", "conn", ")", "is", "nm", ".", "ndarray", ":", "nadd", "=", "conn", ".", "shape", "[", "0", "]", "idx", "=", "slice", "(", "last", ",", "last", "+", "nadd", ")", "if", "edge_group", "is", "None", ":", "edge_group", "=", "nm", ".", "arange", "(", "nadd", ")", "+", "last", "else", ":", "nadd", "=", "1", "idx", "=", "nm", ".", "array", "(", "[", "last", "]", ")", "conn", "=", "nm", ".", "array", "(", "conn", ")", ".", "reshape", "(", "(", "1", ",", "2", ")", ")", "if", "edge_group", "is", "None", ":", "edge_group", "=", "idx", "self", ".", "edges", "[", "idx", ",", ":", "]", "=", "conn", "self", ".", "edge_flag", "[", "idx", "]", "=", "True", "# t_start0 = time.time()", "# self.edge_flag_idx.extend(list(range(idx.start, idx.stop)))", "# self.stats[\"t split 082\"] += time.time() - t_start0", "self", ".", "edge_dir", "[", "idx", "]", "=", "edge_direction", "self", ".", "edge_group", "[", "idx", "]", "=", "edge_group", "# TODO change this just to array of low_or_high_resolution", "if", "edge_low_or_high", "is", "not", "None", "and", "self", ".", "_edge_weight_table", "is", "not", "None", ":", "self", ".", "edges_weights", "[", "idx", "]", "=", "self", ".", "_edge_weight_table", "[", "edge_low_or_high", ",", "edge_direction", "]", "self", ".", "lastedge", "+=", "nadd", "self", ".", "nedges", "+=", "nadd" ]
Add new edges at the end of the list. :param edge_direction: direction flag :param edge_group: describes group of edges from same low super node and same direction :param edge_low_or_high: zero for low to low resolution, one for high to high or high to low resolution. It is used to set weight from weight table.
[ "Add", "new", "edges", "at", "the", "end", "of", "the", "list", ".", ":", "param", "edge_direction", ":", "direction", "flag", ":", "param", "edge_group", ":", "describes", "group", "of", "edges", "from", "same", "low", "super", "node", "and", "same", "direction", ":", "param", "edge_low_or_high", ":", "zero", "for", "low", "to", "low", "resolution", "one", "for", "high", "to", "high", "or", "high", "to", "low", "resolution", ".", "It", "is", "used", "to", "set", "weight", "from", "weight", "table", "." ]
train
https://github.com/mjirik/imcut/blob/1b38e7cd18a7a38fe683c1cabe1222fe5fa03aa3/imcut/graph.py#L173-L207
mjirik/imcut
imcut/graph.py
Graph._edge_group_substitution
def _edge_group_substitution( self, ndid, nsplit, idxs, sr_tab, ndoffset, ed_remove, into_or_from ): """ Reconnect edges. :param ndid: id of low resolution edges :param nsplit: number of split :param idxs: indexes of low resolution :param sr_tab: :param ndoffset: :param ed_remove: :param into_or_from: if zero, connection of input edges is done. If one, connection of output edges is performed. :return: """ # this is useful for type(idxs) == np.ndarray eidxs = idxs[nm.where(self.edges[idxs, 1 - into_or_from] == ndid)[0]] # selected_edges = self.edges[idxs, 1 - into_or_from] # selected_edges == ndid # whre = nm.where(self.edges[idxs, 1 - into_or_from] == ndid) # whre0 = (nm.where(self.edges[idxs, 1 - into_or_from] == ndid) == ndid)[0] # eidxs = [idxs[i] for i in idxs] for igrp in self.edges_by_group(eidxs): if igrp.shape[0] > 1: # high resolution block to high resolution block # all directions are the same directions = self.edge_dir[igrp[0]] edge_indexes = sr_tab[directions, :].T.flatten() + ndoffset # debug code # if len(igrp) != len(edge_indexes): # print("Problem ") self.edges[igrp, 1] = edge_indexes if self._edge_weight_table is not None: self.edges_weights[igrp] = self._edge_weight_table[1, directions] else: # low res block to hi res block, if into_or_from is set to 0 # hig res block to low res block, if into_or_from is set to 1 ed_remove.append(igrp[0]) # number of new edges is equal to number of pixels on one side of the box (in 2D and D too) nnewed = np.power(nsplit, self.data.ndim - 1) muleidxs = nm.tile(igrp, nnewed) # copy the low-res edge multipletime newed = self.edges[muleidxs, :] neweddir = self.edge_dir[muleidxs] local_node_ids = sr_tab[ self.edge_dir[igrp] + self.data.ndim * into_or_from, : ].T.flatten() # first or second (the actual) node id is substitued by new node indexes newed[:, 1 - into_or_from] = local_node_ids + ndoffset if self._edge_weight_table is not None: self.add_edges( newed, neweddir, self.edge_group[igrp], edge_low_or_high=1 ) else: 
self.add_edges( newed, neweddir, self.edge_group[igrp], edge_low_or_high=None ) return ed_remove
python
def _edge_group_substitution( self, ndid, nsplit, idxs, sr_tab, ndoffset, ed_remove, into_or_from ): """ Reconnect edges. :param ndid: id of low resolution edges :param nsplit: number of split :param idxs: indexes of low resolution :param sr_tab: :param ndoffset: :param ed_remove: :param into_or_from: if zero, connection of input edges is done. If one, connection of output edges is performed. :return: """ # this is useful for type(idxs) == np.ndarray eidxs = idxs[nm.where(self.edges[idxs, 1 - into_or_from] == ndid)[0]] # selected_edges = self.edges[idxs, 1 - into_or_from] # selected_edges == ndid # whre = nm.where(self.edges[idxs, 1 - into_or_from] == ndid) # whre0 = (nm.where(self.edges[idxs, 1 - into_or_from] == ndid) == ndid)[0] # eidxs = [idxs[i] for i in idxs] for igrp in self.edges_by_group(eidxs): if igrp.shape[0] > 1: # high resolution block to high resolution block # all directions are the same directions = self.edge_dir[igrp[0]] edge_indexes = sr_tab[directions, :].T.flatten() + ndoffset # debug code # if len(igrp) != len(edge_indexes): # print("Problem ") self.edges[igrp, 1] = edge_indexes if self._edge_weight_table is not None: self.edges_weights[igrp] = self._edge_weight_table[1, directions] else: # low res block to hi res block, if into_or_from is set to 0 # hig res block to low res block, if into_or_from is set to 1 ed_remove.append(igrp[0]) # number of new edges is equal to number of pixels on one side of the box (in 2D and D too) nnewed = np.power(nsplit, self.data.ndim - 1) muleidxs = nm.tile(igrp, nnewed) # copy the low-res edge multipletime newed = self.edges[muleidxs, :] neweddir = self.edge_dir[muleidxs] local_node_ids = sr_tab[ self.edge_dir[igrp] + self.data.ndim * into_or_from, : ].T.flatten() # first or second (the actual) node id is substitued by new node indexes newed[:, 1 - into_or_from] = local_node_ids + ndoffset if self._edge_weight_table is not None: self.add_edges( newed, neweddir, self.edge_group[igrp], edge_low_or_high=1 ) else: 
self.add_edges( newed, neweddir, self.edge_group[igrp], edge_low_or_high=None ) return ed_remove
[ "def", "_edge_group_substitution", "(", "self", ",", "ndid", ",", "nsplit", ",", "idxs", ",", "sr_tab", ",", "ndoffset", ",", "ed_remove", ",", "into_or_from", ")", ":", "# this is useful for type(idxs) == np.ndarray", "eidxs", "=", "idxs", "[", "nm", ".", "where", "(", "self", ".", "edges", "[", "idxs", ",", "1", "-", "into_or_from", "]", "==", "ndid", ")", "[", "0", "]", "]", "# selected_edges = self.edges[idxs, 1 - into_or_from]", "# selected_edges == ndid", "# whre = nm.where(self.edges[idxs, 1 - into_or_from] == ndid)", "# whre0 = (nm.where(self.edges[idxs, 1 - into_or_from] == ndid) == ndid)[0]", "# eidxs = [idxs[i] for i in idxs]", "for", "igrp", "in", "self", ".", "edges_by_group", "(", "eidxs", ")", ":", "if", "igrp", ".", "shape", "[", "0", "]", ">", "1", ":", "# high resolution block to high resolution block", "# all directions are the same", "directions", "=", "self", ".", "edge_dir", "[", "igrp", "[", "0", "]", "]", "edge_indexes", "=", "sr_tab", "[", "directions", ",", ":", "]", ".", "T", ".", "flatten", "(", ")", "+", "ndoffset", "# debug code", "# if len(igrp) != len(edge_indexes):", "# print(\"Problem \")", "self", ".", "edges", "[", "igrp", ",", "1", "]", "=", "edge_indexes", "if", "self", ".", "_edge_weight_table", "is", "not", "None", ":", "self", ".", "edges_weights", "[", "igrp", "]", "=", "self", ".", "_edge_weight_table", "[", "1", ",", "directions", "]", "else", ":", "# low res block to hi res block, if into_or_from is set to 0", "# hig res block to low res block, if into_or_from is set to 1", "ed_remove", ".", "append", "(", "igrp", "[", "0", "]", ")", "# number of new edges is equal to number of pixels on one side of the box (in 2D and D too)", "nnewed", "=", "np", ".", "power", "(", "nsplit", ",", "self", ".", "data", ".", "ndim", "-", "1", ")", "muleidxs", "=", "nm", ".", "tile", "(", "igrp", ",", "nnewed", ")", "# copy the low-res edge multipletime", "newed", "=", "self", ".", "edges", "[", "muleidxs", ",", ":", "]", "neweddir", "=", 
"self", ".", "edge_dir", "[", "muleidxs", "]", "local_node_ids", "=", "sr_tab", "[", "self", ".", "edge_dir", "[", "igrp", "]", "+", "self", ".", "data", ".", "ndim", "*", "into_or_from", ",", ":", "]", ".", "T", ".", "flatten", "(", ")", "# first or second (the actual) node id is substitued by new node indexes", "newed", "[", ":", ",", "1", "-", "into_or_from", "]", "=", "local_node_ids", "+", "ndoffset", "if", "self", ".", "_edge_weight_table", "is", "not", "None", ":", "self", ".", "add_edges", "(", "newed", ",", "neweddir", ",", "self", ".", "edge_group", "[", "igrp", "]", ",", "edge_low_or_high", "=", "1", ")", "else", ":", "self", ".", "add_edges", "(", "newed", ",", "neweddir", ",", "self", ".", "edge_group", "[", "igrp", "]", ",", "edge_low_or_high", "=", "None", ")", "return", "ed_remove" ]
Reconnect edges. :param ndid: id of low resolution edges :param nsplit: number of split :param idxs: indexes of low resolution :param sr_tab: :param ndoffset: :param ed_remove: :param into_or_from: if zero, connection of input edges is done. If one, connection of output edges is performed. :return:
[ "Reconnect", "edges", ".", ":", "param", "ndid", ":", "id", "of", "low", "resolution", "edges", ":", "param", "nsplit", ":", "number", "of", "split", ":", "param", "idxs", ":", "indexes", "of", "low", "resolution", ":", "param", "sr_tab", ":", ":", "param", "ndoffset", ":", ":", "param", "ed_remove", ":", ":", "param", "into_or_from", ":", "if", "zero", "connection", "of", "input", "edges", "is", "done", ".", "If", "one", "connection", "of", "output", "edges", "is", "performed", ".", ":", "return", ":" ]
train
https://github.com/mjirik/imcut/blob/1b38e7cd18a7a38fe683c1cabe1222fe5fa03aa3/imcut/graph.py#L264-L321
mjirik/imcut
imcut/graph.py
Graph.generate_base_grid
def generate_base_grid(self, vtk_filename=None): """ Run first step of algorithm. Next step is split_voxels :param vtk_filename: :return: """ nd, ed, ed_dir = self.gen_grid_fcn(self.data.shape, self.voxelsize) self.add_nodes(nd) self.add_edges(ed, ed_dir, edge_low_or_high=0) if vtk_filename is not None: self.write_vtk(vtk_filename)
python
def generate_base_grid(self, vtk_filename=None): """ Run first step of algorithm. Next step is split_voxels :param vtk_filename: :return: """ nd, ed, ed_dir = self.gen_grid_fcn(self.data.shape, self.voxelsize) self.add_nodes(nd) self.add_edges(ed, ed_dir, edge_low_or_high=0) if vtk_filename is not None: self.write_vtk(vtk_filename)
[ "def", "generate_base_grid", "(", "self", ",", "vtk_filename", "=", "None", ")", ":", "nd", ",", "ed", ",", "ed_dir", "=", "self", ".", "gen_grid_fcn", "(", "self", ".", "data", ".", "shape", ",", "self", ".", "voxelsize", ")", "self", ".", "add_nodes", "(", "nd", ")", "self", ".", "add_edges", "(", "ed", ",", "ed_dir", ",", "edge_low_or_high", "=", "0", ")", "if", "vtk_filename", "is", "not", "None", ":", "self", ".", "write_vtk", "(", "vtk_filename", ")" ]
Run first step of algorithm. Next step is split_voxels :param vtk_filename: :return:
[ "Run", "first", "step", "of", "algorithm", ".", "Next", "step", "is", "split_voxels", ":", "param", "vtk_filename", ":", ":", "return", ":" ]
train
https://github.com/mjirik/imcut/blob/1b38e7cd18a7a38fe683c1cabe1222fe5fa03aa3/imcut/graph.py#L404-L415
mjirik/imcut
imcut/graph.py
Graph.split_voxels
def split_voxels(self, vtk_filename=None): """ Second step of algorithm :return:() """ self.cache = {} self.stats["t graph 10"] = time.time() - self.start_time self.msi = MultiscaleArray(self.data.shape, block_size=self.nsplit) # old implementation # idxs = nm.where(self.data) # nr, nc = self.data.shape # for k, (ir, ic) in enumerate(zip(*idxs)): # ndid = ic + ir * nc # self.split_voxel(ndid, self.nsplit) # new_implementation # for ndid in np.flatnonzero(self.data): # self.split_voxel(ndid, self.nsplit) # even newer implementation self.stats["t graph 11"] = time.time() - self.start_time for ndid, val in enumerate(self.data.ravel()): t_split_start = time.time() if val == 0: if self.compute_msindex: self.msi.set_block_lowres(ndid, ndid) self.stats["t graph low"] += time.time() - t_split_start else: self.split_voxel(ndid) self.stats["t graph high"] += time.time() - t_split_start self.stats["t graph 13"] = time.time() - self.start_time self.finish() if vtk_filename is not None: self.write_vtk(vtk_filename) self.stats["t graph 14"] = time.time() - self.start_time
python
def split_voxels(self, vtk_filename=None): """ Second step of algorithm :return:() """ self.cache = {} self.stats["t graph 10"] = time.time() - self.start_time self.msi = MultiscaleArray(self.data.shape, block_size=self.nsplit) # old implementation # idxs = nm.where(self.data) # nr, nc = self.data.shape # for k, (ir, ic) in enumerate(zip(*idxs)): # ndid = ic + ir * nc # self.split_voxel(ndid, self.nsplit) # new_implementation # for ndid in np.flatnonzero(self.data): # self.split_voxel(ndid, self.nsplit) # even newer implementation self.stats["t graph 11"] = time.time() - self.start_time for ndid, val in enumerate(self.data.ravel()): t_split_start = time.time() if val == 0: if self.compute_msindex: self.msi.set_block_lowres(ndid, ndid) self.stats["t graph low"] += time.time() - t_split_start else: self.split_voxel(ndid) self.stats["t graph high"] += time.time() - t_split_start self.stats["t graph 13"] = time.time() - self.start_time self.finish() if vtk_filename is not None: self.write_vtk(vtk_filename) self.stats["t graph 14"] = time.time() - self.start_time
[ "def", "split_voxels", "(", "self", ",", "vtk_filename", "=", "None", ")", ":", "self", ".", "cache", "=", "{", "}", "self", ".", "stats", "[", "\"t graph 10\"", "]", "=", "time", ".", "time", "(", ")", "-", "self", ".", "start_time", "self", ".", "msi", "=", "MultiscaleArray", "(", "self", ".", "data", ".", "shape", ",", "block_size", "=", "self", ".", "nsplit", ")", "# old implementation", "# idxs = nm.where(self.data)", "# nr, nc = self.data.shape", "# for k, (ir, ic) in enumerate(zip(*idxs)):", "# ndid = ic + ir * nc", "# self.split_voxel(ndid, self.nsplit)", "# new_implementation", "# for ndid in np.flatnonzero(self.data):", "# self.split_voxel(ndid, self.nsplit)", "# even newer implementation", "self", ".", "stats", "[", "\"t graph 11\"", "]", "=", "time", ".", "time", "(", ")", "-", "self", ".", "start_time", "for", "ndid", ",", "val", "in", "enumerate", "(", "self", ".", "data", ".", "ravel", "(", ")", ")", ":", "t_split_start", "=", "time", ".", "time", "(", ")", "if", "val", "==", "0", ":", "if", "self", ".", "compute_msindex", ":", "self", ".", "msi", ".", "set_block_lowres", "(", "ndid", ",", "ndid", ")", "self", ".", "stats", "[", "\"t graph low\"", "]", "+=", "time", ".", "time", "(", ")", "-", "t_split_start", "else", ":", "self", ".", "split_voxel", "(", "ndid", ")", "self", ".", "stats", "[", "\"t graph high\"", "]", "+=", "time", ".", "time", "(", ")", "-", "t_split_start", "self", ".", "stats", "[", "\"t graph 13\"", "]", "=", "time", ".", "time", "(", ")", "-", "self", ".", "start_time", "self", ".", "finish", "(", ")", "if", "vtk_filename", "is", "not", "None", ":", "self", ".", "write_vtk", "(", "vtk_filename", ")", "self", ".", "stats", "[", "\"t graph 14\"", "]", "=", "time", ".", "time", "(", ")", "-", "self", ".", "start_time" ]
Second step of algorithm :return:()
[ "Second", "step", "of", "algorithm", ":", "return", ":", "()" ]
train
https://github.com/mjirik/imcut/blob/1b38e7cd18a7a38fe683c1cabe1222fe5fa03aa3/imcut/graph.py#L417-L453
mjirik/imcut
imcut/graph.py
MultiscaleArray.mul_block
def mul_block(self, index, val): """Multiply values in block""" self._prepare_cache_slice(index) self.msinds[self.cache_slice] *= val
python
def mul_block(self, index, val): """Multiply values in block""" self._prepare_cache_slice(index) self.msinds[self.cache_slice] *= val
[ "def", "mul_block", "(", "self", ",", "index", ",", "val", ")", ":", "self", ".", "_prepare_cache_slice", "(", "index", ")", "self", ".", "msinds", "[", "self", ".", "cache_slice", "]", "*=", "val" ]
Multiply values in block
[ "Multiply", "values", "in", "block" ]
train
https://github.com/mjirik/imcut/blob/1b38e7cd18a7a38fe683c1cabe1222fe5fa03aa3/imcut/graph.py#L714-L717
mjirik/imcut
imcut/features.py
select_from_fv_by_seeds
def select_from_fv_by_seeds(fv, seeds, unique_cls): """ Tool to make simple feature functions take features from feature array by seeds. :param fv: ndarray with lineariezed feature. It's shape is MxN, where M is number of image pixels and N is number of features :param seeds: ndarray with seeds. Does not to be linear. :param unique_cls: number of used seeds clases. Like [1, 2] :return: fv_selection, seeds_selection - selection from feature vector and selection from seeds """ logger.debug("seeds" + str(seeds)) # fvlin = fv.reshape(-1, int(fv.size/seeds.size)) expected_shape = [seeds.size, int(fv.size/seeds.size)] if fv.shape[0] != expected_shape[0] or fv.shape[1] != expected_shape[1]: raise AssertionError("Wrong shape of input feature vector array fv") # sd = seeds.reshape(-1, 1) selection = np.in1d(seeds, unique_cls) fv_selection = fv[selection] seeds_selection = seeds.flatten()[selection] # sd = sd[] return fv_selection, seeds_selection
python
def select_from_fv_by_seeds(fv, seeds, unique_cls): """ Tool to make simple feature functions take features from feature array by seeds. :param fv: ndarray with lineariezed feature. It's shape is MxN, where M is number of image pixels and N is number of features :param seeds: ndarray with seeds. Does not to be linear. :param unique_cls: number of used seeds clases. Like [1, 2] :return: fv_selection, seeds_selection - selection from feature vector and selection from seeds """ logger.debug("seeds" + str(seeds)) # fvlin = fv.reshape(-1, int(fv.size/seeds.size)) expected_shape = [seeds.size, int(fv.size/seeds.size)] if fv.shape[0] != expected_shape[0] or fv.shape[1] != expected_shape[1]: raise AssertionError("Wrong shape of input feature vector array fv") # sd = seeds.reshape(-1, 1) selection = np.in1d(seeds, unique_cls) fv_selection = fv[selection] seeds_selection = seeds.flatten()[selection] # sd = sd[] return fv_selection, seeds_selection
[ "def", "select_from_fv_by_seeds", "(", "fv", ",", "seeds", ",", "unique_cls", ")", ":", "logger", ".", "debug", "(", "\"seeds\"", "+", "str", "(", "seeds", ")", ")", "# fvlin = fv.reshape(-1, int(fv.size/seeds.size))", "expected_shape", "=", "[", "seeds", ".", "size", ",", "int", "(", "fv", ".", "size", "/", "seeds", ".", "size", ")", "]", "if", "fv", ".", "shape", "[", "0", "]", "!=", "expected_shape", "[", "0", "]", "or", "fv", ".", "shape", "[", "1", "]", "!=", "expected_shape", "[", "1", "]", ":", "raise", "AssertionError", "(", "\"Wrong shape of input feature vector array fv\"", ")", "# sd = seeds.reshape(-1, 1)", "selection", "=", "np", ".", "in1d", "(", "seeds", ",", "unique_cls", ")", "fv_selection", "=", "fv", "[", "selection", "]", "seeds_selection", "=", "seeds", ".", "flatten", "(", ")", "[", "selection", "]", "# sd = sd[]", "return", "fv_selection", ",", "seeds_selection" ]
Tool to make simple feature functions take features from feature array by seeds. :param fv: ndarray with lineariezed feature. It's shape is MxN, where M is number of image pixels and N is number of features :param seeds: ndarray with seeds. Does not to be linear. :param unique_cls: number of used seeds clases. Like [1, 2] :return: fv_selection, seeds_selection - selection from feature vector and selection from seeds
[ "Tool", "to", "make", "simple", "feature", "functions", "take", "features", "from", "feature", "array", "by", "seeds", ".", ":", "param", "fv", ":", "ndarray", "with", "lineariezed", "feature", ".", "It", "s", "shape", "is", "MxN", "where", "M", "is", "number", "of", "image", "pixels", "and", "N", "is", "number", "of", "features", ":", "param", "seeds", ":", "ndarray", "with", "seeds", ".", "Does", "not", "to", "be", "linear", ".", ":", "param", "unique_cls", ":", "number", "of", "used", "seeds", "clases", ".", "Like", "[", "1", "2", "]", ":", "return", ":", "fv_selection", "seeds_selection", "-", "selection", "from", "feature", "vector", "and", "selection", "from", "seeds" ]
train
https://github.com/mjirik/imcut/blob/1b38e7cd18a7a38fe683c1cabe1222fe5fa03aa3/imcut/features.py#L39-L58
mjirik/imcut
imcut/features.py
return_fv_by_seeds
def return_fv_by_seeds(fv, seeds=None, unique_cls=None): """ Return features selected by seeds and unique_cls or selection from features and corresponding seed classes. :param fv: ndarray with lineariezed feature. It's shape is MxN, where M is number of image pixels and N is number of features :param seeds: ndarray with seeds. Does not to be linear. :param unique_cls: number of used seeds clases. Like [1, 2] :return: fv, sd - selection from feature vector and selection from seeds or just fv for whole image """ if seeds is not None: if unique_cls is not None: return select_from_fv_by_seeds(fv, seeds, unique_cls) else: raise AssertionError("Input unique_cls has to be not None if seeds is not None.") else: return fv
python
def return_fv_by_seeds(fv, seeds=None, unique_cls=None): """ Return features selected by seeds and unique_cls or selection from features and corresponding seed classes. :param fv: ndarray with lineariezed feature. It's shape is MxN, where M is number of image pixels and N is number of features :param seeds: ndarray with seeds. Does not to be linear. :param unique_cls: number of used seeds clases. Like [1, 2] :return: fv, sd - selection from feature vector and selection from seeds or just fv for whole image """ if seeds is not None: if unique_cls is not None: return select_from_fv_by_seeds(fv, seeds, unique_cls) else: raise AssertionError("Input unique_cls has to be not None if seeds is not None.") else: return fv
[ "def", "return_fv_by_seeds", "(", "fv", ",", "seeds", "=", "None", ",", "unique_cls", "=", "None", ")", ":", "if", "seeds", "is", "not", "None", ":", "if", "unique_cls", "is", "not", "None", ":", "return", "select_from_fv_by_seeds", "(", "fv", ",", "seeds", ",", "unique_cls", ")", "else", ":", "raise", "AssertionError", "(", "\"Input unique_cls has to be not None if seeds is not None.\"", ")", "else", ":", "return", "fv" ]
Return features selected by seeds and unique_cls or selection from features and corresponding seed classes. :param fv: ndarray with lineariezed feature. It's shape is MxN, where M is number of image pixels and N is number of features :param seeds: ndarray with seeds. Does not to be linear. :param unique_cls: number of used seeds clases. Like [1, 2] :return: fv, sd - selection from feature vector and selection from seeds or just fv for whole image
[ "Return", "features", "selected", "by", "seeds", "and", "unique_cls", "or", "selection", "from", "features", "and", "corresponding", "seed", "classes", "." ]
train
https://github.com/mjirik/imcut/blob/1b38e7cd18a7a38fe683c1cabe1222fe5fa03aa3/imcut/features.py#L60-L76
chitamoor/Rester
rester/manifest.py
Variables.expand
def expand(self, expression): """Expands logical constructions.""" self.logger.debug("expand : expression %s", str(expression)) if not is_string(expression): return expression result = self._pattern.sub(lambda var: str(self._variables[var.group(1)]), expression) result = result.strip() self.logger.debug('expand : %s - result : %s', expression, result) if is_number(result): if result.isdigit(): self.logger.debug(' expand is integer !!!') return int(result) else: self.logger.debug(' expand is float !!!') return float(result) return result
python
def expand(self, expression): """Expands logical constructions.""" self.logger.debug("expand : expression %s", str(expression)) if not is_string(expression): return expression result = self._pattern.sub(lambda var: str(self._variables[var.group(1)]), expression) result = result.strip() self.logger.debug('expand : %s - result : %s', expression, result) if is_number(result): if result.isdigit(): self.logger.debug(' expand is integer !!!') return int(result) else: self.logger.debug(' expand is float !!!') return float(result) return result
[ "def", "expand", "(", "self", ",", "expression", ")", ":", "self", ".", "logger", ".", "debug", "(", "\"expand : expression %s\"", ",", "str", "(", "expression", ")", ")", "if", "not", "is_string", "(", "expression", ")", ":", "return", "expression", "result", "=", "self", ".", "_pattern", ".", "sub", "(", "lambda", "var", ":", "str", "(", "self", ".", "_variables", "[", "var", ".", "group", "(", "1", ")", "]", ")", ",", "expression", ")", "result", "=", "result", ".", "strip", "(", ")", "self", ".", "logger", ".", "debug", "(", "'expand : %s - result : %s'", ",", "expression", ",", "result", ")", "if", "is_number", "(", "result", ")", ":", "if", "result", ".", "isdigit", "(", ")", ":", "self", ".", "logger", ".", "debug", "(", "' expand is integer !!!'", ")", "return", "int", "(", "result", ")", "else", ":", "self", ".", "logger", ".", "debug", "(", "' expand is float !!!'", ")", "return", "float", "(", "result", ")", "return", "result" ]
Expands logical constructions.
[ "Expands", "logical", "constructions", "." ]
train
https://github.com/chitamoor/Rester/blob/1865b17f70b7c597aeadde2d0907cb1b59f10c0f/rester/manifest.py#L34-L52
disqus/gutter
gutter/client/__init__.py
get_gutter_client
def get_gutter_client( alias='default', cache=CLIENT_CACHE, **kwargs ): """ Creates gutter clients and memoizes them in a registry for future quick access. Args: alias (str or None): Name of the client. Used for caching. If name is falsy then do not use the cache. cache (dict): cache to store gutter managers in. **kwargs: kwargs to be passed the Manger class. Returns (Manager): A gutter client. """ from gutter.client.models import Manager if not alias: return Manager(**kwargs) elif alias not in cache: cache[alias] = Manager(**kwargs) return cache[alias]
python
def get_gutter_client( alias='default', cache=CLIENT_CACHE, **kwargs ): """ Creates gutter clients and memoizes them in a registry for future quick access. Args: alias (str or None): Name of the client. Used for caching. If name is falsy then do not use the cache. cache (dict): cache to store gutter managers in. **kwargs: kwargs to be passed the Manger class. Returns (Manager): A gutter client. """ from gutter.client.models import Manager if not alias: return Manager(**kwargs) elif alias not in cache: cache[alias] = Manager(**kwargs) return cache[alias]
[ "def", "get_gutter_client", "(", "alias", "=", "'default'", ",", "cache", "=", "CLIENT_CACHE", ",", "*", "*", "kwargs", ")", ":", "from", "gutter", ".", "client", ".", "models", "import", "Manager", "if", "not", "alias", ":", "return", "Manager", "(", "*", "*", "kwargs", ")", "elif", "alias", "not", "in", "cache", ":", "cache", "[", "alias", "]", "=", "Manager", "(", "*", "*", "kwargs", ")", "return", "cache", "[", "alias", "]" ]
Creates gutter clients and memoizes them in a registry for future quick access. Args: alias (str or None): Name of the client. Used for caching. If name is falsy then do not use the cache. cache (dict): cache to store gutter managers in. **kwargs: kwargs to be passed the Manger class. Returns (Manager): A gutter client.
[ "Creates", "gutter", "clients", "and", "memoizes", "them", "in", "a", "registry", "for", "future", "quick", "access", "." ]
train
https://github.com/disqus/gutter/blob/d686fa3cd0551cacfc5630c8e7b5fa75e6dcfdf5/gutter/client/__init__.py#L17-L42
disqus/gutter
gutter/client/operators/misc.py
PercentRange._modulo
def _modulo(self, decimal_argument): """ The mod operator is prone to floating point errors, so use decimal. 101.1 % 100 >>> 1.0999999999999943 decimal_context.divmod(Decimal('100.1'), 100) >>> (Decimal('1'), Decimal('0.1')) """ _times, remainder = self._context.divmod(decimal_argument, 100) # match the builtin % behavior by adding the N to the result if negative return remainder if remainder >= 0 else remainder + 100
python
def _modulo(self, decimal_argument): """ The mod operator is prone to floating point errors, so use decimal. 101.1 % 100 >>> 1.0999999999999943 decimal_context.divmod(Decimal('100.1'), 100) >>> (Decimal('1'), Decimal('0.1')) """ _times, remainder = self._context.divmod(decimal_argument, 100) # match the builtin % behavior by adding the N to the result if negative return remainder if remainder >= 0 else remainder + 100
[ "def", "_modulo", "(", "self", ",", "decimal_argument", ")", ":", "_times", ",", "remainder", "=", "self", ".", "_context", ".", "divmod", "(", "decimal_argument", ",", "100", ")", "# match the builtin % behavior by adding the N to the result if negative", "return", "remainder", "if", "remainder", ">=", "0", "else", "remainder", "+", "100" ]
The mod operator is prone to floating point errors, so use decimal. 101.1 % 100 >>> 1.0999999999999943 decimal_context.divmod(Decimal('100.1'), 100) >>> (Decimal('1'), Decimal('0.1'))
[ "The", "mod", "operator", "is", "prone", "to", "floating", "point", "errors", "so", "use", "decimal", "." ]
train
https://github.com/disqus/gutter/blob/d686fa3cd0551cacfc5630c8e7b5fa75e6dcfdf5/gutter/client/operators/misc.py#L16-L29
disqus/gutter
gutter/client/models.py
Switch.enabled_for
def enabled_for(self, inpt): """ Checks to see if this switch is enabled for the provided input. If ``compounded``, all switch conditions must be ``True`` for the switch to be enabled. Otherwise, *any* condition needs to be ``True`` for the switch to be enabled. The switch state is then checked to see if it is ``GLOBAL`` or ``DISABLED``. If it is not, then the switch is ``SELECTIVE`` and each condition is checked. Keyword Arguments: inpt -- An instance of the ``Input`` class. """ signals.switch_checked.call(self) signal_decorated = partial(self.__signal_and_return, inpt) if self.state is self.states.GLOBAL: return signal_decorated(True) elif self.state is self.states.DISABLED: return signal_decorated(False) conditions_dict = ConditionsDict.from_conditions_list(self.conditions) conditions = conditions_dict.get_by_input(inpt) if conditions: result = self.__enabled_func( cond.call(inpt) for cond in conditions if cond.argument(inpt).applies ) else: result = None return signal_decorated(result)
python
def enabled_for(self, inpt): """ Checks to see if this switch is enabled for the provided input. If ``compounded``, all switch conditions must be ``True`` for the switch to be enabled. Otherwise, *any* condition needs to be ``True`` for the switch to be enabled. The switch state is then checked to see if it is ``GLOBAL`` or ``DISABLED``. If it is not, then the switch is ``SELECTIVE`` and each condition is checked. Keyword Arguments: inpt -- An instance of the ``Input`` class. """ signals.switch_checked.call(self) signal_decorated = partial(self.__signal_and_return, inpt) if self.state is self.states.GLOBAL: return signal_decorated(True) elif self.state is self.states.DISABLED: return signal_decorated(False) conditions_dict = ConditionsDict.from_conditions_list(self.conditions) conditions = conditions_dict.get_by_input(inpt) if conditions: result = self.__enabled_func( cond.call(inpt) for cond in conditions if cond.argument(inpt).applies ) else: result = None return signal_decorated(result)
[ "def", "enabled_for", "(", "self", ",", "inpt", ")", ":", "signals", ".", "switch_checked", ".", "call", "(", "self", ")", "signal_decorated", "=", "partial", "(", "self", ".", "__signal_and_return", ",", "inpt", ")", "if", "self", ".", "state", "is", "self", ".", "states", ".", "GLOBAL", ":", "return", "signal_decorated", "(", "True", ")", "elif", "self", ".", "state", "is", "self", ".", "states", ".", "DISABLED", ":", "return", "signal_decorated", "(", "False", ")", "conditions_dict", "=", "ConditionsDict", ".", "from_conditions_list", "(", "self", ".", "conditions", ")", "conditions", "=", "conditions_dict", ".", "get_by_input", "(", "inpt", ")", "if", "conditions", ":", "result", "=", "self", ".", "__enabled_func", "(", "cond", ".", "call", "(", "inpt", ")", "for", "cond", "in", "conditions", "if", "cond", ".", "argument", "(", "inpt", ")", ".", "applies", ")", "else", ":", "result", "=", "None", "return", "signal_decorated", "(", "result", ")" ]
Checks to see if this switch is enabled for the provided input. If ``compounded``, all switch conditions must be ``True`` for the switch to be enabled. Otherwise, *any* condition needs to be ``True`` for the switch to be enabled. The switch state is then checked to see if it is ``GLOBAL`` or ``DISABLED``. If it is not, then the switch is ``SELECTIVE`` and each condition is checked. Keyword Arguments: inpt -- An instance of the ``Input`` class.
[ "Checks", "to", "see", "if", "this", "switch", "is", "enabled", "for", "the", "provided", "input", "." ]
train
https://github.com/disqus/gutter/blob/d686fa3cd0551cacfc5630c8e7b5fa75e6dcfdf5/gutter/client/models.py#L150-L187
disqus/gutter
gutter/client/models.py
Condition.call
def call(self, inpt): """ Returns if the condition applies to the ``inpt``. If the class ``inpt`` is an instance of is not the same class as the condition's own ``argument``, then ``False`` is returned. This also applies to the ``NONE`` input. Otherwise, ``argument`` is called, with ``inpt`` as the instance and the value is compared to the ``operator`` and the Value is returned. If the condition is ``negative``, then then ``not`` the value is returned. Keyword Arguments: inpt -- An instance of the ``Input`` class. """ if inpt is Manager.NONE_INPUT: return False # Call (construct) the argument with the input object argument_instance = self.argument(inpt) if not argument_instance.applies: return False application = self.__apply(argument_instance, inpt) if self.negative: application = not application return application
python
def call(self, inpt): """ Returns if the condition applies to the ``inpt``. If the class ``inpt`` is an instance of is not the same class as the condition's own ``argument``, then ``False`` is returned. This also applies to the ``NONE`` input. Otherwise, ``argument`` is called, with ``inpt`` as the instance and the value is compared to the ``operator`` and the Value is returned. If the condition is ``negative``, then then ``not`` the value is returned. Keyword Arguments: inpt -- An instance of the ``Input`` class. """ if inpt is Manager.NONE_INPUT: return False # Call (construct) the argument with the input object argument_instance = self.argument(inpt) if not argument_instance.applies: return False application = self.__apply(argument_instance, inpt) if self.negative: application = not application return application
[ "def", "call", "(", "self", ",", "inpt", ")", ":", "if", "inpt", "is", "Manager", ".", "NONE_INPUT", ":", "return", "False", "# Call (construct) the argument with the input object", "argument_instance", "=", "self", ".", "argument", "(", "inpt", ")", "if", "not", "argument_instance", ".", "applies", ":", "return", "False", "application", "=", "self", ".", "__apply", "(", "argument_instance", ",", "inpt", ")", "if", "self", ".", "negative", ":", "application", "=", "not", "application", "return", "application" ]
Returns if the condition applies to the ``inpt``. If the class ``inpt`` is an instance of is not the same class as the condition's own ``argument``, then ``False`` is returned. This also applies to the ``NONE`` input. Otherwise, ``argument`` is called, with ``inpt`` as the instance and the value is compared to the ``operator`` and the Value is returned. If the condition is ``negative``, then then ``not`` the value is returned. Keyword Arguments: inpt -- An instance of the ``Input`` class.
[ "Returns", "if", "the", "condition", "applies", "to", "the", "inpt", "." ]
train
https://github.com/disqus/gutter/blob/d686fa3cd0551cacfc5630c8e7b5fa75e6dcfdf5/gutter/client/models.py#L333-L362
disqus/gutter
gutter/client/models.py
Manager.switches
def switches(self): """ List of all switches currently registered. """ results = [ switch for name, switch in self.storage.iteritems() if name.startswith(self.__joined_namespace) ] return results
python
def switches(self): """ List of all switches currently registered. """ results = [ switch for name, switch in self.storage.iteritems() if name.startswith(self.__joined_namespace) ] return results
[ "def", "switches", "(", "self", ")", ":", "results", "=", "[", "switch", "for", "name", ",", "switch", "in", "self", ".", "storage", ".", "iteritems", "(", ")", "if", "name", ".", "startswith", "(", "self", ".", "__joined_namespace", ")", "]", "return", "results" ]
List of all switches currently registered.
[ "List", "of", "all", "switches", "currently", "registered", "." ]
train
https://github.com/disqus/gutter/blob/d686fa3cd0551cacfc5630c8e7b5fa75e6dcfdf5/gutter/client/models.py#L438-L447
disqus/gutter
gutter/client/models.py
Manager.switch
def switch(self, name): """ Returns the switch with the provided ``name``. If ``autocreate`` is set to ``True`` and no switch with that name exists, a ``DISABLED`` switch will be with that name. Keyword Arguments: name -- A name of a switch. """ try: switch = self.storage[self.__namespaced(name)] except KeyError: if not self.autocreate: raise ValueError("No switch named '%s' registered in '%s'" % (name, self.namespace)) switch = self.__create_and_register_disabled_switch(name) switch.manager = self return switch
python
def switch(self, name): """ Returns the switch with the provided ``name``. If ``autocreate`` is set to ``True`` and no switch with that name exists, a ``DISABLED`` switch will be with that name. Keyword Arguments: name -- A name of a switch. """ try: switch = self.storage[self.__namespaced(name)] except KeyError: if not self.autocreate: raise ValueError("No switch named '%s' registered in '%s'" % (name, self.namespace)) switch = self.__create_and_register_disabled_switch(name) switch.manager = self return switch
[ "def", "switch", "(", "self", ",", "name", ")", ":", "try", ":", "switch", "=", "self", ".", "storage", "[", "self", ".", "__namespaced", "(", "name", ")", "]", "except", "KeyError", ":", "if", "not", "self", ".", "autocreate", ":", "raise", "ValueError", "(", "\"No switch named '%s' registered in '%s'\"", "%", "(", "name", ",", "self", ".", "namespace", ")", ")", "switch", "=", "self", ".", "__create_and_register_disabled_switch", "(", "name", ")", "switch", ".", "manager", "=", "self", "return", "switch" ]
Returns the switch with the provided ``name``. If ``autocreate`` is set to ``True`` and no switch with that name exists, a ``DISABLED`` switch will be with that name. Keyword Arguments: name -- A name of a switch.
[ "Returns", "the", "switch", "with", "the", "provided", "name", "." ]
train
https://github.com/disqus/gutter/blob/d686fa3cd0551cacfc5630c8e7b5fa75e6dcfdf5/gutter/client/models.py#L449-L468
disqus/gutter
gutter/client/models.py
Manager.register
def register(self, switch, signal=signals.switch_registered): ''' Register a switch and persist it to the storage. ''' if not switch.name: raise ValueError('Switch name cannot be blank') switch.manager = self self.__persist(switch) signal.call(switch)
python
def register(self, switch, signal=signals.switch_registered): ''' Register a switch and persist it to the storage. ''' if not switch.name: raise ValueError('Switch name cannot be blank') switch.manager = self self.__persist(switch) signal.call(switch)
[ "def", "register", "(", "self", ",", "switch", ",", "signal", "=", "signals", ".", "switch_registered", ")", ":", "if", "not", "switch", ".", "name", ":", "raise", "ValueError", "(", "'Switch name cannot be blank'", ")", "switch", ".", "manager", "=", "self", "self", ".", "__persist", "(", "switch", ")", "signal", ".", "call", "(", "switch", ")" ]
Register a switch and persist it to the storage.
[ "Register", "a", "switch", "and", "persist", "it", "to", "the", "storage", "." ]
train
https://github.com/disqus/gutter/blob/d686fa3cd0551cacfc5630c8e7b5fa75e6dcfdf5/gutter/client/models.py#L479-L489
kaste/mockito-python
mockito/mockito.py
verify
def verify(obj, times=1, atleast=None, atmost=None, between=None, inorder=False): """Central interface to verify interactions. `verify` uses a fluent interface:: verify(<obj>, times=2).<method_name>(<args>) `args` can be as concrete as necessary. Often a catch-all is enough, especially if you're working with strict mocks, bc they throw at call time on unwanted, unconfigured arguments:: from mockito import ANY, ARGS, KWARGS when(manager).add_tasks(1, 2, 3) ... # no need to duplicate the specification; every other argument pattern # would have raised anyway. verify(manager).add_tasks(1, 2, 3) # duplicates `when`call verify(manager).add_tasks(*ARGS) verify(manager).add_tasks(...) # Py3 verify(manager).add_tasks(Ellipsis) # Py2 """ if isinstance(obj, str): obj = get_obj(obj) verification_fn = _get_wanted_verification( times=times, atleast=atleast, atmost=atmost, between=between) if inorder: verification_fn = verification.InOrder(verification_fn) # FIXME?: Catch error if obj is neither a Mock nor a known stubbed obj theMock = _get_mock_or_raise(obj) class Verify(object): def __getattr__(self, method_name): return invocation.VerifiableInvocation( theMock, method_name, verification_fn) return Verify()
python
def verify(obj, times=1, atleast=None, atmost=None, between=None, inorder=False): """Central interface to verify interactions. `verify` uses a fluent interface:: verify(<obj>, times=2).<method_name>(<args>) `args` can be as concrete as necessary. Often a catch-all is enough, especially if you're working with strict mocks, bc they throw at call time on unwanted, unconfigured arguments:: from mockito import ANY, ARGS, KWARGS when(manager).add_tasks(1, 2, 3) ... # no need to duplicate the specification; every other argument pattern # would have raised anyway. verify(manager).add_tasks(1, 2, 3) # duplicates `when`call verify(manager).add_tasks(*ARGS) verify(manager).add_tasks(...) # Py3 verify(manager).add_tasks(Ellipsis) # Py2 """ if isinstance(obj, str): obj = get_obj(obj) verification_fn = _get_wanted_verification( times=times, atleast=atleast, atmost=atmost, between=between) if inorder: verification_fn = verification.InOrder(verification_fn) # FIXME?: Catch error if obj is neither a Mock nor a known stubbed obj theMock = _get_mock_or_raise(obj) class Verify(object): def __getattr__(self, method_name): return invocation.VerifiableInvocation( theMock, method_name, verification_fn) return Verify()
[ "def", "verify", "(", "obj", ",", "times", "=", "1", ",", "atleast", "=", "None", ",", "atmost", "=", "None", ",", "between", "=", "None", ",", "inorder", "=", "False", ")", ":", "if", "isinstance", "(", "obj", ",", "str", ")", ":", "obj", "=", "get_obj", "(", "obj", ")", "verification_fn", "=", "_get_wanted_verification", "(", "times", "=", "times", ",", "atleast", "=", "atleast", ",", "atmost", "=", "atmost", ",", "between", "=", "between", ")", "if", "inorder", ":", "verification_fn", "=", "verification", ".", "InOrder", "(", "verification_fn", ")", "# FIXME?: Catch error if obj is neither a Mock nor a known stubbed obj", "theMock", "=", "_get_mock_or_raise", "(", "obj", ")", "class", "Verify", "(", "object", ")", ":", "def", "__getattr__", "(", "self", ",", "method_name", ")", ":", "return", "invocation", ".", "VerifiableInvocation", "(", "theMock", ",", "method_name", ",", "verification_fn", ")", "return", "Verify", "(", ")" ]
Central interface to verify interactions. `verify` uses a fluent interface:: verify(<obj>, times=2).<method_name>(<args>) `args` can be as concrete as necessary. Often a catch-all is enough, especially if you're working with strict mocks, bc they throw at call time on unwanted, unconfigured arguments:: from mockito import ANY, ARGS, KWARGS when(manager).add_tasks(1, 2, 3) ... # no need to duplicate the specification; every other argument pattern # would have raised anyway. verify(manager).add_tasks(1, 2, 3) # duplicates `when`call verify(manager).add_tasks(*ARGS) verify(manager).add_tasks(...) # Py3 verify(manager).add_tasks(Ellipsis) # Py2
[ "Central", "interface", "to", "verify", "interactions", "." ]
train
https://github.com/kaste/mockito-python/blob/d6b22b003f56ee5b156dbd9d8ba209faf35b6713/mockito/mockito.py#L100-L140
kaste/mockito-python
mockito/mockito.py
when
def when(obj, strict=None): """Central interface to stub functions on a given `obj` `obj` should be a module, a class or an instance of a class; it can be a Dummy you created with :func:`mock`. ``when`` exposes a fluent interface where you configure a stub in three steps:: when(<obj>).<method_name>(<args>).thenReturn(<value>) Compared to simple *patching*, stubbing in mockito requires you to specify conrete `args` for which the stub will answer with a concrete `<value>`. All invocations that do not match this specific call signature will be rejected. They usually throw at call time. Stubbing in mockito's sense thus means not only to get rid of unwanted side effects, but effectively to turn function calls into constants. E.g.:: # Given ``dog`` is an instance of a ``Dog`` when(dog).bark('Grrr').thenReturn('Wuff') when(dog).bark('Miau').thenRaise(TypeError()) # With this configuration set up: assert dog.bark('Grrr') == 'Wuff' dog.bark('Miau') # will throw TypeError dog.bark('Wuff') # will throw unwanted interaction Stubbing can effectively be used as monkeypatching; usage shown with the `with` context managing:: with when(os.path).exists('/foo').thenReturn(True): ... Most of the time verifying your interactions is not necessary, because your code under tests implicitly verifies the return value by evaluating it. See :func:`verify` if you need to, see also :func:`expect` to setup expected call counts up front. If your function is pure side effect and does not return something, you can omit the specific answer. The default then is `None`:: when(manager).do_work() `when` verifies the method name, the expected argument signature, and the actual, factual arguments your code under test uses against the original object and its function so its easier to spot changing interfaces. Sometimes it's tedious to spell out all arguments:: from mockito import ANY, ARGS, KWARGS when(requests).get('http://example.com/', **KWARGS).thenReturn(...) 
when(os.path).exists(ANY) when(os.path).exists(ANY(str)) .. note:: You must :func:`unstub` after stubbing, or use `with` statement. Set ``strict=False`` to bypass the function signature checks. See related :func:`when2` which has a more pythonic interface. """ if isinstance(obj, str): obj = get_obj(obj) if strict is None: strict = True theMock = _get_mock(obj, strict=strict) class When(object): def __getattr__(self, method_name): return invocation.StubbedInvocation( theMock, method_name, strict=strict) return When()
python
def when(obj, strict=None): """Central interface to stub functions on a given `obj` `obj` should be a module, a class or an instance of a class; it can be a Dummy you created with :func:`mock`. ``when`` exposes a fluent interface where you configure a stub in three steps:: when(<obj>).<method_name>(<args>).thenReturn(<value>) Compared to simple *patching*, stubbing in mockito requires you to specify conrete `args` for which the stub will answer with a concrete `<value>`. All invocations that do not match this specific call signature will be rejected. They usually throw at call time. Stubbing in mockito's sense thus means not only to get rid of unwanted side effects, but effectively to turn function calls into constants. E.g.:: # Given ``dog`` is an instance of a ``Dog`` when(dog).bark('Grrr').thenReturn('Wuff') when(dog).bark('Miau').thenRaise(TypeError()) # With this configuration set up: assert dog.bark('Grrr') == 'Wuff' dog.bark('Miau') # will throw TypeError dog.bark('Wuff') # will throw unwanted interaction Stubbing can effectively be used as monkeypatching; usage shown with the `with` context managing:: with when(os.path).exists('/foo').thenReturn(True): ... Most of the time verifying your interactions is not necessary, because your code under tests implicitly verifies the return value by evaluating it. See :func:`verify` if you need to, see also :func:`expect` to setup expected call counts up front. If your function is pure side effect and does not return something, you can omit the specific answer. The default then is `None`:: when(manager).do_work() `when` verifies the method name, the expected argument signature, and the actual, factual arguments your code under test uses against the original object and its function so its easier to spot changing interfaces. Sometimes it's tedious to spell out all arguments:: from mockito import ANY, ARGS, KWARGS when(requests).get('http://example.com/', **KWARGS).thenReturn(...) 
when(os.path).exists(ANY) when(os.path).exists(ANY(str)) .. note:: You must :func:`unstub` after stubbing, or use `with` statement. Set ``strict=False`` to bypass the function signature checks. See related :func:`when2` which has a more pythonic interface. """ if isinstance(obj, str): obj = get_obj(obj) if strict is None: strict = True theMock = _get_mock(obj, strict=strict) class When(object): def __getattr__(self, method_name): return invocation.StubbedInvocation( theMock, method_name, strict=strict) return When()
[ "def", "when", "(", "obj", ",", "strict", "=", "None", ")", ":", "if", "isinstance", "(", "obj", ",", "str", ")", ":", "obj", "=", "get_obj", "(", "obj", ")", "if", "strict", "is", "None", ":", "strict", "=", "True", "theMock", "=", "_get_mock", "(", "obj", ",", "strict", "=", "strict", ")", "class", "When", "(", "object", ")", ":", "def", "__getattr__", "(", "self", ",", "method_name", ")", ":", "return", "invocation", ".", "StubbedInvocation", "(", "theMock", ",", "method_name", ",", "strict", "=", "strict", ")", "return", "When", "(", ")" ]
Central interface to stub functions on a given `obj` `obj` should be a module, a class or an instance of a class; it can be a Dummy you created with :func:`mock`. ``when`` exposes a fluent interface where you configure a stub in three steps:: when(<obj>).<method_name>(<args>).thenReturn(<value>) Compared to simple *patching*, stubbing in mockito requires you to specify conrete `args` for which the stub will answer with a concrete `<value>`. All invocations that do not match this specific call signature will be rejected. They usually throw at call time. Stubbing in mockito's sense thus means not only to get rid of unwanted side effects, but effectively to turn function calls into constants. E.g.:: # Given ``dog`` is an instance of a ``Dog`` when(dog).bark('Grrr').thenReturn('Wuff') when(dog).bark('Miau').thenRaise(TypeError()) # With this configuration set up: assert dog.bark('Grrr') == 'Wuff' dog.bark('Miau') # will throw TypeError dog.bark('Wuff') # will throw unwanted interaction Stubbing can effectively be used as monkeypatching; usage shown with the `with` context managing:: with when(os.path).exists('/foo').thenReturn(True): ... Most of the time verifying your interactions is not necessary, because your code under tests implicitly verifies the return value by evaluating it. See :func:`verify` if you need to, see also :func:`expect` to setup expected call counts up front. If your function is pure side effect and does not return something, you can omit the specific answer. The default then is `None`:: when(manager).do_work() `when` verifies the method name, the expected argument signature, and the actual, factual arguments your code under test uses against the original object and its function so its easier to spot changing interfaces. Sometimes it's tedious to spell out all arguments:: from mockito import ANY, ARGS, KWARGS when(requests).get('http://example.com/', **KWARGS).thenReturn(...) when(os.path).exists(ANY) when(os.path).exists(ANY(str)) .. 
note:: You must :func:`unstub` after stubbing, or use `with` statement. Set ``strict=False`` to bypass the function signature checks. See related :func:`when2` which has a more pythonic interface.
[ "Central", "interface", "to", "stub", "functions", "on", "a", "given", "obj" ]
train
https://github.com/kaste/mockito-python/blob/d6b22b003f56ee5b156dbd9d8ba209faf35b6713/mockito/mockito.py#L151-L227
kaste/mockito-python
mockito/mockito.py
when2
def when2(fn, *args, **kwargs): """Stub a function call with the given arguments Exposes a more pythonic interface than :func:`when`. See :func:`when` for more documentation. Returns `AnswerSelector` interface which exposes `thenReturn`, `thenRaise`, and `thenAnswer` as usual. Always `strict`. Usage:: # Given `dog` is an instance of a `Dog` when2(dog.bark, 'Miau').thenReturn('Wuff') .. note:: You must :func:`unstub` after stubbing, or use `with` statement. """ obj, name = get_obj_attr_tuple(fn) theMock = _get_mock(obj, strict=True) return invocation.StubbedInvocation(theMock, name)(*args, **kwargs)
python
def when2(fn, *args, **kwargs): """Stub a function call with the given arguments Exposes a more pythonic interface than :func:`when`. See :func:`when` for more documentation. Returns `AnswerSelector` interface which exposes `thenReturn`, `thenRaise`, and `thenAnswer` as usual. Always `strict`. Usage:: # Given `dog` is an instance of a `Dog` when2(dog.bark, 'Miau').thenReturn('Wuff') .. note:: You must :func:`unstub` after stubbing, or use `with` statement. """ obj, name = get_obj_attr_tuple(fn) theMock = _get_mock(obj, strict=True) return invocation.StubbedInvocation(theMock, name)(*args, **kwargs)
[ "def", "when2", "(", "fn", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "obj", ",", "name", "=", "get_obj_attr_tuple", "(", "fn", ")", "theMock", "=", "_get_mock", "(", "obj", ",", "strict", "=", "True", ")", "return", "invocation", ".", "StubbedInvocation", "(", "theMock", ",", "name", ")", "(", "*", "args", ",", "*", "*", "kwargs", ")" ]
Stub a function call with the given arguments Exposes a more pythonic interface than :func:`when`. See :func:`when` for more documentation. Returns `AnswerSelector` interface which exposes `thenReturn`, `thenRaise`, and `thenAnswer` as usual. Always `strict`. Usage:: # Given `dog` is an instance of a `Dog` when2(dog.bark, 'Miau').thenReturn('Wuff') .. note:: You must :func:`unstub` after stubbing, or use `with` statement.
[ "Stub", "a", "function", "call", "with", "the", "given", "arguments" ]
train
https://github.com/kaste/mockito-python/blob/d6b22b003f56ee5b156dbd9d8ba209faf35b6713/mockito/mockito.py#L230-L250
kaste/mockito-python
mockito/mockito.py
patch
def patch(fn, attr_or_replacement, replacement=None): """Patch/Replace a function. This is really like monkeypatching, but *note* that all interactions will be recorded and can be verified. That is, using `patch` you stay in the domain of mockito. Two ways to call this. Either:: patch(os.path.exists, lambda str: True) # two arguments # OR patch(os.path, 'exists', lambda str: True) # three arguments If called with three arguments, the mode is *not* strict to allow *adding* methods. If called with two arguments, mode is always `strict`. .. note:: You must :func:`unstub` after stubbing, or use `with` statement. """ if replacement is None: replacement = attr_or_replacement return when2(fn, Ellipsis).thenAnswer(replacement) else: obj, name = fn, attr_or_replacement theMock = _get_mock(obj, strict=True) return invocation.StubbedInvocation( theMock, name, strict=False)(Ellipsis).thenAnswer(replacement)
python
def patch(fn, attr_or_replacement, replacement=None): """Patch/Replace a function. This is really like monkeypatching, but *note* that all interactions will be recorded and can be verified. That is, using `patch` you stay in the domain of mockito. Two ways to call this. Either:: patch(os.path.exists, lambda str: True) # two arguments # OR patch(os.path, 'exists', lambda str: True) # three arguments If called with three arguments, the mode is *not* strict to allow *adding* methods. If called with two arguments, mode is always `strict`. .. note:: You must :func:`unstub` after stubbing, or use `with` statement. """ if replacement is None: replacement = attr_or_replacement return when2(fn, Ellipsis).thenAnswer(replacement) else: obj, name = fn, attr_or_replacement theMock = _get_mock(obj, strict=True) return invocation.StubbedInvocation( theMock, name, strict=False)(Ellipsis).thenAnswer(replacement)
[ "def", "patch", "(", "fn", ",", "attr_or_replacement", ",", "replacement", "=", "None", ")", ":", "if", "replacement", "is", "None", ":", "replacement", "=", "attr_or_replacement", "return", "when2", "(", "fn", ",", "Ellipsis", ")", ".", "thenAnswer", "(", "replacement", ")", "else", ":", "obj", ",", "name", "=", "fn", ",", "attr_or_replacement", "theMock", "=", "_get_mock", "(", "obj", ",", "strict", "=", "True", ")", "return", "invocation", ".", "StubbedInvocation", "(", "theMock", ",", "name", ",", "strict", "=", "False", ")", "(", "Ellipsis", ")", ".", "thenAnswer", "(", "replacement", ")" ]
Patch/Replace a function. This is really like monkeypatching, but *note* that all interactions will be recorded and can be verified. That is, using `patch` you stay in the domain of mockito. Two ways to call this. Either:: patch(os.path.exists, lambda str: True) # two arguments # OR patch(os.path, 'exists', lambda str: True) # three arguments If called with three arguments, the mode is *not* strict to allow *adding* methods. If called with two arguments, mode is always `strict`. .. note:: You must :func:`unstub` after stubbing, or use `with` statement.
[ "Patch", "/", "Replace", "a", "function", "." ]
train
https://github.com/kaste/mockito-python/blob/d6b22b003f56ee5b156dbd9d8ba209faf35b6713/mockito/mockito.py#L253-L280
kaste/mockito-python
mockito/mockito.py
expect
def expect(obj, strict=None, times=None, atleast=None, atmost=None, between=None): """Stub a function call, and set up an expected call count. Usage:: # Given `dog` is an instance of a `Dog` expect(dog, times=1).bark('Wuff').thenReturn('Miau') dog.bark('Wuff') dog.bark('Wuff') # will throw at call time: too many invocations # maybe if you need to ensure that `dog.bark()` was called at all verifyNoUnwantedInteractions() .. note:: You must :func:`unstub` after stubbing, or use `with` statement. See :func:`when`, :func:`when2`, :func:`verifyNoUnwantedInteractions` """ if strict is None: strict = True theMock = _get_mock(obj, strict=strict) verification_fn = _get_wanted_verification( times=times, atleast=atleast, atmost=atmost, between=between) class Expect(object): def __getattr__(self, method_name): return invocation.StubbedInvocation( theMock, method_name, verification=verification_fn, strict=strict) return Expect()
python
def expect(obj, strict=None, times=None, atleast=None, atmost=None, between=None): """Stub a function call, and set up an expected call count. Usage:: # Given `dog` is an instance of a `Dog` expect(dog, times=1).bark('Wuff').thenReturn('Miau') dog.bark('Wuff') dog.bark('Wuff') # will throw at call time: too many invocations # maybe if you need to ensure that `dog.bark()` was called at all verifyNoUnwantedInteractions() .. note:: You must :func:`unstub` after stubbing, or use `with` statement. See :func:`when`, :func:`when2`, :func:`verifyNoUnwantedInteractions` """ if strict is None: strict = True theMock = _get_mock(obj, strict=strict) verification_fn = _get_wanted_verification( times=times, atleast=atleast, atmost=atmost, between=between) class Expect(object): def __getattr__(self, method_name): return invocation.StubbedInvocation( theMock, method_name, verification=verification_fn, strict=strict) return Expect()
[ "def", "expect", "(", "obj", ",", "strict", "=", "None", ",", "times", "=", "None", ",", "atleast", "=", "None", ",", "atmost", "=", "None", ",", "between", "=", "None", ")", ":", "if", "strict", "is", "None", ":", "strict", "=", "True", "theMock", "=", "_get_mock", "(", "obj", ",", "strict", "=", "strict", ")", "verification_fn", "=", "_get_wanted_verification", "(", "times", "=", "times", ",", "atleast", "=", "atleast", ",", "atmost", "=", "atmost", ",", "between", "=", "between", ")", "class", "Expect", "(", "object", ")", ":", "def", "__getattr__", "(", "self", ",", "method_name", ")", ":", "return", "invocation", ".", "StubbedInvocation", "(", "theMock", ",", "method_name", ",", "verification", "=", "verification_fn", ",", "strict", "=", "strict", ")", "return", "Expect", "(", ")" ]
Stub a function call, and set up an expected call count. Usage:: # Given `dog` is an instance of a `Dog` expect(dog, times=1).bark('Wuff').thenReturn('Miau') dog.bark('Wuff') dog.bark('Wuff') # will throw at call time: too many invocations # maybe if you need to ensure that `dog.bark()` was called at all verifyNoUnwantedInteractions() .. note:: You must :func:`unstub` after stubbing, or use `with` statement. See :func:`when`, :func:`when2`, :func:`verifyNoUnwantedInteractions`
[ "Stub", "a", "function", "call", "and", "set", "up", "an", "expected", "call", "count", "." ]
train
https://github.com/kaste/mockito-python/blob/d6b22b003f56ee5b156dbd9d8ba209faf35b6713/mockito/mockito.py#L284-L317
kaste/mockito-python
mockito/mockito.py
unstub
def unstub(*objs): """Unstubs all stubbed methods and functions If you don't pass in any argument, *all* registered mocks and patched modules, classes etc. will be unstubbed. Note that additionally, the underlying registry will be cleaned. After an `unstub` you can't :func:`verify` anymore because all interactions will be forgotten. """ if objs: for obj in objs: mock_registry.unstub(obj) else: mock_registry.unstub_all()
python
def unstub(*objs): """Unstubs all stubbed methods and functions If you don't pass in any argument, *all* registered mocks and patched modules, classes etc. will be unstubbed. Note that additionally, the underlying registry will be cleaned. After an `unstub` you can't :func:`verify` anymore because all interactions will be forgotten. """ if objs: for obj in objs: mock_registry.unstub(obj) else: mock_registry.unstub_all()
[ "def", "unstub", "(", "*", "objs", ")", ":", "if", "objs", ":", "for", "obj", "in", "objs", ":", "mock_registry", ".", "unstub", "(", "obj", ")", "else", ":", "mock_registry", ".", "unstub_all", "(", ")" ]
Unstubs all stubbed methods and functions If you don't pass in any argument, *all* registered mocks and patched modules, classes etc. will be unstubbed. Note that additionally, the underlying registry will be cleaned. After an `unstub` you can't :func:`verify` anymore because all interactions will be forgotten.
[ "Unstubs", "all", "stubbed", "methods", "and", "functions" ]
train
https://github.com/kaste/mockito-python/blob/d6b22b003f56ee5b156dbd9d8ba209faf35b6713/mockito/mockito.py#L321-L336
kaste/mockito-python
mockito/mockito.py
verifyZeroInteractions
def verifyZeroInteractions(*objs): """Verify that no methods have been called on given objs. Note that strict mocks usually throw early on unexpected, unstubbed invocations. Partial mocks ('monkeypatched' objects or modules) do not support this functionality at all, bc only for the stubbed invocations the actual usage gets recorded. So this function is of limited use, nowadays. """ for obj in objs: theMock = _get_mock_or_raise(obj) if len(theMock.invocations) > 0: raise VerificationError( "\nUnwanted interaction: %s" % theMock.invocations[0])
python
def verifyZeroInteractions(*objs): """Verify that no methods have been called on given objs. Note that strict mocks usually throw early on unexpected, unstubbed invocations. Partial mocks ('monkeypatched' objects or modules) do not support this functionality at all, bc only for the stubbed invocations the actual usage gets recorded. So this function is of limited use, nowadays. """ for obj in objs: theMock = _get_mock_or_raise(obj) if len(theMock.invocations) > 0: raise VerificationError( "\nUnwanted interaction: %s" % theMock.invocations[0])
[ "def", "verifyZeroInteractions", "(", "*", "objs", ")", ":", "for", "obj", "in", "objs", ":", "theMock", "=", "_get_mock_or_raise", "(", "obj", ")", "if", "len", "(", "theMock", ".", "invocations", ")", ">", "0", ":", "raise", "VerificationError", "(", "\"\\nUnwanted interaction: %s\"", "%", "theMock", ".", "invocations", "[", "0", "]", ")" ]
Verify that no methods have been called on given objs. Note that strict mocks usually throw early on unexpected, unstubbed invocations. Partial mocks ('monkeypatched' objects or modules) do not support this functionality at all, bc only for the stubbed invocations the actual usage gets recorded. So this function is of limited use, nowadays.
[ "Verify", "that", "no", "methods", "have", "been", "called", "on", "given", "objs", "." ]
train
https://github.com/kaste/mockito-python/blob/d6b22b003f56ee5b156dbd9d8ba209faf35b6713/mockito/mockito.py#L364-L379
kaste/mockito-python
mockito/mockito.py
verifyNoUnwantedInteractions
def verifyNoUnwantedInteractions(*objs): """Verifies that expectations set via `expect` are met E.g.:: expect(os.path, times=1).exists(...).thenReturn(True) os.path('/foo') verifyNoUnwantedInteractions(os.path) # ok, called once If you leave out the argument *all* registered objects will be checked. .. note:: **DANGERZONE**: If you did not :func:`unstub` correctly, it is possible that old registered mocks, from other tests leak. See related :func:`expect` """ if objs: theMocks = map(_get_mock_or_raise, objs) else: theMocks = mock_registry.get_registered_mocks() for mock in theMocks: for i in mock.stubbed_invocations: i.verify()
python
def verifyNoUnwantedInteractions(*objs): """Verifies that expectations set via `expect` are met E.g.:: expect(os.path, times=1).exists(...).thenReturn(True) os.path('/foo') verifyNoUnwantedInteractions(os.path) # ok, called once If you leave out the argument *all* registered objects will be checked. .. note:: **DANGERZONE**: If you did not :func:`unstub` correctly, it is possible that old registered mocks, from other tests leak. See related :func:`expect` """ if objs: theMocks = map(_get_mock_or_raise, objs) else: theMocks = mock_registry.get_registered_mocks() for mock in theMocks: for i in mock.stubbed_invocations: i.verify()
[ "def", "verifyNoUnwantedInteractions", "(", "*", "objs", ")", ":", "if", "objs", ":", "theMocks", "=", "map", "(", "_get_mock_or_raise", ",", "objs", ")", "else", ":", "theMocks", "=", "mock_registry", ".", "get_registered_mocks", "(", ")", "for", "mock", "in", "theMocks", ":", "for", "i", "in", "mock", ".", "stubbed_invocations", ":", "i", ".", "verify", "(", ")" ]
Verifies that expectations set via `expect` are met E.g.:: expect(os.path, times=1).exists(...).thenReturn(True) os.path('/foo') verifyNoUnwantedInteractions(os.path) # ok, called once If you leave out the argument *all* registered objects will be checked. .. note:: **DANGERZONE**: If you did not :func:`unstub` correctly, it is possible that old registered mocks, from other tests leak. See related :func:`expect`
[ "Verifies", "that", "expectations", "set", "via", "expect", "are", "met" ]
train
https://github.com/kaste/mockito-python/blob/d6b22b003f56ee5b156dbd9d8ba209faf35b6713/mockito/mockito.py#L383-L409
kaste/mockito-python
mockito/mockito.py
verifyStubbedInvocationsAreUsed
def verifyStubbedInvocationsAreUsed(*objs): """Ensure stubs are actually used. This functions just ensures that stubbed methods are actually used. Its purpose is to detect interface changes after refactorings. It is meant to be invoked usually without arguments just before :func:`unstub`. """ if objs: theMocks = map(_get_mock_or_raise, objs) else: theMocks = mock_registry.get_registered_mocks() for mock in theMocks: for i in mock.stubbed_invocations: if not i.allow_zero_invocations and i.used < len(i.answers): raise VerificationError("\nUnused stub: %s" % i)
python
def verifyStubbedInvocationsAreUsed(*objs): """Ensure stubs are actually used. This functions just ensures that stubbed methods are actually used. Its purpose is to detect interface changes after refactorings. It is meant to be invoked usually without arguments just before :func:`unstub`. """ if objs: theMocks = map(_get_mock_or_raise, objs) else: theMocks = mock_registry.get_registered_mocks() for mock in theMocks: for i in mock.stubbed_invocations: if not i.allow_zero_invocations and i.used < len(i.answers): raise VerificationError("\nUnused stub: %s" % i)
[ "def", "verifyStubbedInvocationsAreUsed", "(", "*", "objs", ")", ":", "if", "objs", ":", "theMocks", "=", "map", "(", "_get_mock_or_raise", ",", "objs", ")", "else", ":", "theMocks", "=", "mock_registry", ".", "get_registered_mocks", "(", ")", "for", "mock", "in", "theMocks", ":", "for", "i", "in", "mock", ".", "stubbed_invocations", ":", "if", "not", "i", ".", "allow_zero_invocations", "and", "i", ".", "used", "<", "len", "(", "i", ".", "answers", ")", ":", "raise", "VerificationError", "(", "\"\\nUnused stub: %s\"", "%", "i", ")" ]
Ensure stubs are actually used. This functions just ensures that stubbed methods are actually used. Its purpose is to detect interface changes after refactorings. It is meant to be invoked usually without arguments just before :func:`unstub`.
[ "Ensure", "stubs", "are", "actually", "used", "." ]
train
https://github.com/kaste/mockito-python/blob/d6b22b003f56ee5b156dbd9d8ba209faf35b6713/mockito/mockito.py#L411-L428
kaste/mockito-python
mockito/utils.py
get_function_host
def get_function_host(fn): """Destructure a given function into its host and its name. The 'host' of a function is a module, for methods it is usually its instance or its class. This is safe only for methods, for module wide, globally declared names it must be considered experimental. For all reasonable fn: ``getattr(*get_function_host(fn)) == fn`` Returns tuple (host, fn-name) Otherwise should raise TypeError """ obj = None try: name = fn.__name__ obj = fn.__self__ except AttributeError: pass if obj is None: # Due to how python imports work, everything that is global on a module # level must be regarded as not safe here. For now, we go for the extra # mile, TBC, because just specifying `os.path.exists` would be 'cool'. # # TLDR;: # E.g. `inspect.getmodule(os.path.exists)` returns `genericpath` bc # that's where `exists` is defined and comes from. But from the point # of view of the user `exists` always comes and is used from `os.path` # which points e.g. to `ntpath`. We thus must patch `ntpath`. # But that's the same for most imports:: # # # b.py # from a import foo # # Now asking `getmodule(b.foo)` it tells you `a`, but we access and use # `b.foo` and we therefore must patch `b`. obj, name = find_invoking_frame_and_try_parse() # safety check! assert getattr(obj, name) == fn return obj, name
python
def get_function_host(fn): """Destructure a given function into its host and its name. The 'host' of a function is a module, for methods it is usually its instance or its class. This is safe only for methods, for module wide, globally declared names it must be considered experimental. For all reasonable fn: ``getattr(*get_function_host(fn)) == fn`` Returns tuple (host, fn-name) Otherwise should raise TypeError """ obj = None try: name = fn.__name__ obj = fn.__self__ except AttributeError: pass if obj is None: # Due to how python imports work, everything that is global on a module # level must be regarded as not safe here. For now, we go for the extra # mile, TBC, because just specifying `os.path.exists` would be 'cool'. # # TLDR;: # E.g. `inspect.getmodule(os.path.exists)` returns `genericpath` bc # that's where `exists` is defined and comes from. But from the point # of view of the user `exists` always comes and is used from `os.path` # which points e.g. to `ntpath`. We thus must patch `ntpath`. # But that's the same for most imports:: # # # b.py # from a import foo # # Now asking `getmodule(b.foo)` it tells you `a`, but we access and use # `b.foo` and we therefore must patch `b`. obj, name = find_invoking_frame_and_try_parse() # safety check! assert getattr(obj, name) == fn return obj, name
[ "def", "get_function_host", "(", "fn", ")", ":", "obj", "=", "None", "try", ":", "name", "=", "fn", ".", "__name__", "obj", "=", "fn", ".", "__self__", "except", "AttributeError", ":", "pass", "if", "obj", "is", "None", ":", "# Due to how python imports work, everything that is global on a module", "# level must be regarded as not safe here. For now, we go for the extra", "# mile, TBC, because just specifying `os.path.exists` would be 'cool'.", "#", "# TLDR;:", "# E.g. `inspect.getmodule(os.path.exists)` returns `genericpath` bc", "# that's where `exists` is defined and comes from. But from the point", "# of view of the user `exists` always comes and is used from `os.path`", "# which points e.g. to `ntpath`. We thus must patch `ntpath`.", "# But that's the same for most imports::", "#", "# # b.py", "# from a import foo", "#", "# Now asking `getmodule(b.foo)` it tells you `a`, but we access and use", "# `b.foo` and we therefore must patch `b`.", "obj", ",", "name", "=", "find_invoking_frame_and_try_parse", "(", ")", "# safety check!", "assert", "getattr", "(", "obj", ",", "name", ")", "==", "fn", "return", "obj", ",", "name" ]
Destructure a given function into its host and its name. The 'host' of a function is a module, for methods it is usually its instance or its class. This is safe only for methods, for module wide, globally declared names it must be considered experimental. For all reasonable fn: ``getattr(*get_function_host(fn)) == fn`` Returns tuple (host, fn-name) Otherwise should raise TypeError
[ "Destructure", "a", "given", "function", "into", "its", "host", "and", "its", "name", "." ]
train
https://github.com/kaste/mockito-python/blob/d6b22b003f56ee5b156dbd9d8ba209faf35b6713/mockito/utils.py#L23-L66
kaste/mockito-python
mockito/utils.py
get_obj
def get_obj(path): """Return obj for given dotted path. Typical inputs for `path` are 'os' or 'os.path' in which case you get a module; or 'os.path.exists' in which case you get a function from that module. Just returns the given input in case it is not a str. Note: Relative imports not supported. Raises ImportError or AttributeError as appropriate. """ # Since we usually pass in mocks here; duck typing is not appropriate # (mocks respond to every attribute). if not isinstance(path, str): return path if path.startswith('.'): raise TypeError('relative imports are not supported') parts = path.split('.') head, tail = parts[0], parts[1:] obj = importlib.import_module(head) # Normally a simple reduce, but we go the extra mile # for good exception messages. for i, name in enumerate(tail): try: obj = getattr(obj, name) except AttributeError: # Note the [:i] instead of [:i+1], so we get the path just # *before* the AttributeError, t.i. the part of it that went ok. module = '.'.join([head] + tail[:i]) try: importlib.import_module(module) except ImportError: raise AttributeError( "object '%s' has no attribute '%s'" % (module, name)) else: raise AttributeError( "module '%s' has no attribute '%s'" % (module, name)) return obj
python
def get_obj(path): """Return obj for given dotted path. Typical inputs for `path` are 'os' or 'os.path' in which case you get a module; or 'os.path.exists' in which case you get a function from that module. Just returns the given input in case it is not a str. Note: Relative imports not supported. Raises ImportError or AttributeError as appropriate. """ # Since we usually pass in mocks here; duck typing is not appropriate # (mocks respond to every attribute). if not isinstance(path, str): return path if path.startswith('.'): raise TypeError('relative imports are not supported') parts = path.split('.') head, tail = parts[0], parts[1:] obj = importlib.import_module(head) # Normally a simple reduce, but we go the extra mile # for good exception messages. for i, name in enumerate(tail): try: obj = getattr(obj, name) except AttributeError: # Note the [:i] instead of [:i+1], so we get the path just # *before* the AttributeError, t.i. the part of it that went ok. module = '.'.join([head] + tail[:i]) try: importlib.import_module(module) except ImportError: raise AttributeError( "object '%s' has no attribute '%s'" % (module, name)) else: raise AttributeError( "module '%s' has no attribute '%s'" % (module, name)) return obj
[ "def", "get_obj", "(", "path", ")", ":", "# Since we usually pass in mocks here; duck typing is not appropriate", "# (mocks respond to every attribute).", "if", "not", "isinstance", "(", "path", ",", "str", ")", ":", "return", "path", "if", "path", ".", "startswith", "(", "'.'", ")", ":", "raise", "TypeError", "(", "'relative imports are not supported'", ")", "parts", "=", "path", ".", "split", "(", "'.'", ")", "head", ",", "tail", "=", "parts", "[", "0", "]", ",", "parts", "[", "1", ":", "]", "obj", "=", "importlib", ".", "import_module", "(", "head", ")", "# Normally a simple reduce, but we go the extra mile", "# for good exception messages.", "for", "i", ",", "name", "in", "enumerate", "(", "tail", ")", ":", "try", ":", "obj", "=", "getattr", "(", "obj", ",", "name", ")", "except", "AttributeError", ":", "# Note the [:i] instead of [:i+1], so we get the path just", "# *before* the AttributeError, t.i. the part of it that went ok.", "module", "=", "'.'", ".", "join", "(", "[", "head", "]", "+", "tail", "[", ":", "i", "]", ")", "try", ":", "importlib", ".", "import_module", "(", "module", ")", "except", "ImportError", ":", "raise", "AttributeError", "(", "\"object '%s' has no attribute '%s'\"", "%", "(", "module", ",", "name", ")", ")", "else", ":", "raise", "AttributeError", "(", "\"module '%s' has no attribute '%s'\"", "%", "(", "module", ",", "name", ")", ")", "return", "obj" ]
Return obj for given dotted path. Typical inputs for `path` are 'os' or 'os.path' in which case you get a module; or 'os.path.exists' in which case you get a function from that module. Just returns the given input in case it is not a str. Note: Relative imports not supported. Raises ImportError or AttributeError as appropriate.
[ "Return", "obj", "for", "given", "dotted", "path", "." ]
train
https://github.com/kaste/mockito-python/blob/d6b22b003f56ee5b156dbd9d8ba209faf35b6713/mockito/utils.py#L107-L150
kaste/mockito-python
mockito/utils.py
get_obj_attr_tuple
def get_obj_attr_tuple(path): """Split path into (obj, attribute) tuple. Given `path` is 'os.path.exists' will thus return `(os.path, 'exists')` If path is not a str, delegates to `get_function_host(path)` """ if not isinstance(path, str): return get_function_host(path) if path.startswith('.'): raise TypeError('relative imports are not supported') try: leading, end = path.rsplit('.', 1) except ValueError: raise TypeError('path must have dots') return get_obj(leading), end
python
def get_obj_attr_tuple(path): """Split path into (obj, attribute) tuple. Given `path` is 'os.path.exists' will thus return `(os.path, 'exists')` If path is not a str, delegates to `get_function_host(path)` """ if not isinstance(path, str): return get_function_host(path) if path.startswith('.'): raise TypeError('relative imports are not supported') try: leading, end = path.rsplit('.', 1) except ValueError: raise TypeError('path must have dots') return get_obj(leading), end
[ "def", "get_obj_attr_tuple", "(", "path", ")", ":", "if", "not", "isinstance", "(", "path", ",", "str", ")", ":", "return", "get_function_host", "(", "path", ")", "if", "path", ".", "startswith", "(", "'.'", ")", ":", "raise", "TypeError", "(", "'relative imports are not supported'", ")", "try", ":", "leading", ",", "end", "=", "path", ".", "rsplit", "(", "'.'", ",", "1", ")", "except", "ValueError", ":", "raise", "TypeError", "(", "'path must have dots'", ")", "return", "get_obj", "(", "leading", ")", ",", "end" ]
Split path into (obj, attribute) tuple. Given `path` is 'os.path.exists' will thus return `(os.path, 'exists')` If path is not a str, delegates to `get_function_host(path)`
[ "Split", "path", "into", "(", "obj", "attribute", ")", "tuple", "." ]
train
https://github.com/kaste/mockito-python/blob/d6b22b003f56ee5b156dbd9d8ba209faf35b6713/mockito/utils.py#L152-L171
kaste/mockito-python
mockito/spying.py
spy
def spy(object): """Spy an object. Spying means that all functions will behave as before, so they will be side effects, but the interactions can be verified afterwards. Returns Dummy-like, almost empty object as proxy to `object`. The *returned* object must be injected and used by the code under test; after that all interactions can be verified as usual. T.i. the original object **will not be patched**, and has no further knowledge as before. E.g.:: import time time = spy(time) # inject time do_work(..., time) verify(time).time() """ if inspect.isclass(object) or inspect.ismodule(object): class_ = None else: class_ = object.__class__ class Spy(_Dummy): if class_: __class__ = class_ def __getattr__(self, method_name): return RememberedProxyInvocation(theMock, method_name) def __repr__(self): name = 'Spied' if class_: name += class_.__name__ return "<%s id=%s>" % (name, id(self)) obj = Spy() theMock = Mock(obj, strict=True, spec=object) mock_registry.register(obj, theMock) return obj
python
def spy(object): """Spy an object. Spying means that all functions will behave as before, so they will be side effects, but the interactions can be verified afterwards. Returns Dummy-like, almost empty object as proxy to `object`. The *returned* object must be injected and used by the code under test; after that all interactions can be verified as usual. T.i. the original object **will not be patched**, and has no further knowledge as before. E.g.:: import time time = spy(time) # inject time do_work(..., time) verify(time).time() """ if inspect.isclass(object) or inspect.ismodule(object): class_ = None else: class_ = object.__class__ class Spy(_Dummy): if class_: __class__ = class_ def __getattr__(self, method_name): return RememberedProxyInvocation(theMock, method_name) def __repr__(self): name = 'Spied' if class_: name += class_.__name__ return "<%s id=%s>" % (name, id(self)) obj = Spy() theMock = Mock(obj, strict=True, spec=object) mock_registry.register(obj, theMock) return obj
[ "def", "spy", "(", "object", ")", ":", "if", "inspect", ".", "isclass", "(", "object", ")", "or", "inspect", ".", "ismodule", "(", "object", ")", ":", "class_", "=", "None", "else", ":", "class_", "=", "object", ".", "__class__", "class", "Spy", "(", "_Dummy", ")", ":", "if", "class_", ":", "__class__", "=", "class_", "def", "__getattr__", "(", "self", ",", "method_name", ")", ":", "return", "RememberedProxyInvocation", "(", "theMock", ",", "method_name", ")", "def", "__repr__", "(", "self", ")", ":", "name", "=", "'Spied'", "if", "class_", ":", "name", "+=", "class_", ".", "__name__", "return", "\"<%s id=%s>\"", "%", "(", "name", ",", "id", "(", "self", ")", ")", "obj", "=", "Spy", "(", ")", "theMock", "=", "Mock", "(", "obj", ",", "strict", "=", "True", ",", "spec", "=", "object", ")", "mock_registry", ".", "register", "(", "obj", ",", "theMock", ")", "return", "obj" ]
Spy an object. Spying means that all functions will behave as before, so they will be side effects, but the interactions can be verified afterwards. Returns Dummy-like, almost empty object as proxy to `object`. The *returned* object must be injected and used by the code under test; after that all interactions can be verified as usual. T.i. the original object **will not be patched**, and has no further knowledge as before. E.g.:: import time time = spy(time) # inject time do_work(..., time) verify(time).time()
[ "Spy", "an", "object", "." ]
train
https://github.com/kaste/mockito-python/blob/d6b22b003f56ee5b156dbd9d8ba209faf35b6713/mockito/spying.py#L33-L78
kaste/mockito-python
mockito/spying.py
spy2
def spy2(fn): # type: (...) -> None """Spy usage of given `fn`. Patches the module, class or object `fn` lives in, so that all interactions can be recorded; otherwise executes `fn` as before, so that all side effects happen as before. E.g.:: import time spy(time.time) do_work(...) # nothing injected, uses global patched `time` module verify(time).time() Note that builtins often cannot be patched because they're read-only. """ if isinstance(fn, str): answer = get_obj(fn) else: answer = fn when2(fn, Ellipsis).thenAnswer(answer)
python
def spy2(fn): # type: (...) -> None """Spy usage of given `fn`. Patches the module, class or object `fn` lives in, so that all interactions can be recorded; otherwise executes `fn` as before, so that all side effects happen as before. E.g.:: import time spy(time.time) do_work(...) # nothing injected, uses global patched `time` module verify(time).time() Note that builtins often cannot be patched because they're read-only. """ if isinstance(fn, str): answer = get_obj(fn) else: answer = fn when2(fn, Ellipsis).thenAnswer(answer)
[ "def", "spy2", "(", "fn", ")", ":", "# type: (...) -> None", "if", "isinstance", "(", "fn", ",", "str", ")", ":", "answer", "=", "get_obj", "(", "fn", ")", "else", ":", "answer", "=", "fn", "when2", "(", "fn", ",", "Ellipsis", ")", ".", "thenAnswer", "(", "answer", ")" ]
Spy usage of given `fn`. Patches the module, class or object `fn` lives in, so that all interactions can be recorded; otherwise executes `fn` as before, so that all side effects happen as before. E.g.:: import time spy(time.time) do_work(...) # nothing injected, uses global patched `time` module verify(time).time() Note that builtins often cannot be patched because they're read-only.
[ "Spy", "usage", "of", "given", "fn", "." ]
train
https://github.com/kaste/mockito-python/blob/d6b22b003f56ee5b156dbd9d8ba209faf35b6713/mockito/spying.py#L81-L104
kaste/mockito-python
mockito/mocking.py
mock
def mock(config_or_spec=None, spec=None, strict=OMITTED): """Create 'empty' objects ('Mocks'). Will create an empty unconfigured object, that you can pass around. All interactions (method calls) will be recorded and can be verified using :func:`verify` et.al. A plain `mock()` will be not `strict`, and thus all methods regardless of the arguments will return ``None``. .. note:: Technically all attributes will return an internal interface. Because of that a simple ``if mock().foo:`` will surprisingly pass. If you set strict to ``True``: ``mock(strict=True)`` all unexpected interactions will raise an error instead. You configure a mock using :func:`when`, :func:`when2` or :func:`expect`. You can also very conveniently just pass in a dict here:: response = mock({'text': 'ok', 'raise_for_status': lambda: None}) You can also create an empty Mock which is specced against a given `spec`: ``mock(requests.Response)``. These mock are by default strict, thus they raise if you want to stub a method, the spec does not implement. Mockito will also match the function signature. You can pre-configure a specced mock as well:: response = mock({'json': lambda: {'status': 'Ok'}}, spec=requests.Response) Mocks are by default callable. Configure the callable behavior using `when`:: dummy = mock() when(dummy).__call_(1).thenReturn(2) All other magic methods must be configured this way or they will raise an AttributeError. See :func:`verify` to verify your interactions after usage. 
""" if type(config_or_spec) is dict: config = config_or_spec else: config = {} spec = config_or_spec if strict is OMITTED: strict = False if spec is None else True class Dummy(_Dummy): if spec: __class__ = spec # make isinstance work def __getattr__(self, method_name): if strict: raise AttributeError( "'Dummy' has no attribute %r configured" % method_name) return functools.partial( remembered_invocation_builder, theMock, method_name) def __repr__(self): name = 'Dummy' if spec: name += spec.__name__ return "<%s id=%s>" % (name, id(self)) # That's a tricky one: The object we will return is an *instance* of our # Dummy class, but the mock we register will point and patch the class. # T.i. so that magic methods (`__call__` etc.) can be configured. obj = Dummy() theMock = Mock(Dummy, strict=strict, spec=spec) for n, v in config.items(): if inspect.isfunction(v): invocation.StubbedInvocation(theMock, n)(Ellipsis).thenAnswer(v) else: setattr(obj, n, v) mock_registry.register(obj, theMock) return obj
python
def mock(config_or_spec=None, spec=None, strict=OMITTED): """Create 'empty' objects ('Mocks'). Will create an empty unconfigured object, that you can pass around. All interactions (method calls) will be recorded and can be verified using :func:`verify` et.al. A plain `mock()` will be not `strict`, and thus all methods regardless of the arguments will return ``None``. .. note:: Technically all attributes will return an internal interface. Because of that a simple ``if mock().foo:`` will surprisingly pass. If you set strict to ``True``: ``mock(strict=True)`` all unexpected interactions will raise an error instead. You configure a mock using :func:`when`, :func:`when2` or :func:`expect`. You can also very conveniently just pass in a dict here:: response = mock({'text': 'ok', 'raise_for_status': lambda: None}) You can also create an empty Mock which is specced against a given `spec`: ``mock(requests.Response)``. These mock are by default strict, thus they raise if you want to stub a method, the spec does not implement. Mockito will also match the function signature. You can pre-configure a specced mock as well:: response = mock({'json': lambda: {'status': 'Ok'}}, spec=requests.Response) Mocks are by default callable. Configure the callable behavior using `when`:: dummy = mock() when(dummy).__call_(1).thenReturn(2) All other magic methods must be configured this way or they will raise an AttributeError. See :func:`verify` to verify your interactions after usage. 
""" if type(config_or_spec) is dict: config = config_or_spec else: config = {} spec = config_or_spec if strict is OMITTED: strict = False if spec is None else True class Dummy(_Dummy): if spec: __class__ = spec # make isinstance work def __getattr__(self, method_name): if strict: raise AttributeError( "'Dummy' has no attribute %r configured" % method_name) return functools.partial( remembered_invocation_builder, theMock, method_name) def __repr__(self): name = 'Dummy' if spec: name += spec.__name__ return "<%s id=%s>" % (name, id(self)) # That's a tricky one: The object we will return is an *instance* of our # Dummy class, but the mock we register will point and patch the class. # T.i. so that magic methods (`__call__` etc.) can be configured. obj = Dummy() theMock = Mock(Dummy, strict=strict, spec=spec) for n, v in config.items(): if inspect.isfunction(v): invocation.StubbedInvocation(theMock, n)(Ellipsis).thenAnswer(v) else: setattr(obj, n, v) mock_registry.register(obj, theMock) return obj
[ "def", "mock", "(", "config_or_spec", "=", "None", ",", "spec", "=", "None", ",", "strict", "=", "OMITTED", ")", ":", "if", "type", "(", "config_or_spec", ")", "is", "dict", ":", "config", "=", "config_or_spec", "else", ":", "config", "=", "{", "}", "spec", "=", "config_or_spec", "if", "strict", "is", "OMITTED", ":", "strict", "=", "False", "if", "spec", "is", "None", "else", "True", "class", "Dummy", "(", "_Dummy", ")", ":", "if", "spec", ":", "__class__", "=", "spec", "# make isinstance work", "def", "__getattr__", "(", "self", ",", "method_name", ")", ":", "if", "strict", ":", "raise", "AttributeError", "(", "\"'Dummy' has no attribute %r configured\"", "%", "method_name", ")", "return", "functools", ".", "partial", "(", "remembered_invocation_builder", ",", "theMock", ",", "method_name", ")", "def", "__repr__", "(", "self", ")", ":", "name", "=", "'Dummy'", "if", "spec", ":", "name", "+=", "spec", ".", "__name__", "return", "\"<%s id=%s>\"", "%", "(", "name", ",", "id", "(", "self", ")", ")", "# That's a tricky one: The object we will return is an *instance* of our", "# Dummy class, but the mock we register will point and patch the class.", "# T.i. so that magic methods (`__call__` etc.) can be configured.", "obj", "=", "Dummy", "(", ")", "theMock", "=", "Mock", "(", "Dummy", ",", "strict", "=", "strict", ",", "spec", "=", "spec", ")", "for", "n", ",", "v", "in", "config", ".", "items", "(", ")", ":", "if", "inspect", ".", "isfunction", "(", "v", ")", ":", "invocation", ".", "StubbedInvocation", "(", "theMock", ",", "n", ")", "(", "Ellipsis", ")", ".", "thenAnswer", "(", "v", ")", "else", ":", "setattr", "(", "obj", ",", "n", ",", "v", ")", "mock_registry", ".", "register", "(", "obj", ",", "theMock", ")", "return", "obj" ]
Create 'empty' objects ('Mocks'). Will create an empty unconfigured object, that you can pass around. All interactions (method calls) will be recorded and can be verified using :func:`verify` et.al. A plain `mock()` will be not `strict`, and thus all methods regardless of the arguments will return ``None``. .. note:: Technically all attributes will return an internal interface. Because of that a simple ``if mock().foo:`` will surprisingly pass. If you set strict to ``True``: ``mock(strict=True)`` all unexpected interactions will raise an error instead. You configure a mock using :func:`when`, :func:`when2` or :func:`expect`. You can also very conveniently just pass in a dict here:: response = mock({'text': 'ok', 'raise_for_status': lambda: None}) You can also create an empty Mock which is specced against a given `spec`: ``mock(requests.Response)``. These mock are by default strict, thus they raise if you want to stub a method, the spec does not implement. Mockito will also match the function signature. You can pre-configure a specced mock as well:: response = mock({'json': lambda: {'status': 'Ok'}}, spec=requests.Response) Mocks are by default callable. Configure the callable behavior using `when`:: dummy = mock() when(dummy).__call_(1).thenReturn(2) All other magic methods must be configured this way or they will raise an AttributeError. See :func:`verify` to verify your interactions after usage.
[ "Create", "empty", "objects", "(", "Mocks", ")", "." ]
train
https://github.com/kaste/mockito-python/blob/d6b22b003f56ee5b156dbd9d8ba209faf35b6713/mockito/mocking.py#L193-L279
davidblaisonneau-orange/foreman
foreman/smartProxies.py
SmartProxies.importPuppetClasses
def importPuppetClasses(self, smartProxyId): """ Function importPuppetClasses Force the reload of puppet classes @param smartProxyId: smartProxy Id @return RETURN: the API result """ return self.api.create('{}/{}/import_puppetclasses' .format(self.objName, smartProxyId), '{}')
python
def importPuppetClasses(self, smartProxyId): """ Function importPuppetClasses Force the reload of puppet classes @param smartProxyId: smartProxy Id @return RETURN: the API result """ return self.api.create('{}/{}/import_puppetclasses' .format(self.objName, smartProxyId), '{}')
[ "def", "importPuppetClasses", "(", "self", ",", "smartProxyId", ")", ":", "return", "self", ".", "api", ".", "create", "(", "'{}/{}/import_puppetclasses'", ".", "format", "(", "self", ".", "objName", ",", "smartProxyId", ")", ",", "'{}'", ")" ]
Function importPuppetClasses Force the reload of puppet classes @param smartProxyId: smartProxy Id @return RETURN: the API result
[ "Function", "importPuppetClasses", "Force", "the", "reload", "of", "puppet", "classes" ]
train
https://github.com/davidblaisonneau-orange/foreman/blob/acb8fd8d74657cfac3b25c82e9c6028b93eb6c92/foreman/smartProxies.py#L29-L37
developersociety/django-glitter
glitter/templates.py
get_templates
def get_templates(model): """ Return a list of templates usable by a model. """ for template_name, template in templates.items(): if issubclass(template.model, model): yield (template_name, template.layout._meta.verbose_name)
python
def get_templates(model): """ Return a list of templates usable by a model. """ for template_name, template in templates.items(): if issubclass(template.model, model): yield (template_name, template.layout._meta.verbose_name)
[ "def", "get_templates", "(", "model", ")", ":", "for", "template_name", ",", "template", "in", "templates", ".", "items", "(", ")", ":", "if", "issubclass", "(", "template", ".", "model", ",", "model", ")", ":", "yield", "(", "template_name", ",", "template", ".", "layout", ".", "_meta", ".", "verbose_name", ")" ]
Return a list of templates usable by a model.
[ "Return", "a", "list", "of", "templates", "usable", "by", "a", "model", "." ]
train
https://github.com/developersociety/django-glitter/blob/2c0280ec83afee80deee94ee3934fc54239c2e87/glitter/templates.py#L26-L30
developersociety/django-glitter
glitter/templates.py
attach
def attach(*layouts, **kwargs): """ Registers the given layout(s) classes admin site: @pages.register(Page) class Default(PageLayout): pass """ def _model_admin_wrapper(layout_class): register(layout_class, layouts[0]) return layout_class return _model_admin_wrapper
python
def attach(*layouts, **kwargs): """ Registers the given layout(s) classes admin site: @pages.register(Page) class Default(PageLayout): pass """ def _model_admin_wrapper(layout_class): register(layout_class, layouts[0]) return layout_class return _model_admin_wrapper
[ "def", "attach", "(", "*", "layouts", ",", "*", "*", "kwargs", ")", ":", "def", "_model_admin_wrapper", "(", "layout_class", ")", ":", "register", "(", "layout_class", ",", "layouts", "[", "0", "]", ")", "return", "layout_class", "return", "_model_admin_wrapper" ]
Registers the given layout(s) classes admin site: @pages.register(Page) class Default(PageLayout): pass
[ "Registers", "the", "given", "layout", "(", "s", ")", "classes", "admin", "site", ":" ]
train
https://github.com/developersociety/django-glitter/blob/2c0280ec83afee80deee94ee3934fc54239c2e87/glitter/templates.py#L38-L51
davidblaisonneau-orange/foreman
foreman/itemOperatingSystem.py
ItemOperatingSystem.enhance
def enhance(self): """ Function enhance Enhance the object with new item or enhanced items """ self.update({'os_default_templates': SubDict(self.api, self.objName, self.payloadObj, self.key, SubItemOsDefaultTemplate)}) self.update({'config_templates': SubDict(self.api, self.objName, self.payloadObj, self.key, SubItemConfigTemplate)}) self.update({'ptables': SubDict(self.api, self.objName, self.payloadObj, self.key, SubItemPTable)}) self.update({'media': SubDict(self.api, self.objName, self.payloadObj, self.key, SubItemMedia)}) self.update({'architectures': SubDict(self.api, self.objName, self.payloadObj, self.key, SubItemArchitecture)})
python
def enhance(self): """ Function enhance Enhance the object with new item or enhanced items """ self.update({'os_default_templates': SubDict(self.api, self.objName, self.payloadObj, self.key, SubItemOsDefaultTemplate)}) self.update({'config_templates': SubDict(self.api, self.objName, self.payloadObj, self.key, SubItemConfigTemplate)}) self.update({'ptables': SubDict(self.api, self.objName, self.payloadObj, self.key, SubItemPTable)}) self.update({'media': SubDict(self.api, self.objName, self.payloadObj, self.key, SubItemMedia)}) self.update({'architectures': SubDict(self.api, self.objName, self.payloadObj, self.key, SubItemArchitecture)})
[ "def", "enhance", "(", "self", ")", ":", "self", ".", "update", "(", "{", "'os_default_templates'", ":", "SubDict", "(", "self", ".", "api", ",", "self", ".", "objName", ",", "self", ".", "payloadObj", ",", "self", ".", "key", ",", "SubItemOsDefaultTemplate", ")", "}", ")", "self", ".", "update", "(", "{", "'config_templates'", ":", "SubDict", "(", "self", ".", "api", ",", "self", ".", "objName", ",", "self", ".", "payloadObj", ",", "self", ".", "key", ",", "SubItemConfigTemplate", ")", "}", ")", "self", ".", "update", "(", "{", "'ptables'", ":", "SubDict", "(", "self", ".", "api", ",", "self", ".", "objName", ",", "self", ".", "payloadObj", ",", "self", ".", "key", ",", "SubItemPTable", ")", "}", ")", "self", ".", "update", "(", "{", "'media'", ":", "SubDict", "(", "self", ".", "api", ",", "self", ".", "objName", ",", "self", ".", "payloadObj", ",", "self", ".", "key", ",", "SubItemMedia", ")", "}", ")", "self", ".", "update", "(", "{", "'architectures'", ":", "SubDict", "(", "self", ".", "api", ",", "self", ".", "objName", ",", "self", ".", "payloadObj", ",", "self", ".", "key", ",", "SubItemArchitecture", ")", "}", ")" ]
Function enhance Enhance the object with new item or enhanced items
[ "Function", "enhance", "Enhance", "the", "object", "with", "new", "item", "or", "enhanced", "items" ]
train
https://github.com/davidblaisonneau-orange/foreman/blob/acb8fd8d74657cfac3b25c82e9c6028b93eb6c92/foreman/itemOperatingSystem.py#L40-L63
quizl/quizler
quizler/lib.py
get_api_envs
def get_api_envs(): """Get required API keys from environment variables.""" client_id = os.environ.get('CLIENT_ID') user_id = os.environ.get('USER_ID') if not client_id or not user_id: raise ValueError('API keys are not found in the environment') return client_id, user_id
python
def get_api_envs(): """Get required API keys from environment variables.""" client_id = os.environ.get('CLIENT_ID') user_id = os.environ.get('USER_ID') if not client_id or not user_id: raise ValueError('API keys are not found in the environment') return client_id, user_id
[ "def", "get_api_envs", "(", ")", ":", "client_id", "=", "os", ".", "environ", ".", "get", "(", "'CLIENT_ID'", ")", "user_id", "=", "os", ".", "environ", ".", "get", "(", "'USER_ID'", ")", "if", "not", "client_id", "or", "not", "user_id", ":", "raise", "ValueError", "(", "'API keys are not found in the environment'", ")", "return", "client_id", ",", "user_id" ]
Get required API keys from environment variables.
[ "Get", "required", "API", "keys", "from", "environment", "variables", "." ]
train
https://github.com/quizl/quizler/blob/44b3fd91f7074e7013ffde8147455f45ebdccc46/quizler/lib.py#L8-L14
quizl/quizler
quizler/lib.py
api_call
def api_call(method, end_point, params=None, client_id=None, access_token=None): """Call given API end_point with API keys. :param method: HTTP method (e.g. 'get', 'delete'). :param end_point: API endpoint (e.g. 'users/john/sets'). :param params: Dictionary to be sent in the query string (e.g. {'myparam': 'myval'}) :param client_id: Quizlet client ID as string. :param access_token: Quizlet access token as string. client_id and access_token are mutually exclusive but mandatory. """ if bool(client_id) == bool(access_token): raise ValueError('Either client_id or access_token') url = 'https://api.quizlet.com/2.0/{}'.format(end_point) if not params: params = {} if client_id: params['client_id'] = client_id headers = {'Authorization': 'Bearer {}'.format(access_token)} if access_token else None response = requests.request(method, url, params=params, headers=headers) if int(response.status_code / 100) != 2: error_title = '' try: error_title += ', ' + response.json()['error_title'] except ValueError: pass except KeyError: pass raise ValueError( '{} returned {}{}'.format(url, response.status_code, error_title) ) try: return response.json() except json.decoder.JSONDecodeError: pass
python
def api_call(method, end_point, params=None, client_id=None, access_token=None): """Call given API end_point with API keys. :param method: HTTP method (e.g. 'get', 'delete'). :param end_point: API endpoint (e.g. 'users/john/sets'). :param params: Dictionary to be sent in the query string (e.g. {'myparam': 'myval'}) :param client_id: Quizlet client ID as string. :param access_token: Quizlet access token as string. client_id and access_token are mutually exclusive but mandatory. """ if bool(client_id) == bool(access_token): raise ValueError('Either client_id or access_token') url = 'https://api.quizlet.com/2.0/{}'.format(end_point) if not params: params = {} if client_id: params['client_id'] = client_id headers = {'Authorization': 'Bearer {}'.format(access_token)} if access_token else None response = requests.request(method, url, params=params, headers=headers) if int(response.status_code / 100) != 2: error_title = '' try: error_title += ', ' + response.json()['error_title'] except ValueError: pass except KeyError: pass raise ValueError( '{} returned {}{}'.format(url, response.status_code, error_title) ) try: return response.json() except json.decoder.JSONDecodeError: pass
[ "def", "api_call", "(", "method", ",", "end_point", ",", "params", "=", "None", ",", "client_id", "=", "None", ",", "access_token", "=", "None", ")", ":", "if", "bool", "(", "client_id", ")", "==", "bool", "(", "access_token", ")", ":", "raise", "ValueError", "(", "'Either client_id or access_token'", ")", "url", "=", "'https://api.quizlet.com/2.0/{}'", ".", "format", "(", "end_point", ")", "if", "not", "params", ":", "params", "=", "{", "}", "if", "client_id", ":", "params", "[", "'client_id'", "]", "=", "client_id", "headers", "=", "{", "'Authorization'", ":", "'Bearer {}'", ".", "format", "(", "access_token", ")", "}", "if", "access_token", "else", "None", "response", "=", "requests", ".", "request", "(", "method", ",", "url", ",", "params", "=", "params", ",", "headers", "=", "headers", ")", "if", "int", "(", "response", ".", "status_code", "/", "100", ")", "!=", "2", ":", "error_title", "=", "''", "try", ":", "error_title", "+=", "', '", "+", "response", ".", "json", "(", ")", "[", "'error_title'", "]", "except", "ValueError", ":", "pass", "except", "KeyError", ":", "pass", "raise", "ValueError", "(", "'{} returned {}{}'", ".", "format", "(", "url", ",", "response", ".", "status_code", ",", "error_title", ")", ")", "try", ":", "return", "response", ".", "json", "(", ")", "except", "json", ".", "decoder", ".", "JSONDecodeError", ":", "pass" ]
Call given API end_point with API keys. :param method: HTTP method (e.g. 'get', 'delete'). :param end_point: API endpoint (e.g. 'users/john/sets'). :param params: Dictionary to be sent in the query string (e.g. {'myparam': 'myval'}) :param client_id: Quizlet client ID as string. :param access_token: Quizlet access token as string. client_id and access_token are mutually exclusive but mandatory.
[ "Call", "given", "API", "end_point", "with", "API", "keys", ".", ":", "param", "method", ":", "HTTP", "method", "(", "e", ".", "g", ".", "get", "delete", ")", ".", ":", "param", "end_point", ":", "API", "endpoint", "(", "e", ".", "g", ".", "users", "/", "john", "/", "sets", ")", ".", ":", "param", "params", ":", "Dictionary", "to", "be", "sent", "in", "the", "query", "string", "(", "e", ".", "g", ".", "{", "myparam", ":", "myval", "}", ")", ":", "param", "client_id", ":", "Quizlet", "client", "ID", "as", "string", ".", ":", "param", "access_token", ":", "Quizlet", "access", "token", "as", "string", ".", "client_id", "and", "access_token", "are", "mutually", "exclusive", "but", "mandatory", "." ]
train
https://github.com/quizl/quizler/blob/44b3fd91f7074e7013ffde8147455f45ebdccc46/quizler/lib.py#L17-L55
qubell/contrib-python-qubell-client
qubell/api/private/service.py
ServiceMixin.request_upload_secret
def request_upload_secret(self, secret_id): """ :return: json with "keyId" as secret and "url" for posting key """ return self._router.post_request_upload_secret(org_id=self.organizationId, instance_id=self.instanceId, secret_id=secret_id).json()
python
def request_upload_secret(self, secret_id): """ :return: json with "keyId" as secret and "url" for posting key """ return self._router.post_request_upload_secret(org_id=self.organizationId, instance_id=self.instanceId, secret_id=secret_id).json()
[ "def", "request_upload_secret", "(", "self", ",", "secret_id", ")", ":", "return", "self", ".", "_router", ".", "post_request_upload_secret", "(", "org_id", "=", "self", ".", "organizationId", ",", "instance_id", "=", "self", ".", "instanceId", ",", "secret_id", "=", "secret_id", ")", ".", "json", "(", ")" ]
:return: json with "keyId" as secret and "url" for posting key
[ ":", "return", ":", "json", "with", "keyId", "as", "secret", "and", "url", "for", "posting", "key" ]
train
https://github.com/qubell/contrib-python-qubell-client/blob/4586ea11d5103c2ff9607d3ed922b5a0991b8845/qubell/api/private/service.py#L119-L125
davidblaisonneau-orange/foreman
foreman/subnets.py
Subnets.checkAndCreate
def checkAndCreate(self, key, payload, domainId): """ Function checkAndCreate Check if a subnet exists and create it if not @param key: The targeted subnet @param payload: The targeted subnet description @param domainId: The domainId to be attached wiuth the subnet @return RETURN: The id of the subnet """ if key not in self: self[key] = payload oid = self[key]['id'] if not oid: return False #~ Ensure subnet contains the domain subnetDomainIds = [] for domain in self[key]['domains']: subnetDomainIds.append(domain['id']) if domainId not in subnetDomainIds: subnetDomainIds.append(domainId) self[key]["domain_ids"] = subnetDomainIds if len(self[key]["domains"]) is not len(subnetDomainIds): return False return oid
python
def checkAndCreate(self, key, payload, domainId): """ Function checkAndCreate Check if a subnet exists and create it if not @param key: The targeted subnet @param payload: The targeted subnet description @param domainId: The domainId to be attached wiuth the subnet @return RETURN: The id of the subnet """ if key not in self: self[key] = payload oid = self[key]['id'] if not oid: return False #~ Ensure subnet contains the domain subnetDomainIds = [] for domain in self[key]['domains']: subnetDomainIds.append(domain['id']) if domainId not in subnetDomainIds: subnetDomainIds.append(domainId) self[key]["domain_ids"] = subnetDomainIds if len(self[key]["domains"]) is not len(subnetDomainIds): return False return oid
[ "def", "checkAndCreate", "(", "self", ",", "key", ",", "payload", ",", "domainId", ")", ":", "if", "key", "not", "in", "self", ":", "self", "[", "key", "]", "=", "payload", "oid", "=", "self", "[", "key", "]", "[", "'id'", "]", "if", "not", "oid", ":", "return", "False", "#~ Ensure subnet contains the domain", "subnetDomainIds", "=", "[", "]", "for", "domain", "in", "self", "[", "key", "]", "[", "'domains'", "]", ":", "subnetDomainIds", ".", "append", "(", "domain", "[", "'id'", "]", ")", "if", "domainId", "not", "in", "subnetDomainIds", ":", "subnetDomainIds", ".", "append", "(", "domainId", ")", "self", "[", "key", "]", "[", "\"domain_ids\"", "]", "=", "subnetDomainIds", "if", "len", "(", "self", "[", "key", "]", "[", "\"domains\"", "]", ")", "is", "not", "len", "(", "subnetDomainIds", ")", ":", "return", "False", "return", "oid" ]
Function checkAndCreate Check if a subnet exists and create it if not @param key: The targeted subnet @param payload: The targeted subnet description @param domainId: The domainId to be attached wiuth the subnet @return RETURN: The id of the subnet
[ "Function", "checkAndCreate", "Check", "if", "a", "subnet", "exists", "and", "create", "it", "if", "not" ]
train
https://github.com/davidblaisonneau-orange/foreman/blob/acb8fd8d74657cfac3b25c82e9c6028b93eb6c92/foreman/subnets.py#L29-L52
davidblaisonneau-orange/foreman
foreman/subnets.py
Subnets.removeDomain
def removeDomain(self, subnetId, domainId): """ Function removeDomain Delete a domain from a subnet @param subnetId: The subnet Id @param domainId: The domainId to be attached wiuth the subnet @return RETURN: boolean """ subnetDomainIds = [] for domain in self[subnetId]['domains']: subnetDomainIds.append(domain['id']) subnetDomainIds.remove(domainId) self[subnetId]["domain_ids"] = subnetDomainIds return len(self[subnetId]["domains"]) is len(subnetDomainIds)
python
def removeDomain(self, subnetId, domainId): """ Function removeDomain Delete a domain from a subnet @param subnetId: The subnet Id @param domainId: The domainId to be attached wiuth the subnet @return RETURN: boolean """ subnetDomainIds = [] for domain in self[subnetId]['domains']: subnetDomainIds.append(domain['id']) subnetDomainIds.remove(domainId) self[subnetId]["domain_ids"] = subnetDomainIds return len(self[subnetId]["domains"]) is len(subnetDomainIds)
[ "def", "removeDomain", "(", "self", ",", "subnetId", ",", "domainId", ")", ":", "subnetDomainIds", "=", "[", "]", "for", "domain", "in", "self", "[", "subnetId", "]", "[", "'domains'", "]", ":", "subnetDomainIds", ".", "append", "(", "domain", "[", "'id'", "]", ")", "subnetDomainIds", ".", "remove", "(", "domainId", ")", "self", "[", "subnetId", "]", "[", "\"domain_ids\"", "]", "=", "subnetDomainIds", "return", "len", "(", "self", "[", "subnetId", "]", "[", "\"domains\"", "]", ")", "is", "len", "(", "subnetDomainIds", ")" ]
Function removeDomain Delete a domain from a subnet @param subnetId: The subnet Id @param domainId: The domainId to be attached wiuth the subnet @return RETURN: boolean
[ "Function", "removeDomain", "Delete", "a", "domain", "from", "a", "subnet" ]
train
https://github.com/davidblaisonneau-orange/foreman/blob/acb8fd8d74657cfac3b25c82e9c6028b93eb6c92/foreman/subnets.py#L54-L67
MatterMiners/cobald
cobald/daemon/runners/guard.py
exclusive
def exclusive(via=threading.Lock): """ Mark a callable as exclusive :param via: factory for a Lock to guard the callable Guards the callable against being entered again before completion. Explicitly raises a :py:exc:`RuntimeError` on violation. :note: If applied to a method, it is exclusive across all instances. """ def make_exclusive(fnc): fnc_guard = via() @functools.wraps(fnc) def exclusive_call(*args, **kwargs): if fnc_guard.acquire(blocking=False): try: return fnc(*args, **kwargs) finally: fnc_guard.release() else: raise RuntimeError('exclusive call to %s violated') return exclusive_call return make_exclusive
python
def exclusive(via=threading.Lock): """ Mark a callable as exclusive :param via: factory for a Lock to guard the callable Guards the callable against being entered again before completion. Explicitly raises a :py:exc:`RuntimeError` on violation. :note: If applied to a method, it is exclusive across all instances. """ def make_exclusive(fnc): fnc_guard = via() @functools.wraps(fnc) def exclusive_call(*args, **kwargs): if fnc_guard.acquire(blocking=False): try: return fnc(*args, **kwargs) finally: fnc_guard.release() else: raise RuntimeError('exclusive call to %s violated') return exclusive_call return make_exclusive
[ "def", "exclusive", "(", "via", "=", "threading", ".", "Lock", ")", ":", "def", "make_exclusive", "(", "fnc", ")", ":", "fnc_guard", "=", "via", "(", ")", "@", "functools", ".", "wraps", "(", "fnc", ")", "def", "exclusive_call", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "if", "fnc_guard", ".", "acquire", "(", "blocking", "=", "False", ")", ":", "try", ":", "return", "fnc", "(", "*", "args", ",", "*", "*", "kwargs", ")", "finally", ":", "fnc_guard", ".", "release", "(", ")", "else", ":", "raise", "RuntimeError", "(", "'exclusive call to %s violated'", ")", "return", "exclusive_call", "return", "make_exclusive" ]
Mark a callable as exclusive :param via: factory for a Lock to guard the callable Guards the callable against being entered again before completion. Explicitly raises a :py:exc:`RuntimeError` on violation. :note: If applied to a method, it is exclusive across all instances.
[ "Mark", "a", "callable", "as", "exclusive" ]
train
https://github.com/MatterMiners/cobald/blob/264138de4382d1c9b53fabcbc6660e10b33a914d/cobald/daemon/runners/guard.py#L5-L29
MatterMiners/cobald
cobald/daemon/runners/service.py
service
def service(flavour): r""" Mark a class as implementing a Service Each Service class must have a ``run`` method, which does not take any arguments. This method is :py:meth:`~.ServiceRunner.adopt`\ ed after the daemon starts, unless * the Service has been garbage collected, or * the ServiceUnit has been :py:meth:`~.ServiceUnit.cancel`\ ed. For each service instance, its :py:class:`~.ServiceUnit` is available at ``service_instance.__service_unit__``. """ def service_unit_decorator(raw_cls): __new__ = raw_cls.__new__ def __new_service__(cls, *args, **kwargs): if __new__ is object.__new__: self = __new__(cls) else: self = __new__(cls, *args, **kwargs) service_unit = ServiceUnit(self, flavour) self.__service_unit__ = service_unit return self raw_cls.__new__ = __new_service__ if raw_cls.run.__doc__ is None: raw_cls.run.__doc__ = "Service entry point" return raw_cls return service_unit_decorator
python
def service(flavour): r""" Mark a class as implementing a Service Each Service class must have a ``run`` method, which does not take any arguments. This method is :py:meth:`~.ServiceRunner.adopt`\ ed after the daemon starts, unless * the Service has been garbage collected, or * the ServiceUnit has been :py:meth:`~.ServiceUnit.cancel`\ ed. For each service instance, its :py:class:`~.ServiceUnit` is available at ``service_instance.__service_unit__``. """ def service_unit_decorator(raw_cls): __new__ = raw_cls.__new__ def __new_service__(cls, *args, **kwargs): if __new__ is object.__new__: self = __new__(cls) else: self = __new__(cls, *args, **kwargs) service_unit = ServiceUnit(self, flavour) self.__service_unit__ = service_unit return self raw_cls.__new__ = __new_service__ if raw_cls.run.__doc__ is None: raw_cls.run.__doc__ = "Service entry point" return raw_cls return service_unit_decorator
[ "def", "service", "(", "flavour", ")", ":", "def", "service_unit_decorator", "(", "raw_cls", ")", ":", "__new__", "=", "raw_cls", ".", "__new__", "def", "__new_service__", "(", "cls", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "if", "__new__", "is", "object", ".", "__new__", ":", "self", "=", "__new__", "(", "cls", ")", "else", ":", "self", "=", "__new__", "(", "cls", ",", "*", "args", ",", "*", "*", "kwargs", ")", "service_unit", "=", "ServiceUnit", "(", "self", ",", "flavour", ")", "self", ".", "__service_unit__", "=", "service_unit", "return", "self", "raw_cls", ".", "__new__", "=", "__new_service__", "if", "raw_cls", ".", "run", ".", "__doc__", "is", "None", ":", "raw_cls", ".", "run", ".", "__doc__", "=", "\"Service entry point\"", "return", "raw_cls", "return", "service_unit_decorator" ]
r""" Mark a class as implementing a Service Each Service class must have a ``run`` method, which does not take any arguments. This method is :py:meth:`~.ServiceRunner.adopt`\ ed after the daemon starts, unless * the Service has been garbage collected, or * the ServiceUnit has been :py:meth:`~.ServiceUnit.cancel`\ ed. For each service instance, its :py:class:`~.ServiceUnit` is available at ``service_instance.__service_unit__``.
[ "r", "Mark", "a", "class", "as", "implementing", "a", "Service" ]
train
https://github.com/MatterMiners/cobald/blob/264138de4382d1c9b53fabcbc6660e10b33a914d/cobald/daemon/runners/service.py#L54-L82
MatterMiners/cobald
cobald/daemon/runners/service.py
ServiceRunner.execute
def execute(self, payload, *args, flavour: ModuleType, **kwargs): """ Synchronously run ``payload`` and provide its output If ``*args*`` and/or ``**kwargs`` are provided, pass them to ``payload`` upon execution. """ if args or kwargs: payload = functools.partial(payload, *args, **kwargs) return self._meta_runner.run_payload(payload, flavour=flavour)
python
def execute(self, payload, *args, flavour: ModuleType, **kwargs): """ Synchronously run ``payload`` and provide its output If ``*args*`` and/or ``**kwargs`` are provided, pass them to ``payload`` upon execution. """ if args or kwargs: payload = functools.partial(payload, *args, **kwargs) return self._meta_runner.run_payload(payload, flavour=flavour)
[ "def", "execute", "(", "self", ",", "payload", ",", "*", "args", ",", "flavour", ":", "ModuleType", ",", "*", "*", "kwargs", ")", ":", "if", "args", "or", "kwargs", ":", "payload", "=", "functools", ".", "partial", "(", "payload", ",", "*", "args", ",", "*", "*", "kwargs", ")", "return", "self", ".", "_meta_runner", ".", "run_payload", "(", "payload", ",", "flavour", "=", "flavour", ")" ]
Synchronously run ``payload`` and provide its output If ``*args*`` and/or ``**kwargs`` are provided, pass them to ``payload`` upon execution.
[ "Synchronously", "run", "payload", "and", "provide", "its", "output" ]
train
https://github.com/MatterMiners/cobald/blob/264138de4382d1c9b53fabcbc6660e10b33a914d/cobald/daemon/runners/service.py#L97-L105
MatterMiners/cobald
cobald/daemon/runners/service.py
ServiceRunner.adopt
def adopt(self, payload, *args, flavour: ModuleType, **kwargs): """ Concurrently run ``payload`` in the background If ``*args*`` and/or ``**kwargs`` are provided, pass them to ``payload`` upon execution. """ if args or kwargs: payload = functools.partial(payload, *args, **kwargs) self._meta_runner.register_payload(payload, flavour=flavour)
python
def adopt(self, payload, *args, flavour: ModuleType, **kwargs): """ Concurrently run ``payload`` in the background If ``*args*`` and/or ``**kwargs`` are provided, pass them to ``payload`` upon execution. """ if args or kwargs: payload = functools.partial(payload, *args, **kwargs) self._meta_runner.register_payload(payload, flavour=flavour)
[ "def", "adopt", "(", "self", ",", "payload", ",", "*", "args", ",", "flavour", ":", "ModuleType", ",", "*", "*", "kwargs", ")", ":", "if", "args", "or", "kwargs", ":", "payload", "=", "functools", ".", "partial", "(", "payload", ",", "*", "args", ",", "*", "*", "kwargs", ")", "self", ".", "_meta_runner", ".", "register_payload", "(", "payload", ",", "flavour", "=", "flavour", ")" ]
Concurrently run ``payload`` in the background If ``*args*`` and/or ``**kwargs`` are provided, pass them to ``payload`` upon execution.
[ "Concurrently", "run", "payload", "in", "the", "background" ]
train
https://github.com/MatterMiners/cobald/blob/264138de4382d1c9b53fabcbc6660e10b33a914d/cobald/daemon/runners/service.py#L107-L115
MatterMiners/cobald
cobald/daemon/runners/service.py
ServiceRunner.accept
def accept(self): """ Start accepting synchronous, asynchronous and service payloads Since services are globally defined, only one :py:class:`ServiceRunner` may :py:meth:`accept` payloads at any time. """ if self._meta_runner: raise RuntimeError('payloads scheduled for %s before being started' % self) self._must_shutdown = False self._logger.info('%s starting', self.__class__.__name__) # force collecting objects so that defunct, migrated and overwritten services are destroyed now gc.collect() self._adopt_services() self.adopt(self._accept_services, flavour=trio) self._meta_runner.run()
python
def accept(self): """ Start accepting synchronous, asynchronous and service payloads Since services are globally defined, only one :py:class:`ServiceRunner` may :py:meth:`accept` payloads at any time. """ if self._meta_runner: raise RuntimeError('payloads scheduled for %s before being started' % self) self._must_shutdown = False self._logger.info('%s starting', self.__class__.__name__) # force collecting objects so that defunct, migrated and overwritten services are destroyed now gc.collect() self._adopt_services() self.adopt(self._accept_services, flavour=trio) self._meta_runner.run()
[ "def", "accept", "(", "self", ")", ":", "if", "self", ".", "_meta_runner", ":", "raise", "RuntimeError", "(", "'payloads scheduled for %s before being started'", "%", "self", ")", "self", ".", "_must_shutdown", "=", "False", "self", ".", "_logger", ".", "info", "(", "'%s starting'", ",", "self", ".", "__class__", ".", "__name__", ")", "# force collecting objects so that defunct, migrated and overwritten services are destroyed now", "gc", ".", "collect", "(", ")", "self", ".", "_adopt_services", "(", ")", "self", ".", "adopt", "(", "self", ".", "_accept_services", ",", "flavour", "=", "trio", ")", "self", ".", "_meta_runner", ".", "run", "(", ")" ]
Start accepting synchronous, asynchronous and service payloads Since services are globally defined, only one :py:class:`ServiceRunner` may :py:meth:`accept` payloads at any time.
[ "Start", "accepting", "synchronous", "asynchronous", "and", "service", "payloads" ]
train
https://github.com/MatterMiners/cobald/blob/264138de4382d1c9b53fabcbc6660e10b33a914d/cobald/daemon/runners/service.py#L118-L133
MatterMiners/cobald
cobald/daemon/runners/service.py
ServiceRunner.shutdown
def shutdown(self): """Shutdown the accept loop and stop running payloads""" self._must_shutdown = True self._is_shutdown.wait() self._meta_runner.stop()
python
def shutdown(self): """Shutdown the accept loop and stop running payloads""" self._must_shutdown = True self._is_shutdown.wait() self._meta_runner.stop()
[ "def", "shutdown", "(", "self", ")", ":", "self", ".", "_must_shutdown", "=", "True", "self", ".", "_is_shutdown", ".", "wait", "(", ")", "self", ".", "_meta_runner", ".", "stop", "(", ")" ]
Shutdown the accept loop and stop running payloads
[ "Shutdown", "the", "accept", "loop", "and", "stop", "running", "payloads" ]
train
https://github.com/MatterMiners/cobald/blob/264138de4382d1c9b53fabcbc6660e10b33a914d/cobald/daemon/runners/service.py#L135-L139
quantmind/agile-toolkit
agiletoolkit/github/milestones.py
milestones
def milestones(ctx, list, close): """View/edit/close milestones on github """ repos = get_repos(ctx.parent.agile.get('labels')) if list: _list_milestones(repos) elif close: click.echo('Closing milestones "%s"' % close) _close_milestone(repos, close) else: click.echo(ctx.get_help())
python
def milestones(ctx, list, close): """View/edit/close milestones on github """ repos = get_repos(ctx.parent.agile.get('labels')) if list: _list_milestones(repos) elif close: click.echo('Closing milestones "%s"' % close) _close_milestone(repos, close) else: click.echo(ctx.get_help())
[ "def", "milestones", "(", "ctx", ",", "list", ",", "close", ")", ":", "repos", "=", "get_repos", "(", "ctx", ".", "parent", ".", "agile", ".", "get", "(", "'labels'", ")", ")", "if", "list", ":", "_list_milestones", "(", "repos", ")", "elif", "close", ":", "click", ".", "echo", "(", "'Closing milestones \"%s\"'", "%", "close", ")", "_close_milestone", "(", "repos", ",", "close", ")", "else", ":", "click", ".", "echo", "(", "ctx", ".", "get_help", "(", ")", ")" ]
View/edit/close milestones on github
[ "View", "/", "edit", "/", "close", "milestones", "on", "github" ]
train
https://github.com/quantmind/agile-toolkit/blob/96028e36a842c57b171907c20583a60d1045fec1/agiletoolkit/github/milestones.py#L13-L23
aroberge/experimental
experimental/core/console.py
start_console
def start_console(local_vars={}): '''Starts a console; modified from code.interact''' transforms.CONSOLE_ACTIVE = True transforms.remove_not_allowed_in_console() sys.ps1 = prompt console = ExperimentalInteractiveConsole(locals=local_vars) console.interact(banner=banner)
python
def start_console(local_vars={}): '''Starts a console; modified from code.interact''' transforms.CONSOLE_ACTIVE = True transforms.remove_not_allowed_in_console() sys.ps1 = prompt console = ExperimentalInteractiveConsole(locals=local_vars) console.interact(banner=banner)
[ "def", "start_console", "(", "local_vars", "=", "{", "}", ")", ":", "transforms", ".", "CONSOLE_ACTIVE", "=", "True", "transforms", ".", "remove_not_allowed_in_console", "(", ")", "sys", ".", "ps1", "=", "prompt", "console", "=", "ExperimentalInteractiveConsole", "(", "locals", "=", "local_vars", ")", "console", ".", "interact", "(", "banner", "=", "banner", ")" ]
Starts a console; modified from code.interact
[ "Starts", "a", "console", ";", "modified", "from", "code", ".", "interact" ]
train
https://github.com/aroberge/experimental/blob/031a9be10698b429998436da748b8fdb86f18b47/experimental/core/console.py#L66-L72
aroberge/experimental
experimental/core/console.py
ExperimentalInteractiveConsole.push
def push(self, line): """Transform and push a line to the interpreter. The line should not have a trailing newline; it may have internal newlines. The line is appended to a buffer and the interpreter's runsource() method is called with the concatenated contents of the buffer as source. If this indicates that the command was executed or invalid, the buffer is reset; otherwise, the command is incomplete, and the buffer is left as it was after the line was appended. The return value is 1 if more input is required, 0 if the line was dealt with in some way (this is the same as runsource()). """ if transforms.FROM_EXPERIMENTAL.match(line): transforms.add_transformers(line) self.buffer.append("\n") else: self.buffer.append(line) add_pass = False if line.rstrip(' ').endswith(":"): add_pass = True source = "\n".join(self.buffer) if add_pass: source += "pass" source = transforms.transform(source) if add_pass: source = source.rstrip(' ') if source.endswith("pass"): source = source[:-4] # some transformations may strip an empty line meant to end a block if not self.buffer[-1]: source += "\n" try: more = self.runsource(source, self.filename) except SystemExit: os._exit(1) if not more: self.resetbuffer() return more
python
def push(self, line): """Transform and push a line to the interpreter. The line should not have a trailing newline; it may have internal newlines. The line is appended to a buffer and the interpreter's runsource() method is called with the concatenated contents of the buffer as source. If this indicates that the command was executed or invalid, the buffer is reset; otherwise, the command is incomplete, and the buffer is left as it was after the line was appended. The return value is 1 if more input is required, 0 if the line was dealt with in some way (this is the same as runsource()). """ if transforms.FROM_EXPERIMENTAL.match(line): transforms.add_transformers(line) self.buffer.append("\n") else: self.buffer.append(line) add_pass = False if line.rstrip(' ').endswith(":"): add_pass = True source = "\n".join(self.buffer) if add_pass: source += "pass" source = transforms.transform(source) if add_pass: source = source.rstrip(' ') if source.endswith("pass"): source = source[:-4] # some transformations may strip an empty line meant to end a block if not self.buffer[-1]: source += "\n" try: more = self.runsource(source, self.filename) except SystemExit: os._exit(1) if not more: self.resetbuffer() return more
[ "def", "push", "(", "self", ",", "line", ")", ":", "if", "transforms", ".", "FROM_EXPERIMENTAL", ".", "match", "(", "line", ")", ":", "transforms", ".", "add_transformers", "(", "line", ")", "self", ".", "buffer", ".", "append", "(", "\"\\n\"", ")", "else", ":", "self", ".", "buffer", ".", "append", "(", "line", ")", "add_pass", "=", "False", "if", "line", ".", "rstrip", "(", "' '", ")", ".", "endswith", "(", "\":\"", ")", ":", "add_pass", "=", "True", "source", "=", "\"\\n\"", ".", "join", "(", "self", ".", "buffer", ")", "if", "add_pass", ":", "source", "+=", "\"pass\"", "source", "=", "transforms", ".", "transform", "(", "source", ")", "if", "add_pass", ":", "source", "=", "source", ".", "rstrip", "(", "' '", ")", "if", "source", ".", "endswith", "(", "\"pass\"", ")", ":", "source", "=", "source", "[", ":", "-", "4", "]", "# some transformations may strip an empty line meant to end a block", "if", "not", "self", ".", "buffer", "[", "-", "1", "]", ":", "source", "+=", "\"\\n\"", "try", ":", "more", "=", "self", ".", "runsource", "(", "source", ",", "self", ".", "filename", ")", "except", "SystemExit", ":", "os", ".", "_exit", "(", "1", ")", "if", "not", "more", ":", "self", ".", "resetbuffer", "(", ")", "return", "more" ]
Transform and push a line to the interpreter. The line should not have a trailing newline; it may have internal newlines. The line is appended to a buffer and the interpreter's runsource() method is called with the concatenated contents of the buffer as source. If this indicates that the command was executed or invalid, the buffer is reset; otherwise, the command is incomplete, and the buffer is left as it was after the line was appended. The return value is 1 if more input is required, 0 if the line was dealt with in some way (this is the same as runsource()).
[ "Transform", "and", "push", "a", "line", "to", "the", "interpreter", "." ]
train
https://github.com/aroberge/experimental/blob/031a9be10698b429998436da748b8fdb86f18b47/experimental/core/console.py#L21-L63
frostming/atoml
atoml/encoder.py
dump
def dump(obj, f, preserve=False): """Write dict object into file :param obj: the object to be dumped into toml :param f: the file object :param preserve: optional flag to preserve the inline table in result """ if not f.write: raise TypeError('You can only dump an object into a file object') encoder = Encoder(f, preserve=preserve) return encoder.write_dict(obj)
python
def dump(obj, f, preserve=False): """Write dict object into file :param obj: the object to be dumped into toml :param f: the file object :param preserve: optional flag to preserve the inline table in result """ if not f.write: raise TypeError('You can only dump an object into a file object') encoder = Encoder(f, preserve=preserve) return encoder.write_dict(obj)
[ "def", "dump", "(", "obj", ",", "f", ",", "preserve", "=", "False", ")", ":", "if", "not", "f", ".", "write", ":", "raise", "TypeError", "(", "'You can only dump an object into a file object'", ")", "encoder", "=", "Encoder", "(", "f", ",", "preserve", "=", "preserve", ")", "return", "encoder", ".", "write_dict", "(", "obj", ")" ]
Write dict object into file :param obj: the object to be dumped into toml :param f: the file object :param preserve: optional flag to preserve the inline table in result
[ "Write", "dict", "object", "into", "file" ]
train
https://github.com/frostming/atoml/blob/85414ef77777366887a819a05b496d5279296cd2/atoml/encoder.py#L180-L190
frostming/atoml
atoml/encoder.py
dumps
def dumps(obj, preserve=False): """Stringifies a dict as toml :param obj: the object to be dumped into toml :param preserve: optional flag to preserve the inline table in result """ f = StringIO() dump(obj, f, preserve) return f.getvalue()
python
def dumps(obj, preserve=False): """Stringifies a dict as toml :param obj: the object to be dumped into toml :param preserve: optional flag to preserve the inline table in result """ f = StringIO() dump(obj, f, preserve) return f.getvalue()
[ "def", "dumps", "(", "obj", ",", "preserve", "=", "False", ")", ":", "f", "=", "StringIO", "(", ")", "dump", "(", "obj", ",", "f", ",", "preserve", ")", "return", "f", ".", "getvalue", "(", ")" ]
Stringifies a dict as toml :param obj: the object to be dumped into toml :param preserve: optional flag to preserve the inline table in result
[ "Stringifies", "a", "dict", "as", "toml" ]
train
https://github.com/frostming/atoml/blob/85414ef77777366887a819a05b496d5279296cd2/atoml/encoder.py#L193-L201
alixedi/palal
palal/license.py
license_loader
def license_loader(lic_dir=LIC_DIR): """Loads licenses from the given directory.""" lics = [] for ln in os.listdir(lic_dir): lp = os.path.join(lic_dir, ln) with open(lp) as lf: txt = lf.read() lic = License(txt) lics.append(lic) return lics
python
def license_loader(lic_dir=LIC_DIR): """Loads licenses from the given directory.""" lics = [] for ln in os.listdir(lic_dir): lp = os.path.join(lic_dir, ln) with open(lp) as lf: txt = lf.read() lic = License(txt) lics.append(lic) return lics
[ "def", "license_loader", "(", "lic_dir", "=", "LIC_DIR", ")", ":", "lics", "=", "[", "]", "for", "ln", "in", "os", ".", "listdir", "(", "lic_dir", ")", ":", "lp", "=", "os", ".", "path", ".", "join", "(", "lic_dir", ",", "ln", ")", "with", "open", "(", "lp", ")", "as", "lf", ":", "txt", "=", "lf", ".", "read", "(", ")", "lic", "=", "License", "(", "txt", ")", "lics", ".", "append", "(", "lic", ")", "return", "lics" ]
Loads licenses from the given directory.
[ "Loads", "licenses", "from", "the", "given", "directory", "." ]
train
https://github.com/alixedi/palal/blob/325359f66ac48a9f96efea0489aec353f8a40837/palal/license.py#L46-L55
alixedi/palal
palal/license.py
License.get_vector
def get_vector(self, max_choice=3): """Return pseudo-choice vectors.""" vec = {} for dim in ['forbidden', 'required', 'permitted']: if self.meta[dim] is None: continue dim_vec = map(lambda x: (x, max_choice), self.meta[dim]) vec[dim] = dict(dim_vec) return vec
python
def get_vector(self, max_choice=3): """Return pseudo-choice vectors.""" vec = {} for dim in ['forbidden', 'required', 'permitted']: if self.meta[dim] is None: continue dim_vec = map(lambda x: (x, max_choice), self.meta[dim]) vec[dim] = dict(dim_vec) return vec
[ "def", "get_vector", "(", "self", ",", "max_choice", "=", "3", ")", ":", "vec", "=", "{", "}", "for", "dim", "in", "[", "'forbidden'", ",", "'required'", ",", "'permitted'", "]", ":", "if", "self", ".", "meta", "[", "dim", "]", "is", "None", ":", "continue", "dim_vec", "=", "map", "(", "lambda", "x", ":", "(", "x", ",", "max_choice", ")", ",", "self", ".", "meta", "[", "dim", "]", ")", "vec", "[", "dim", "]", "=", "dict", "(", "dim_vec", ")", "return", "vec" ]
Return pseudo-choice vectors.
[ "Return", "pseudo", "-", "choice", "vectors", "." ]
train
https://github.com/alixedi/palal/blob/325359f66ac48a9f96efea0489aec353f8a40837/palal/license.py#L35-L43
qubell/contrib-python-qubell-client
qubell/cli/__main__.py
entity
def entity(ctx, debug, uncolorize, **kwargs): """ CLI for tonomi.com using contrib-python-qubell-client To enable completion: eval "$(_NOMI_COMPLETE=source nomi)" """ global PROVIDER_CONFIG if debug: log.basicConfig(level=log.DEBUG) log.getLogger("requests.packages.urllib3.connectionpool").setLevel(log.DEBUG) for (k, v) in kwargs.iteritems(): if v: QUBELL[k] = v PROVIDER_CONFIG = { 'configuration.provider': PROVIDER['provider_type'], 'configuration.legacy-regions': PROVIDER['provider_region'], 'configuration.endpoint-url': '', 'configuration.legacy-security-group': '', 'configuration.identity': PROVIDER['provider_identity'], 'configuration.credential': PROVIDER['provider_credential'] } class UserContext(object): def __init__(self): self.platform = None self.unauthenticated_platform = None self.colorize = not (uncolorize) def get_platform(self): if not self.platform: assert QUBELL["tenant"], "No platform URL provided. Set QUBELL_TENANT or use --tenant option." if not QUBELL["token"]: assert QUBELL["user"], "No username. Set QUBELL_USER or use --user option." assert QUBELL["password"], "No password provided. Set QUBELL_PASSWORD or use --password option." self.platform = QubellPlatform.connect( tenant=QUBELL["tenant"], user=QUBELL["user"], password=QUBELL["password"], token=QUBELL["token"]) return self.platform def get_unauthenticated_platform(self): if not self.unauthenticated_platform: assert QUBELL["tenant"], "No platform URL provided. Set QUBELL_TENANT or use --tenant option." self.unauthenticated_platform = QubellPlatform.connect(tenant=QUBELL["tenant"]) return self.unauthenticated_platform ctx = click.get_current_context() ctx.obj = UserContext()
python
def entity(ctx, debug, uncolorize, **kwargs): """ CLI for tonomi.com using contrib-python-qubell-client To enable completion: eval "$(_NOMI_COMPLETE=source nomi)" """ global PROVIDER_CONFIG if debug: log.basicConfig(level=log.DEBUG) log.getLogger("requests.packages.urllib3.connectionpool").setLevel(log.DEBUG) for (k, v) in kwargs.iteritems(): if v: QUBELL[k] = v PROVIDER_CONFIG = { 'configuration.provider': PROVIDER['provider_type'], 'configuration.legacy-regions': PROVIDER['provider_region'], 'configuration.endpoint-url': '', 'configuration.legacy-security-group': '', 'configuration.identity': PROVIDER['provider_identity'], 'configuration.credential': PROVIDER['provider_credential'] } class UserContext(object): def __init__(self): self.platform = None self.unauthenticated_platform = None self.colorize = not (uncolorize) def get_platform(self): if not self.platform: assert QUBELL["tenant"], "No platform URL provided. Set QUBELL_TENANT or use --tenant option." if not QUBELL["token"]: assert QUBELL["user"], "No username. Set QUBELL_USER or use --user option." assert QUBELL["password"], "No password provided. Set QUBELL_PASSWORD or use --password option." self.platform = QubellPlatform.connect( tenant=QUBELL["tenant"], user=QUBELL["user"], password=QUBELL["password"], token=QUBELL["token"]) return self.platform def get_unauthenticated_platform(self): if not self.unauthenticated_platform: assert QUBELL["tenant"], "No platform URL provided. Set QUBELL_TENANT or use --tenant option." self.unauthenticated_platform = QubellPlatform.connect(tenant=QUBELL["tenant"]) return self.unauthenticated_platform ctx = click.get_current_context() ctx.obj = UserContext()
[ "def", "entity", "(", "ctx", ",", "debug", ",", "uncolorize", ",", "*", "*", "kwargs", ")", ":", "global", "PROVIDER_CONFIG", "if", "debug", ":", "log", ".", "basicConfig", "(", "level", "=", "log", ".", "DEBUG", ")", "log", ".", "getLogger", "(", "\"requests.packages.urllib3.connectionpool\"", ")", ".", "setLevel", "(", "log", ".", "DEBUG", ")", "for", "(", "k", ",", "v", ")", "in", "kwargs", ".", "iteritems", "(", ")", ":", "if", "v", ":", "QUBELL", "[", "k", "]", "=", "v", "PROVIDER_CONFIG", "=", "{", "'configuration.provider'", ":", "PROVIDER", "[", "'provider_type'", "]", ",", "'configuration.legacy-regions'", ":", "PROVIDER", "[", "'provider_region'", "]", ",", "'configuration.endpoint-url'", ":", "''", ",", "'configuration.legacy-security-group'", ":", "''", ",", "'configuration.identity'", ":", "PROVIDER", "[", "'provider_identity'", "]", ",", "'configuration.credential'", ":", "PROVIDER", "[", "'provider_credential'", "]", "}", "class", "UserContext", "(", "object", ")", ":", "def", "__init__", "(", "self", ")", ":", "self", ".", "platform", "=", "None", "self", ".", "unauthenticated_platform", "=", "None", "self", ".", "colorize", "=", "not", "(", "uncolorize", ")", "def", "get_platform", "(", "self", ")", ":", "if", "not", "self", ".", "platform", ":", "assert", "QUBELL", "[", "\"tenant\"", "]", ",", "\"No platform URL provided. Set QUBELL_TENANT or use --tenant option.\"", "if", "not", "QUBELL", "[", "\"token\"", "]", ":", "assert", "QUBELL", "[", "\"user\"", "]", ",", "\"No username. Set QUBELL_USER or use --user option.\"", "assert", "QUBELL", "[", "\"password\"", "]", ",", "\"No password provided. 
Set QUBELL_PASSWORD or use --password option.\"", "self", ".", "platform", "=", "QubellPlatform", ".", "connect", "(", "tenant", "=", "QUBELL", "[", "\"tenant\"", "]", ",", "user", "=", "QUBELL", "[", "\"user\"", "]", ",", "password", "=", "QUBELL", "[", "\"password\"", "]", ",", "token", "=", "QUBELL", "[", "\"token\"", "]", ")", "return", "self", ".", "platform", "def", "get_unauthenticated_platform", "(", "self", ")", ":", "if", "not", "self", ".", "unauthenticated_platform", ":", "assert", "QUBELL", "[", "\"tenant\"", "]", ",", "\"No platform URL provided. Set QUBELL_TENANT or use --tenant option.\"", "self", ".", "unauthenticated_platform", "=", "QubellPlatform", ".", "connect", "(", "tenant", "=", "QUBELL", "[", "\"tenant\"", "]", ")", "return", "self", ".", "unauthenticated_platform", "ctx", "=", "click", ".", "get_current_context", "(", ")", "ctx", ".", "obj", "=", "UserContext", "(", ")" ]
CLI for tonomi.com using contrib-python-qubell-client To enable completion: eval "$(_NOMI_COMPLETE=source nomi)"
[ "CLI", "for", "tonomi", ".", "com", "using", "contrib", "-", "python", "-", "qubell", "-", "client" ]
train
https://github.com/qubell/contrib-python-qubell-client/blob/4586ea11d5103c2ff9607d3ed922b5a0991b8845/qubell/cli/__main__.py#L128-L182
qubell/contrib-python-qubell-client
qubell/cli/__main__.py
import_app
def import_app(files, category, overwrite, id, name): """ Upload application from file. By default, file name will be used as application name, with "-vXX.YYY" suffix stripped. Application is looked up by one of these classifiers, in order of priority: app-id, app-name, filename. If app-id is provided, looks up existing application and updates its manifest. If app-id is NOT specified, looks up by name, or creates new application. """ platform = _get_platform() org = platform.get_organization(QUBELL["organization"]) if category: category = org.categories[category] regex = re.compile(r"^(.*?)(-v(\d+)|)\.[^.]+$") if (id or name) and len(files) > 1: raise Exception("--id and --name are supported only for single-file mode") for filename in files: click.echo("Importing " + filename, nl=False) if not name: match = regex.match(basename(filename)) if not match: click.echo(_color("RED", "FAIL") + " unknown filename format") break name = regex.match(basename(filename)).group(1) click.echo(" => ", nl=False) app = None try: app = org.get_application(id=id, name=name) if app and not overwrite: click.echo("%s %s already exists %s" % ( app.id, _color("BLUE", app and app.name or name), _color("RED", "FAIL"))) break except NotFoundError: if id: click.echo("%s %s not found %s" % ( id or "", _color("BLUE", app and app.name or name), _color("RED", "FAIL"))) break click.echo(_color("BLUE", app and app.name or name) + " ", nl=False) try: with file(filename, "r") as f: if app: app.update(name=app.name, category=category and category.id or app.category, manifest=Manifest(content=f.read())) else: app = org.application(id=id, name=name, manifest=Manifest(content=f.read())) if category: app.update(category=category.id) click.echo(app.id + _color("GREEN", " OK")) except IOError as e: click.echo(_color("RED", " FAIL") + " " + e.message) break
python
def import_app(files, category, overwrite, id, name): """ Upload application from file. By default, file name will be used as application name, with "-vXX.YYY" suffix stripped. Application is looked up by one of these classifiers, in order of priority: app-id, app-name, filename. If app-id is provided, looks up existing application and updates its manifest. If app-id is NOT specified, looks up by name, or creates new application. """ platform = _get_platform() org = platform.get_organization(QUBELL["organization"]) if category: category = org.categories[category] regex = re.compile(r"^(.*?)(-v(\d+)|)\.[^.]+$") if (id or name) and len(files) > 1: raise Exception("--id and --name are supported only for single-file mode") for filename in files: click.echo("Importing " + filename, nl=False) if not name: match = regex.match(basename(filename)) if not match: click.echo(_color("RED", "FAIL") + " unknown filename format") break name = regex.match(basename(filename)).group(1) click.echo(" => ", nl=False) app = None try: app = org.get_application(id=id, name=name) if app and not overwrite: click.echo("%s %s already exists %s" % ( app.id, _color("BLUE", app and app.name or name), _color("RED", "FAIL"))) break except NotFoundError: if id: click.echo("%s %s not found %s" % ( id or "", _color("BLUE", app and app.name or name), _color("RED", "FAIL"))) break click.echo(_color("BLUE", app and app.name or name) + " ", nl=False) try: with file(filename, "r") as f: if app: app.update(name=app.name, category=category and category.id or app.category, manifest=Manifest(content=f.read())) else: app = org.application(id=id, name=name, manifest=Manifest(content=f.read())) if category: app.update(category=category.id) click.echo(app.id + _color("GREEN", " OK")) except IOError as e: click.echo(_color("RED", " FAIL") + " " + e.message) break
[ "def", "import_app", "(", "files", ",", "category", ",", "overwrite", ",", "id", ",", "name", ")", ":", "platform", "=", "_get_platform", "(", ")", "org", "=", "platform", ".", "get_organization", "(", "QUBELL", "[", "\"organization\"", "]", ")", "if", "category", ":", "category", "=", "org", ".", "categories", "[", "category", "]", "regex", "=", "re", ".", "compile", "(", "r\"^(.*?)(-v(\\d+)|)\\.[^.]+$\"", ")", "if", "(", "id", "or", "name", ")", "and", "len", "(", "files", ")", ">", "1", ":", "raise", "Exception", "(", "\"--id and --name are supported only for single-file mode\"", ")", "for", "filename", "in", "files", ":", "click", ".", "echo", "(", "\"Importing \"", "+", "filename", ",", "nl", "=", "False", ")", "if", "not", "name", ":", "match", "=", "regex", ".", "match", "(", "basename", "(", "filename", ")", ")", "if", "not", "match", ":", "click", ".", "echo", "(", "_color", "(", "\"RED\"", ",", "\"FAIL\"", ")", "+", "\" unknown filename format\"", ")", "break", "name", "=", "regex", ".", "match", "(", "basename", "(", "filename", ")", ")", ".", "group", "(", "1", ")", "click", ".", "echo", "(", "\" => \"", ",", "nl", "=", "False", ")", "app", "=", "None", "try", ":", "app", "=", "org", ".", "get_application", "(", "id", "=", "id", ",", "name", "=", "name", ")", "if", "app", "and", "not", "overwrite", ":", "click", ".", "echo", "(", "\"%s %s already exists %s\"", "%", "(", "app", ".", "id", ",", "_color", "(", "\"BLUE\"", ",", "app", "and", "app", ".", "name", "or", "name", ")", ",", "_color", "(", "\"RED\"", ",", "\"FAIL\"", ")", ")", ")", "break", "except", "NotFoundError", ":", "if", "id", ":", "click", ".", "echo", "(", "\"%s %s not found %s\"", "%", "(", "id", "or", "\"\"", ",", "_color", "(", "\"BLUE\"", ",", "app", "and", "app", ".", "name", "or", "name", ")", ",", "_color", "(", "\"RED\"", ",", "\"FAIL\"", ")", ")", ")", "break", "click", ".", "echo", "(", "_color", "(", "\"BLUE\"", ",", "app", "and", "app", ".", "name", "or", "name", ")", "+", 
"\" \"", ",", "nl", "=", "False", ")", "try", ":", "with", "file", "(", "filename", ",", "\"r\"", ")", "as", "f", ":", "if", "app", ":", "app", ".", "update", "(", "name", "=", "app", ".", "name", ",", "category", "=", "category", "and", "category", ".", "id", "or", "app", ".", "category", ",", "manifest", "=", "Manifest", "(", "content", "=", "f", ".", "read", "(", ")", ")", ")", "else", ":", "app", "=", "org", ".", "application", "(", "id", "=", "id", ",", "name", "=", "name", ",", "manifest", "=", "Manifest", "(", "content", "=", "f", ".", "read", "(", ")", ")", ")", "if", "category", ":", "app", ".", "update", "(", "category", "=", "category", ".", "id", ")", "click", ".", "echo", "(", "app", ".", "id", "+", "_color", "(", "\"GREEN\"", ",", "\" OK\"", ")", ")", "except", "IOError", "as", "e", ":", "click", ".", "echo", "(", "_color", "(", "\"RED\"", ",", "\" FAIL\"", ")", "+", "\" \"", "+", "e", ".", "message", ")", "break" ]
Upload application from file. By default, file name will be used as application name, with "-vXX.YYY" suffix stripped. Application is looked up by one of these classifiers, in order of priority: app-id, app-name, filename. If app-id is provided, looks up existing application and updates its manifest. If app-id is NOT specified, looks up by name, or creates new application.
[ "Upload", "application", "from", "file", "." ]
train
https://github.com/qubell/contrib-python-qubell-client/blob/4586ea11d5103c2ff9607d3ed922b5a0991b8845/qubell/cli/__main__.py#L351-L405
qubell/contrib-python-qubell-client
qubell/cli/__main__.py
show_account
def show_account(): """ Exports current account configuration in shell-friendly form. Takes into account explicit top-level flags like --organization. """ click.echo("# tonomi api") for (key, env) in REVERSE_MAPPING.items(): value = QUBELL.get(key, None) if value: click.echo("export %s='%s'" % (env, value)) if any(map(lambda x: PROVIDER.get(x), REVERSE_PROVIDER_MAPPING.keys())): click.echo("# cloud account") for (key, env) in REVERSE_PROVIDER_MAPPING.items(): value = PROVIDER.get(key, None) if value: click.echo("export %s='%s'" % (env, value))
python
def show_account(): """ Exports current account configuration in shell-friendly form. Takes into account explicit top-level flags like --organization. """ click.echo("# tonomi api") for (key, env) in REVERSE_MAPPING.items(): value = QUBELL.get(key, None) if value: click.echo("export %s='%s'" % (env, value)) if any(map(lambda x: PROVIDER.get(x), REVERSE_PROVIDER_MAPPING.keys())): click.echo("# cloud account") for (key, env) in REVERSE_PROVIDER_MAPPING.items(): value = PROVIDER.get(key, None) if value: click.echo("export %s='%s'" % (env, value))
[ "def", "show_account", "(", ")", ":", "click", ".", "echo", "(", "\"# tonomi api\"", ")", "for", "(", "key", ",", "env", ")", "in", "REVERSE_MAPPING", ".", "items", "(", ")", ":", "value", "=", "QUBELL", ".", "get", "(", "key", ",", "None", ")", "if", "value", ":", "click", ".", "echo", "(", "\"export %s='%s'\"", "%", "(", "env", ",", "value", ")", ")", "if", "any", "(", "map", "(", "lambda", "x", ":", "PROVIDER", ".", "get", "(", "x", ")", ",", "REVERSE_PROVIDER_MAPPING", ".", "keys", "(", ")", ")", ")", ":", "click", ".", "echo", "(", "\"# cloud account\"", ")", "for", "(", "key", ",", "env", ")", "in", "REVERSE_PROVIDER_MAPPING", ".", "items", "(", ")", ":", "value", "=", "PROVIDER", ".", "get", "(", "key", ",", "None", ")", "if", "value", ":", "click", ".", "echo", "(", "\"export %s='%s'\"", "%", "(", "env", ",", "value", ")", ")" ]
Exports current account configuration in shell-friendly form. Takes into account explicit top-level flags like --organization.
[ "Exports", "current", "account", "configuration", "in", "shell", "-", "friendly", "form", ".", "Takes", "into", "account", "explicit", "top", "-", "level", "flags", "like", "--", "organization", "." ]
train
https://github.com/qubell/contrib-python-qubell-client/blob/4586ea11d5103c2ff9607d3ed922b5a0991b8845/qubell/cli/__main__.py#L1210-L1226
qubell/contrib-python-qubell-client
qubell/cli/__main__.py
generate_session_token
def generate_session_token(refresh_token, verbose): """ Generates new session token from the given refresh token. :param refresh_token: refresh token to generate from :param verbose: whether expiration time should be added to output """ platform = _get_platform(authenticated=False) session_token, expires_in = platform.generate_session_token(refresh_token) if verbose: click.echo("%s\n\n%s" % (session_token, _color('YELLOW', "Expires in %d seconds" % expires_in))) else: click.echo(session_token)
python
def generate_session_token(refresh_token, verbose): """ Generates new session token from the given refresh token. :param refresh_token: refresh token to generate from :param verbose: whether expiration time should be added to output """ platform = _get_platform(authenticated=False) session_token, expires_in = platform.generate_session_token(refresh_token) if verbose: click.echo("%s\n\n%s" % (session_token, _color('YELLOW', "Expires in %d seconds" % expires_in))) else: click.echo(session_token)
[ "def", "generate_session_token", "(", "refresh_token", ",", "verbose", ")", ":", "platform", "=", "_get_platform", "(", "authenticated", "=", "False", ")", "session_token", ",", "expires_in", "=", "platform", ".", "generate_session_token", "(", "refresh_token", ")", "if", "verbose", ":", "click", ".", "echo", "(", "\"%s\\n\\n%s\"", "%", "(", "session_token", ",", "_color", "(", "'YELLOW'", ",", "\"Expires in %d seconds\"", "%", "expires_in", ")", ")", ")", "else", ":", "click", ".", "echo", "(", "session_token", ")" ]
Generates new session token from the given refresh token. :param refresh_token: refresh token to generate from :param verbose: whether expiration time should be added to output
[ "Generates", "new", "session", "token", "from", "the", "given", "refresh", "token", ".", ":", "param", "refresh_token", ":", "refresh", "token", "to", "generate", "from", ":", "param", "verbose", ":", "whether", "expiration", "time", "should", "be", "added", "to", "output" ]
train
https://github.com/qubell/contrib-python-qubell-client/blob/4586ea11d5103c2ff9607d3ed922b5a0991b8845/qubell/cli/__main__.py#L1232-L1245
astroduff/commah
examples.py
runcommand
def runcommand(cosmology='WMAP5'): """ Example interface commands """ # Return the WMAP5 cosmology concentration predicted for # z=0 range of masses Mi = [1e8, 1e9, 1e10] zi = 0 print("Concentrations for haloes of mass %s at z=%s" % (Mi, zi)) output = commah.run(cosmology=cosmology, zi=zi, Mi=Mi) print(output['c'].flatten()) # Return the WMAP5 cosmology concentration predicted for # z=0 range of masses AND cosmological parameters Mi = [1e8, 1e9, 1e10] zi = 0 print("Concentrations for haloes of mass %s at z=%s" % (Mi, zi)) output, cosmo = commah.run(cosmology=cosmology, zi=zi, Mi=Mi, retcosmo=True) print(output['c'].flatten()) print(cosmo) # Return the WMAP5 cosmology concentration predicted for MW # mass (2e12 Msol) across redshift Mi = 2e12 z = [0, 0.5, 1, 1.5, 2, 2.5] output = commah.run(cosmology=cosmology, zi=0, Mi=Mi, z=z) for zval in z: print("M(z=0)=%s has c(z=%s)=%s" % (Mi, zval, output[output['z'] == zval]['c'].flatten())) # Return the WMAP5 cosmology concentration predicted for MW # mass (2e12 Msol) across redshift Mi = 2e12 zi = [0, 0.5, 1, 1.5, 2, 2.5] output = commah.run(cosmology=cosmology, zi=zi, Mi=Mi) for zval in zi: print("M(z=%s)=%s has concentration %s" % (zval, Mi, output[(output['zi'] == zval) & (output['z'] == zval)]['c'].flatten())) # Return the WMAP5 cosmology concentration and # rarity of high-z cluster Mi = 2e14 zi = 6 output = commah.run(cosmology=cosmology, zi=zi, Mi=Mi) print("Concentrations for haloes of mass %s at z=%s" % (Mi, zi)) print(output['c'].flatten()) print("Mass variance sigma of haloes of mass %s at z=%s" % (Mi, zi)) print(output['sig'].flatten()) print("Fluctuation for haloes of mass %s at z=%s" % (Mi, zi)) print(output['nu'].flatten()) # Return the WMAP5 cosmology accretion rate prediction # for haloes at range of redshift and mass Mi = [1e8, 1e9, 1e10] zi = [0] z = [0, 0.5, 1, 1.5, 2, 2.5] output = commah.run(cosmology=cosmology, zi=zi, Mi=Mi, z=z) for Mval in Mi: print("dM/dt for halo of mass %s at z=%s across redshift 
%s is: " % (Mval, zi, z)) print(output[output['Mi'] == Mval]['dMdt'].flatten()) # Return the WMAP5 cosmology Halo Mass History for haloes with M(z=0) = 1e8 M = [1e8] z = [0, 0.5, 1, 1.5, 2, 2.5] print("Halo Mass History for z=0 mass of %s across z=%s" % (M, z)) output = commah.run(cosmology=cosmology, zi=0, Mi=M, z=z) print(output['Mz'].flatten()) # Return the WMAP5 cosmology formation redshifts for haloes at # range of redshift and mass M = [1e8, 1e9, 1e10] z = [0] print("Formation Redshifts for haloes of mass %s at z=%s" % (M, z)) output = commah.run(cosmology=cosmology, zi=0, Mi=M, z=z) for Mval in M: print(output[output['Mi'] == Mval]['zf'].flatten()) return("Done")
python
def runcommand(cosmology='WMAP5'): """ Example interface commands """ # Return the WMAP5 cosmology concentration predicted for # z=0 range of masses Mi = [1e8, 1e9, 1e10] zi = 0 print("Concentrations for haloes of mass %s at z=%s" % (Mi, zi)) output = commah.run(cosmology=cosmology, zi=zi, Mi=Mi) print(output['c'].flatten()) # Return the WMAP5 cosmology concentration predicted for # z=0 range of masses AND cosmological parameters Mi = [1e8, 1e9, 1e10] zi = 0 print("Concentrations for haloes of mass %s at z=%s" % (Mi, zi)) output, cosmo = commah.run(cosmology=cosmology, zi=zi, Mi=Mi, retcosmo=True) print(output['c'].flatten()) print(cosmo) # Return the WMAP5 cosmology concentration predicted for MW # mass (2e12 Msol) across redshift Mi = 2e12 z = [0, 0.5, 1, 1.5, 2, 2.5] output = commah.run(cosmology=cosmology, zi=0, Mi=Mi, z=z) for zval in z: print("M(z=0)=%s has c(z=%s)=%s" % (Mi, zval, output[output['z'] == zval]['c'].flatten())) # Return the WMAP5 cosmology concentration predicted for MW # mass (2e12 Msol) across redshift Mi = 2e12 zi = [0, 0.5, 1, 1.5, 2, 2.5] output = commah.run(cosmology=cosmology, zi=zi, Mi=Mi) for zval in zi: print("M(z=%s)=%s has concentration %s" % (zval, Mi, output[(output['zi'] == zval) & (output['z'] == zval)]['c'].flatten())) # Return the WMAP5 cosmology concentration and # rarity of high-z cluster Mi = 2e14 zi = 6 output = commah.run(cosmology=cosmology, zi=zi, Mi=Mi) print("Concentrations for haloes of mass %s at z=%s" % (Mi, zi)) print(output['c'].flatten()) print("Mass variance sigma of haloes of mass %s at z=%s" % (Mi, zi)) print(output['sig'].flatten()) print("Fluctuation for haloes of mass %s at z=%s" % (Mi, zi)) print(output['nu'].flatten()) # Return the WMAP5 cosmology accretion rate prediction # for haloes at range of redshift and mass Mi = [1e8, 1e9, 1e10] zi = [0] z = [0, 0.5, 1, 1.5, 2, 2.5] output = commah.run(cosmology=cosmology, zi=zi, Mi=Mi, z=z) for Mval in Mi: print("dM/dt for halo of mass %s at z=%s across redshift 
%s is: " % (Mval, zi, z)) print(output[output['Mi'] == Mval]['dMdt'].flatten()) # Return the WMAP5 cosmology Halo Mass History for haloes with M(z=0) = 1e8 M = [1e8] z = [0, 0.5, 1, 1.5, 2, 2.5] print("Halo Mass History for z=0 mass of %s across z=%s" % (M, z)) output = commah.run(cosmology=cosmology, zi=0, Mi=M, z=z) print(output['Mz'].flatten()) # Return the WMAP5 cosmology formation redshifts for haloes at # range of redshift and mass M = [1e8, 1e9, 1e10] z = [0] print("Formation Redshifts for haloes of mass %s at z=%s" % (M, z)) output = commah.run(cosmology=cosmology, zi=0, Mi=M, z=z) for Mval in M: print(output[output['Mi'] == Mval]['zf'].flatten()) return("Done")
[ "def", "runcommand", "(", "cosmology", "=", "'WMAP5'", ")", ":", "# Return the WMAP5 cosmology concentration predicted for", "# z=0 range of masses", "Mi", "=", "[", "1e8", ",", "1e9", ",", "1e10", "]", "zi", "=", "0", "print", "(", "\"Concentrations for haloes of mass %s at z=%s\"", "%", "(", "Mi", ",", "zi", ")", ")", "output", "=", "commah", ".", "run", "(", "cosmology", "=", "cosmology", ",", "zi", "=", "zi", ",", "Mi", "=", "Mi", ")", "print", "(", "output", "[", "'c'", "]", ".", "flatten", "(", ")", ")", "# Return the WMAP5 cosmology concentration predicted for", "# z=0 range of masses AND cosmological parameters", "Mi", "=", "[", "1e8", ",", "1e9", ",", "1e10", "]", "zi", "=", "0", "print", "(", "\"Concentrations for haloes of mass %s at z=%s\"", "%", "(", "Mi", ",", "zi", ")", ")", "output", ",", "cosmo", "=", "commah", ".", "run", "(", "cosmology", "=", "cosmology", ",", "zi", "=", "zi", ",", "Mi", "=", "Mi", ",", "retcosmo", "=", "True", ")", "print", "(", "output", "[", "'c'", "]", ".", "flatten", "(", ")", ")", "print", "(", "cosmo", ")", "# Return the WMAP5 cosmology concentration predicted for MW", "# mass (2e12 Msol) across redshift", "Mi", "=", "2e12", "z", "=", "[", "0", ",", "0.5", ",", "1", ",", "1.5", ",", "2", ",", "2.5", "]", "output", "=", "commah", ".", "run", "(", "cosmology", "=", "cosmology", ",", "zi", "=", "0", ",", "Mi", "=", "Mi", ",", "z", "=", "z", ")", "for", "zval", "in", "z", ":", "print", "(", "\"M(z=0)=%s has c(z=%s)=%s\"", "%", "(", "Mi", ",", "zval", ",", "output", "[", "output", "[", "'z'", "]", "==", "zval", "]", "[", "'c'", "]", ".", "flatten", "(", ")", ")", ")", "# Return the WMAP5 cosmology concentration predicted for MW", "# mass (2e12 Msol) across redshift", "Mi", "=", "2e12", "zi", "=", "[", "0", ",", "0.5", ",", "1", ",", "1.5", ",", "2", ",", "2.5", "]", "output", "=", "commah", ".", "run", "(", "cosmology", "=", "cosmology", ",", "zi", "=", "zi", ",", "Mi", "=", "Mi", ")", "for", "zval", "in", "zi", ":", 
"print", "(", "\"M(z=%s)=%s has concentration %s\"", "%", "(", "zval", ",", "Mi", ",", "output", "[", "(", "output", "[", "'zi'", "]", "==", "zval", ")", "&", "(", "output", "[", "'z'", "]", "==", "zval", ")", "]", "[", "'c'", "]", ".", "flatten", "(", ")", ")", ")", "# Return the WMAP5 cosmology concentration and", "# rarity of high-z cluster", "Mi", "=", "2e14", "zi", "=", "6", "output", "=", "commah", ".", "run", "(", "cosmology", "=", "cosmology", ",", "zi", "=", "zi", ",", "Mi", "=", "Mi", ")", "print", "(", "\"Concentrations for haloes of mass %s at z=%s\"", "%", "(", "Mi", ",", "zi", ")", ")", "print", "(", "output", "[", "'c'", "]", ".", "flatten", "(", ")", ")", "print", "(", "\"Mass variance sigma of haloes of mass %s at z=%s\"", "%", "(", "Mi", ",", "zi", ")", ")", "print", "(", "output", "[", "'sig'", "]", ".", "flatten", "(", ")", ")", "print", "(", "\"Fluctuation for haloes of mass %s at z=%s\"", "%", "(", "Mi", ",", "zi", ")", ")", "print", "(", "output", "[", "'nu'", "]", ".", "flatten", "(", ")", ")", "# Return the WMAP5 cosmology accretion rate prediction", "# for haloes at range of redshift and mass", "Mi", "=", "[", "1e8", ",", "1e9", ",", "1e10", "]", "zi", "=", "[", "0", "]", "z", "=", "[", "0", ",", "0.5", ",", "1", ",", "1.5", ",", "2", ",", "2.5", "]", "output", "=", "commah", ".", "run", "(", "cosmology", "=", "cosmology", ",", "zi", "=", "zi", ",", "Mi", "=", "Mi", ",", "z", "=", "z", ")", "for", "Mval", "in", "Mi", ":", "print", "(", "\"dM/dt for halo of mass %s at z=%s across redshift %s is: \"", "%", "(", "Mval", ",", "zi", ",", "z", ")", ")", "print", "(", "output", "[", "output", "[", "'Mi'", "]", "==", "Mval", "]", "[", "'dMdt'", "]", ".", "flatten", "(", ")", ")", "# Return the WMAP5 cosmology Halo Mass History for haloes with M(z=0) = 1e8", "M", "=", "[", "1e8", "]", "z", "=", "[", "0", ",", "0.5", ",", "1", ",", "1.5", ",", "2", ",", "2.5", "]", "print", "(", "\"Halo Mass History for z=0 mass of %s across z=%s\"", "%", "(", "M", 
",", "z", ")", ")", "output", "=", "commah", ".", "run", "(", "cosmology", "=", "cosmology", ",", "zi", "=", "0", ",", "Mi", "=", "M", ",", "z", "=", "z", ")", "print", "(", "output", "[", "'Mz'", "]", ".", "flatten", "(", ")", ")", "# Return the WMAP5 cosmology formation redshifts for haloes at", "# range of redshift and mass", "M", "=", "[", "1e8", ",", "1e9", ",", "1e10", "]", "z", "=", "[", "0", "]", "print", "(", "\"Formation Redshifts for haloes of mass %s at z=%s\"", "%", "(", "M", ",", "z", ")", ")", "output", "=", "commah", ".", "run", "(", "cosmology", "=", "cosmology", ",", "zi", "=", "0", ",", "Mi", "=", "M", ",", "z", "=", "z", ")", "for", "Mval", "in", "M", ":", "print", "(", "output", "[", "output", "[", "'Mi'", "]", "==", "Mval", "]", "[", "'zf'", "]", ".", "flatten", "(", ")", ")", "return", "(", "\"Done\"", ")" ]
Example interface commands
[ "Example", "interface", "commands" ]
train
https://github.com/astroduff/commah/blob/3ec70338c5123a053c79ddcf2cb3beac26bc9137/examples.py#L9-L90
astroduff/commah
examples.py
plotcommand
def plotcommand(cosmology='WMAP5', plotname=None): """ Example ways to interrogate the dataset and plot the commah output """ # Plot the c-M relation as a functon of redshift xarray = 10**(np.arange(1, 15, 0.2)) yval = 'c' # Specify the redshift range zarray = np.arange(0, 5, 0.5) xtitle = r"Halo Mass (M$_{sol}$)" ytitle = r"Concentration" linelabel = "z=" fig = plt.figure() ax = fig.add_subplot(111) ax.set_xlabel(xtitle) ax.set_ylabel(ytitle) plt.ylim([2, 30]) colors = cm.rainbow(np.linspace(0, 1, len(zarray))) for zind, zval in enumerate(zarray): output = commah.run(cosmology=cosmology, zi=zval, Mi=xarray) # Access the column yval from the data file yarray = output[yval].flatten() # Plot each line in turn with different colour ax.plot(xarray, yarray, label=linelabel+str(zval), color=colors[zind]) # Overplot the D08 predictions in black ax.plot(xarray, commah.commah.cduffy(zval, xarray), color="black") ax.set_xscale('log') ax.set_yscale('log') leg = ax.legend(loc=1) # Make box totally transparent leg.get_frame().set_alpha(0) leg.get_frame().set_edgecolor('white') for label in leg.get_texts(): label.set_fontsize('small') # the font size for label in leg.get_lines(): label.set_linewidth(4) # the legend line width if plotname: fig.tight_layout(pad=0.2) print("Plotting to '%s_CM_relation.png'" % (plotname)) fig.savefig(plotname+"_CM_relation.png", dpi=fig.dpi*5) else: plt.show() # Plot the c-z relation as a function of mass (so always Mz=M0) xarray = 10**(np.arange(0, 1, 0.05)) - 1 yval = 'c' # Specify the mass range zarray = 10**np.arange(6, 14, 2) xtitle = r"Redshift" ytitle = r"NFW Concentration" linelabel = r"log$_{10}$ M$_{z}$(M$_{sol}$)=" fig = plt.figure() ax = fig.add_subplot(111) ax.set_xlabel(xtitle) ax.set_ylabel(ytitle) colors = cm.rainbow(np.linspace(0, 1, len(zarray))) for zind, zval in enumerate(zarray): output = commah.run(cosmology=cosmology, zi=xarray, Mi=zval) # Access the column yval from the data file yarray = output[yval].flatten() # Plot each 
line in turn with different colours ax.plot(xarray, yarray, label=linelabel+"{0:.1f}".format(np.log10(zval)), color=colors[zind],) leg = ax.legend(loc=1) # Make box totally transparent leg.get_frame().set_alpha(0) leg.get_frame().set_edgecolor('white') for label in leg.get_texts(): label.set_fontsize('small') # the font size for label in leg.get_lines(): label.set_linewidth(4) # the legend line width if plotname: fig.tight_layout(pad=0.2) print("Plotting to '%s_Cz_relation.png'" % (plotname)) fig.savefig(plotname+"_Cz_relation.png", dpi=fig.dpi*5) else: plt.show() # Plot the zf-z relation for different masses (so always Mz=M0) xarray = 10**(np.arange(0, 1, 0.05)) - 1 yval = 'zf' # Specify the mass range zarray = 10**np.arange(6, 14, 2) xtitle = r"Redshift" ytitle = r"Formation Redshift" linelabel = r"log$_{10}$ M$_{z}$(M$_{sol}$)=" fig = plt.figure() ax = fig.add_subplot(111) ax.set_xlabel(xtitle) ax.set_ylabel(ytitle) colors = cm.rainbow(np.linspace(0, 1, len(zarray))) for zind, zval in enumerate(zarray): output = commah.run(cosmology=cosmology, zi=xarray, Mi=zval) yarray = output[yval].flatten() # Plot each line in turn with different colour ax.plot(xarray, yarray, label=linelabel+"{0:.1f}".format(np.log10(zval)), color=colors[zind],) leg = ax.legend(loc=2) # Make box totally transparent leg.get_frame().set_alpha(0) leg.get_frame().set_edgecolor('white') for label in leg.get_texts(): label.set_fontsize('small') # the font size for label in leg.get_lines(): label.set_linewidth(4) # the legend line width if plotname: fig.tight_layout(pad=0.2) print("Plotting to '%s_zfz_relation.png'" % (plotname)) fig.savefig(plotname+"_zfz_relation.png", dpi=fig.dpi*5) else: plt.show() # Plot the dM/dt-z relation for different masses (so always Mz=M0) xarray = 10**(np.arange(0, 1, 0.05)) - 1 yval = 'dMdt' # Specify the mass range zarray = 10**np.arange(10, 14, 0.5) xtitle = r"log$_{10}$ (1+z)" ytitle = r"log$_{10}$ Accretion Rate M$_{sol}$ yr$^{-1}$" linelabel = r"log$_{10}$ 
M$_z$(M$_{sol}$)=" fig = plt.figure() ax = fig.add_subplot(111) ax.set_xlabel(xtitle) ax.set_ylabel(ytitle) colors = cm.rainbow(np.linspace(0, 1, len(zarray))) cosmo = commah.getcosmo(cosmology) for zind, zval in enumerate(zarray): output = commah.run(cosmology=cosmology, zi=xarray, Mi=zval, com=False, mah=True) yarray = output[yval].flatten() # Plot each line in turn with different colour ax.plot(np.log10(xarray+1.), np.log10(yarray), label=linelabel+"{0:.1f}".format(np.log10(zval)), color=colors[zind],) # Plot the semi-analytic approximate formula from Correa et al 2015b semianalytic_approx = 71.6 * (zval / 1e12) * (cosmo['h'] / 0.7) *\ (-0.24 + 0.75 * (xarray + 1)) * np.sqrt( cosmo['omega_M_0'] * (xarray + 1)**3 + cosmo['omega_lambda_0']) ax.plot(np.log10(xarray + 1), np.log10(semianalytic_approx), color='black') leg = ax.legend(loc=2) # Make box totally transparent leg.get_frame().set_alpha(0) leg.get_frame().set_edgecolor('white') for label in leg.get_texts(): label.set_fontsize('small') # the font size for label in leg.get_lines(): label.set_linewidth(4) # the legend line width if plotname: fig.tight_layout(pad=0.2) print("Plotting to '%s_dMdtz_relation.png'" % (plotname)) fig.savefig(plotname+"_dMdtz_relation.png", dpi=fig.dpi*5) else: plt.show() # Plot the dMdt-M relation as a function of redshift xarray = 10**(np.arange(10, 14, 0.5)) yval = 'dMdt' # Specify the redshift range zarray = np.arange(0, 5, 0.5) xtitle = r"Halo Mass M$_{sol}$" ytitle = r"Accretion Rate M$_{sol}$ yr$^{-1}$" linelabel = "z=" fig = plt.figure() ax = fig.add_subplot(111) ax.set_xlabel(xtitle) ax.set_ylabel(ytitle) colors = cm.rainbow(np.linspace(0, 1, len(zarray))) for zind, zval in enumerate(zarray): output = commah.run(cosmology=cosmology, zi=zval, Mi=xarray, com=False, mah=True) yarray = output[yval].flatten() # Plot each line in turn with different colour ax.plot(xarray, yarray, label=linelabel+str(zval), color=colors[zind],) ax.set_xscale('log') ax.set_yscale('log') leg = 
ax.legend(loc=2) # Make box totally transparent leg.get_frame().set_alpha(0) leg.get_frame().set_edgecolor('white') for label in leg.get_texts(): label.set_fontsize('small') # the font size for label in leg.get_lines(): label.set_linewidth(4) # the legend line width if plotname: fig.tight_layout(pad=0.2) print("Plotting to '%s_MAH_M_relation.png'" % (plotname)) fig.savefig(plotname+"_MAH_M_relation.png", dpi=fig.dpi*5) else: plt.show() # Plot the (dM/M)dt-M relation as a function of redshift xarray = 10**(np.arange(10, 14, 0.5)) yval = 'dMdt' # Specify the redshift range zarray = np.arange(0, 5, 0.5) xtitle = r"Halo Mass M$_{sol}$" ytitle = r"Specific Accretion Rate yr$^{-1}$" linelabel = "z=" fig = plt.figure() ax = fig.add_subplot(111) ax.set_xlabel(xtitle) ax.set_ylabel(ytitle) colors = cm.rainbow(np.linspace(0, 1, len(zarray))) for zind, zval in enumerate(zarray): output = commah.run(cosmology=cosmology, zi=zval, Mi=xarray, mah=True, com=False) yarray = output[yval].flatten() # Plot each line in turn with different colour ax.plot(xarray, yarray/xarray, label=linelabel+str(zval), color=colors[zind],) ax.set_xscale('log') ax.set_yscale('log') leg = ax.legend(loc=1) # Make box totally transparent leg.get_frame().set_alpha(0) leg.get_frame().set_edgecolor('white') for label in leg.get_texts(): label.set_fontsize('small') # the font size for label in leg.get_lines(): label.set_linewidth(4) # the legend line width if plotname: fig.tight_layout(pad=0.2) print("Plotting to '%s_specificMAH_M_relation.png'" % (plotname)) fig.savefig(plotname+"_specificMAH_M_relation.png", dpi=fig.dpi*5) else: plt.show() # Plot the Mz-z relation as a function of mass # (so mass is decreasing to zero as z-> inf) xarray = 10**(np.arange(0, 1, 0.05)) - 1 yval = 'Mz' # Specify the mass range zarray = 10**np.arange(10, 14, 0.5) xtitle = r"Redshift" ytitle = r"M(z) (M$_{sol}$)" linelabel = r"log$_{10}$ M$_{0}$(M$_{sol}$)=" fig = plt.figure() ax = fig.add_subplot(111) ax.set_xlabel(xtitle) 
ax.set_ylabel(ytitle) colors = cm.rainbow(np.linspace(0, 1, len(zarray))) for zind, zval in enumerate(zarray): output = commah.run(cosmology=cosmology, zi=0, Mi=zval, z=xarray) yarray = output[yval].flatten() # Plot each line in turn with different colour ax.plot(xarray, yarray, label=linelabel+"{0:.1f}".format(np.log10(zval)), color=colors[zind],) ax.set_yscale('log') leg = ax.legend(loc=1) # Make box totally transparent leg.get_frame().set_alpha(0) leg.get_frame().set_edgecolor('white') for label in leg.get_texts(): label.set_fontsize('small') # the font size for label in leg.get_lines(): label.set_linewidth(4) # the legend line width if plotname: fig.tight_layout(pad=0.2) print("Plotting to '%s_Mzz_relation.png'" % (plotname)) fig.savefig(plotname+"_Mzz_relation.png", dpi=fig.dpi*5) else: plt.show() # Plot the Mz/M0-z relation as a function of mass xarray = 10**(np.arange(0, 1, 0.02)) - 1 yval = 'Mz' # Specify the mass range zarray = 10**np.arange(10, 14, 0.5) xtitle = r"Redshift" ytitle = r"log$_{10}$ M(z)/M$_{0}$" linelabel = r"log$_{10}$ M$_{0}$(M$_{sol}$)=" fig = plt.figure() ax = fig.add_subplot(111) ax.set_xlabel(xtitle) ax.set_ylabel(ytitle) colors = cm.rainbow(np.linspace(0, 1, len(zarray))) for zind, zval in enumerate(zarray): output = commah.run(cosmology=cosmology, zi=0, Mi=zval, z=xarray) yarray = output[yval].flatten() # Plot each line in turn with different colour ax.plot(xarray, np.log10(yarray/zval), label=linelabel+"{0:.1f}".format(np.log10(zval)), color=colors[zind],) leg = ax.legend(loc=3) # Make box totally transparent leg.get_frame().set_alpha(0) leg.get_frame().set_edgecolor('white') for label in leg.get_texts(): label.set_fontsize('small') # the font size for label in leg.get_lines(): label.set_linewidth(4) # the legend line width if plotname: fig.tight_layout(pad=0.2) print("Plotting to '%s_MzM0z_relation.png'" % (plotname)) fig.savefig(plotname+"_MzM0z_relation.png", dpi=fig.dpi*5) else: plt.show() return("Done")
python
def plotcommand(cosmology='WMAP5', plotname=None): """ Example ways to interrogate the dataset and plot the commah output """ # Plot the c-M relation as a functon of redshift xarray = 10**(np.arange(1, 15, 0.2)) yval = 'c' # Specify the redshift range zarray = np.arange(0, 5, 0.5) xtitle = r"Halo Mass (M$_{sol}$)" ytitle = r"Concentration" linelabel = "z=" fig = plt.figure() ax = fig.add_subplot(111) ax.set_xlabel(xtitle) ax.set_ylabel(ytitle) plt.ylim([2, 30]) colors = cm.rainbow(np.linspace(0, 1, len(zarray))) for zind, zval in enumerate(zarray): output = commah.run(cosmology=cosmology, zi=zval, Mi=xarray) # Access the column yval from the data file yarray = output[yval].flatten() # Plot each line in turn with different colour ax.plot(xarray, yarray, label=linelabel+str(zval), color=colors[zind]) # Overplot the D08 predictions in black ax.plot(xarray, commah.commah.cduffy(zval, xarray), color="black") ax.set_xscale('log') ax.set_yscale('log') leg = ax.legend(loc=1) # Make box totally transparent leg.get_frame().set_alpha(0) leg.get_frame().set_edgecolor('white') for label in leg.get_texts(): label.set_fontsize('small') # the font size for label in leg.get_lines(): label.set_linewidth(4) # the legend line width if plotname: fig.tight_layout(pad=0.2) print("Plotting to '%s_CM_relation.png'" % (plotname)) fig.savefig(plotname+"_CM_relation.png", dpi=fig.dpi*5) else: plt.show() # Plot the c-z relation as a function of mass (so always Mz=M0) xarray = 10**(np.arange(0, 1, 0.05)) - 1 yval = 'c' # Specify the mass range zarray = 10**np.arange(6, 14, 2) xtitle = r"Redshift" ytitle = r"NFW Concentration" linelabel = r"log$_{10}$ M$_{z}$(M$_{sol}$)=" fig = plt.figure() ax = fig.add_subplot(111) ax.set_xlabel(xtitle) ax.set_ylabel(ytitle) colors = cm.rainbow(np.linspace(0, 1, len(zarray))) for zind, zval in enumerate(zarray): output = commah.run(cosmology=cosmology, zi=xarray, Mi=zval) # Access the column yval from the data file yarray = output[yval].flatten() # Plot each 
line in turn with different colours ax.plot(xarray, yarray, label=linelabel+"{0:.1f}".format(np.log10(zval)), color=colors[zind],) leg = ax.legend(loc=1) # Make box totally transparent leg.get_frame().set_alpha(0) leg.get_frame().set_edgecolor('white') for label in leg.get_texts(): label.set_fontsize('small') # the font size for label in leg.get_lines(): label.set_linewidth(4) # the legend line width if plotname: fig.tight_layout(pad=0.2) print("Plotting to '%s_Cz_relation.png'" % (plotname)) fig.savefig(plotname+"_Cz_relation.png", dpi=fig.dpi*5) else: plt.show() # Plot the zf-z relation for different masses (so always Mz=M0) xarray = 10**(np.arange(0, 1, 0.05)) - 1 yval = 'zf' # Specify the mass range zarray = 10**np.arange(6, 14, 2) xtitle = r"Redshift" ytitle = r"Formation Redshift" linelabel = r"log$_{10}$ M$_{z}$(M$_{sol}$)=" fig = plt.figure() ax = fig.add_subplot(111) ax.set_xlabel(xtitle) ax.set_ylabel(ytitle) colors = cm.rainbow(np.linspace(0, 1, len(zarray))) for zind, zval in enumerate(zarray): output = commah.run(cosmology=cosmology, zi=xarray, Mi=zval) yarray = output[yval].flatten() # Plot each line in turn with different colour ax.plot(xarray, yarray, label=linelabel+"{0:.1f}".format(np.log10(zval)), color=colors[zind],) leg = ax.legend(loc=2) # Make box totally transparent leg.get_frame().set_alpha(0) leg.get_frame().set_edgecolor('white') for label in leg.get_texts(): label.set_fontsize('small') # the font size for label in leg.get_lines(): label.set_linewidth(4) # the legend line width if plotname: fig.tight_layout(pad=0.2) print("Plotting to '%s_zfz_relation.png'" % (plotname)) fig.savefig(plotname+"_zfz_relation.png", dpi=fig.dpi*5) else: plt.show() # Plot the dM/dt-z relation for different masses (so always Mz=M0) xarray = 10**(np.arange(0, 1, 0.05)) - 1 yval = 'dMdt' # Specify the mass range zarray = 10**np.arange(10, 14, 0.5) xtitle = r"log$_{10}$ (1+z)" ytitle = r"log$_{10}$ Accretion Rate M$_{sol}$ yr$^{-1}$" linelabel = r"log$_{10}$ 
M$_z$(M$_{sol}$)=" fig = plt.figure() ax = fig.add_subplot(111) ax.set_xlabel(xtitle) ax.set_ylabel(ytitle) colors = cm.rainbow(np.linspace(0, 1, len(zarray))) cosmo = commah.getcosmo(cosmology) for zind, zval in enumerate(zarray): output = commah.run(cosmology=cosmology, zi=xarray, Mi=zval, com=False, mah=True) yarray = output[yval].flatten() # Plot each line in turn with different colour ax.plot(np.log10(xarray+1.), np.log10(yarray), label=linelabel+"{0:.1f}".format(np.log10(zval)), color=colors[zind],) # Plot the semi-analytic approximate formula from Correa et al 2015b semianalytic_approx = 71.6 * (zval / 1e12) * (cosmo['h'] / 0.7) *\ (-0.24 + 0.75 * (xarray + 1)) * np.sqrt( cosmo['omega_M_0'] * (xarray + 1)**3 + cosmo['omega_lambda_0']) ax.plot(np.log10(xarray + 1), np.log10(semianalytic_approx), color='black') leg = ax.legend(loc=2) # Make box totally transparent leg.get_frame().set_alpha(0) leg.get_frame().set_edgecolor('white') for label in leg.get_texts(): label.set_fontsize('small') # the font size for label in leg.get_lines(): label.set_linewidth(4) # the legend line width if plotname: fig.tight_layout(pad=0.2) print("Plotting to '%s_dMdtz_relation.png'" % (plotname)) fig.savefig(plotname+"_dMdtz_relation.png", dpi=fig.dpi*5) else: plt.show() # Plot the dMdt-M relation as a function of redshift xarray = 10**(np.arange(10, 14, 0.5)) yval = 'dMdt' # Specify the redshift range zarray = np.arange(0, 5, 0.5) xtitle = r"Halo Mass M$_{sol}$" ytitle = r"Accretion Rate M$_{sol}$ yr$^{-1}$" linelabel = "z=" fig = plt.figure() ax = fig.add_subplot(111) ax.set_xlabel(xtitle) ax.set_ylabel(ytitle) colors = cm.rainbow(np.linspace(0, 1, len(zarray))) for zind, zval in enumerate(zarray): output = commah.run(cosmology=cosmology, zi=zval, Mi=xarray, com=False, mah=True) yarray = output[yval].flatten() # Plot each line in turn with different colour ax.plot(xarray, yarray, label=linelabel+str(zval), color=colors[zind],) ax.set_xscale('log') ax.set_yscale('log') leg = 
ax.legend(loc=2) # Make box totally transparent leg.get_frame().set_alpha(0) leg.get_frame().set_edgecolor('white') for label in leg.get_texts(): label.set_fontsize('small') # the font size for label in leg.get_lines(): label.set_linewidth(4) # the legend line width if plotname: fig.tight_layout(pad=0.2) print("Plotting to '%s_MAH_M_relation.png'" % (plotname)) fig.savefig(plotname+"_MAH_M_relation.png", dpi=fig.dpi*5) else: plt.show() # Plot the (dM/M)dt-M relation as a function of redshift xarray = 10**(np.arange(10, 14, 0.5)) yval = 'dMdt' # Specify the redshift range zarray = np.arange(0, 5, 0.5) xtitle = r"Halo Mass M$_{sol}$" ytitle = r"Specific Accretion Rate yr$^{-1}$" linelabel = "z=" fig = plt.figure() ax = fig.add_subplot(111) ax.set_xlabel(xtitle) ax.set_ylabel(ytitle) colors = cm.rainbow(np.linspace(0, 1, len(zarray))) for zind, zval in enumerate(zarray): output = commah.run(cosmology=cosmology, zi=zval, Mi=xarray, mah=True, com=False) yarray = output[yval].flatten() # Plot each line in turn with different colour ax.plot(xarray, yarray/xarray, label=linelabel+str(zval), color=colors[zind],) ax.set_xscale('log') ax.set_yscale('log') leg = ax.legend(loc=1) # Make box totally transparent leg.get_frame().set_alpha(0) leg.get_frame().set_edgecolor('white') for label in leg.get_texts(): label.set_fontsize('small') # the font size for label in leg.get_lines(): label.set_linewidth(4) # the legend line width if plotname: fig.tight_layout(pad=0.2) print("Plotting to '%s_specificMAH_M_relation.png'" % (plotname)) fig.savefig(plotname+"_specificMAH_M_relation.png", dpi=fig.dpi*5) else: plt.show() # Plot the Mz-z relation as a function of mass # (so mass is decreasing to zero as z-> inf) xarray = 10**(np.arange(0, 1, 0.05)) - 1 yval = 'Mz' # Specify the mass range zarray = 10**np.arange(10, 14, 0.5) xtitle = r"Redshift" ytitle = r"M(z) (M$_{sol}$)" linelabel = r"log$_{10}$ M$_{0}$(M$_{sol}$)=" fig = plt.figure() ax = fig.add_subplot(111) ax.set_xlabel(xtitle) 
ax.set_ylabel(ytitle) colors = cm.rainbow(np.linspace(0, 1, len(zarray))) for zind, zval in enumerate(zarray): output = commah.run(cosmology=cosmology, zi=0, Mi=zval, z=xarray) yarray = output[yval].flatten() # Plot each line in turn with different colour ax.plot(xarray, yarray, label=linelabel+"{0:.1f}".format(np.log10(zval)), color=colors[zind],) ax.set_yscale('log') leg = ax.legend(loc=1) # Make box totally transparent leg.get_frame().set_alpha(0) leg.get_frame().set_edgecolor('white') for label in leg.get_texts(): label.set_fontsize('small') # the font size for label in leg.get_lines(): label.set_linewidth(4) # the legend line width if plotname: fig.tight_layout(pad=0.2) print("Plotting to '%s_Mzz_relation.png'" % (plotname)) fig.savefig(plotname+"_Mzz_relation.png", dpi=fig.dpi*5) else: plt.show() # Plot the Mz/M0-z relation as a function of mass xarray = 10**(np.arange(0, 1, 0.02)) - 1 yval = 'Mz' # Specify the mass range zarray = 10**np.arange(10, 14, 0.5) xtitle = r"Redshift" ytitle = r"log$_{10}$ M(z)/M$_{0}$" linelabel = r"log$_{10}$ M$_{0}$(M$_{sol}$)=" fig = plt.figure() ax = fig.add_subplot(111) ax.set_xlabel(xtitle) ax.set_ylabel(ytitle) colors = cm.rainbow(np.linspace(0, 1, len(zarray))) for zind, zval in enumerate(zarray): output = commah.run(cosmology=cosmology, zi=0, Mi=zval, z=xarray) yarray = output[yval].flatten() # Plot each line in turn with different colour ax.plot(xarray, np.log10(yarray/zval), label=linelabel+"{0:.1f}".format(np.log10(zval)), color=colors[zind],) leg = ax.legend(loc=3) # Make box totally transparent leg.get_frame().set_alpha(0) leg.get_frame().set_edgecolor('white') for label in leg.get_texts(): label.set_fontsize('small') # the font size for label in leg.get_lines(): label.set_linewidth(4) # the legend line width if plotname: fig.tight_layout(pad=0.2) print("Plotting to '%s_MzM0z_relation.png'" % (plotname)) fig.savefig(plotname+"_MzM0z_relation.png", dpi=fig.dpi*5) else: plt.show() return("Done")
[ "def", "plotcommand", "(", "cosmology", "=", "'WMAP5'", ",", "plotname", "=", "None", ")", ":", "# Plot the c-M relation as a functon of redshift", "xarray", "=", "10", "**", "(", "np", ".", "arange", "(", "1", ",", "15", ",", "0.2", ")", ")", "yval", "=", "'c'", "# Specify the redshift range", "zarray", "=", "np", ".", "arange", "(", "0", ",", "5", ",", "0.5", ")", "xtitle", "=", "r\"Halo Mass (M$_{sol}$)\"", "ytitle", "=", "r\"Concentration\"", "linelabel", "=", "\"z=\"", "fig", "=", "plt", ".", "figure", "(", ")", "ax", "=", "fig", ".", "add_subplot", "(", "111", ")", "ax", ".", "set_xlabel", "(", "xtitle", ")", "ax", ".", "set_ylabel", "(", "ytitle", ")", "plt", ".", "ylim", "(", "[", "2", ",", "30", "]", ")", "colors", "=", "cm", ".", "rainbow", "(", "np", ".", "linspace", "(", "0", ",", "1", ",", "len", "(", "zarray", ")", ")", ")", "for", "zind", ",", "zval", "in", "enumerate", "(", "zarray", ")", ":", "output", "=", "commah", ".", "run", "(", "cosmology", "=", "cosmology", ",", "zi", "=", "zval", ",", "Mi", "=", "xarray", ")", "# Access the column yval from the data file", "yarray", "=", "output", "[", "yval", "]", ".", "flatten", "(", ")", "# Plot each line in turn with different colour", "ax", ".", "plot", "(", "xarray", ",", "yarray", ",", "label", "=", "linelabel", "+", "str", "(", "zval", ")", ",", "color", "=", "colors", "[", "zind", "]", ")", "# Overplot the D08 predictions in black", "ax", ".", "plot", "(", "xarray", ",", "commah", ".", "commah", ".", "cduffy", "(", "zval", ",", "xarray", ")", ",", "color", "=", "\"black\"", ")", "ax", ".", "set_xscale", "(", "'log'", ")", "ax", ".", "set_yscale", "(", "'log'", ")", "leg", "=", "ax", ".", "legend", "(", "loc", "=", "1", ")", "# Make box totally transparent", "leg", ".", "get_frame", "(", ")", ".", "set_alpha", "(", "0", ")", "leg", ".", "get_frame", "(", ")", ".", "set_edgecolor", "(", "'white'", ")", "for", "label", "in", "leg", ".", "get_texts", "(", ")", ":", "label", ".", "set_fontsize", "(", 
"'small'", ")", "# the font size", "for", "label", "in", "leg", ".", "get_lines", "(", ")", ":", "label", ".", "set_linewidth", "(", "4", ")", "# the legend line width", "if", "plotname", ":", "fig", ".", "tight_layout", "(", "pad", "=", "0.2", ")", "print", "(", "\"Plotting to '%s_CM_relation.png'\"", "%", "(", "plotname", ")", ")", "fig", ".", "savefig", "(", "plotname", "+", "\"_CM_relation.png\"", ",", "dpi", "=", "fig", ".", "dpi", "*", "5", ")", "else", ":", "plt", ".", "show", "(", ")", "# Plot the c-z relation as a function of mass (so always Mz=M0)", "xarray", "=", "10", "**", "(", "np", ".", "arange", "(", "0", ",", "1", ",", "0.05", ")", ")", "-", "1", "yval", "=", "'c'", "# Specify the mass range", "zarray", "=", "10", "**", "np", ".", "arange", "(", "6", ",", "14", ",", "2", ")", "xtitle", "=", "r\"Redshift\"", "ytitle", "=", "r\"NFW Concentration\"", "linelabel", "=", "r\"log$_{10}$ M$_{z}$(M$_{sol}$)=\"", "fig", "=", "plt", ".", "figure", "(", ")", "ax", "=", "fig", ".", "add_subplot", "(", "111", ")", "ax", ".", "set_xlabel", "(", "xtitle", ")", "ax", ".", "set_ylabel", "(", "ytitle", ")", "colors", "=", "cm", ".", "rainbow", "(", "np", ".", "linspace", "(", "0", ",", "1", ",", "len", "(", "zarray", ")", ")", ")", "for", "zind", ",", "zval", "in", "enumerate", "(", "zarray", ")", ":", "output", "=", "commah", ".", "run", "(", "cosmology", "=", "cosmology", ",", "zi", "=", "xarray", ",", "Mi", "=", "zval", ")", "# Access the column yval from the data file", "yarray", "=", "output", "[", "yval", "]", ".", "flatten", "(", ")", "# Plot each line in turn with different colours", "ax", ".", "plot", "(", "xarray", ",", "yarray", ",", "label", "=", "linelabel", "+", "\"{0:.1f}\"", ".", "format", "(", "np", ".", "log10", "(", "zval", ")", ")", ",", "color", "=", "colors", "[", "zind", "]", ",", ")", "leg", "=", "ax", ".", "legend", "(", "loc", "=", "1", ")", "# Make box totally transparent", "leg", ".", "get_frame", "(", ")", ".", "set_alpha", "(", "0", ")", 
"leg", ".", "get_frame", "(", ")", ".", "set_edgecolor", "(", "'white'", ")", "for", "label", "in", "leg", ".", "get_texts", "(", ")", ":", "label", ".", "set_fontsize", "(", "'small'", ")", "# the font size", "for", "label", "in", "leg", ".", "get_lines", "(", ")", ":", "label", ".", "set_linewidth", "(", "4", ")", "# the legend line width", "if", "plotname", ":", "fig", ".", "tight_layout", "(", "pad", "=", "0.2", ")", "print", "(", "\"Plotting to '%s_Cz_relation.png'\"", "%", "(", "plotname", ")", ")", "fig", ".", "savefig", "(", "plotname", "+", "\"_Cz_relation.png\"", ",", "dpi", "=", "fig", ".", "dpi", "*", "5", ")", "else", ":", "plt", ".", "show", "(", ")", "# Plot the zf-z relation for different masses (so always Mz=M0)", "xarray", "=", "10", "**", "(", "np", ".", "arange", "(", "0", ",", "1", ",", "0.05", ")", ")", "-", "1", "yval", "=", "'zf'", "# Specify the mass range", "zarray", "=", "10", "**", "np", ".", "arange", "(", "6", ",", "14", ",", "2", ")", "xtitle", "=", "r\"Redshift\"", "ytitle", "=", "r\"Formation Redshift\"", "linelabel", "=", "r\"log$_{10}$ M$_{z}$(M$_{sol}$)=\"", "fig", "=", "plt", ".", "figure", "(", ")", "ax", "=", "fig", ".", "add_subplot", "(", "111", ")", "ax", ".", "set_xlabel", "(", "xtitle", ")", "ax", ".", "set_ylabel", "(", "ytitle", ")", "colors", "=", "cm", ".", "rainbow", "(", "np", ".", "linspace", "(", "0", ",", "1", ",", "len", "(", "zarray", ")", ")", ")", "for", "zind", ",", "zval", "in", "enumerate", "(", "zarray", ")", ":", "output", "=", "commah", ".", "run", "(", "cosmology", "=", "cosmology", ",", "zi", "=", "xarray", ",", "Mi", "=", "zval", ")", "yarray", "=", "output", "[", "yval", "]", ".", "flatten", "(", ")", "# Plot each line in turn with different colour", "ax", ".", "plot", "(", "xarray", ",", "yarray", ",", "label", "=", "linelabel", "+", "\"{0:.1f}\"", ".", "format", "(", "np", ".", "log10", "(", "zval", ")", ")", ",", "color", "=", "colors", "[", "zind", "]", ",", ")", "leg", "=", "ax", ".", "legend", 
"(", "loc", "=", "2", ")", "# Make box totally transparent", "leg", ".", "get_frame", "(", ")", ".", "set_alpha", "(", "0", ")", "leg", ".", "get_frame", "(", ")", ".", "set_edgecolor", "(", "'white'", ")", "for", "label", "in", "leg", ".", "get_texts", "(", ")", ":", "label", ".", "set_fontsize", "(", "'small'", ")", "# the font size", "for", "label", "in", "leg", ".", "get_lines", "(", ")", ":", "label", ".", "set_linewidth", "(", "4", ")", "# the legend line width", "if", "plotname", ":", "fig", ".", "tight_layout", "(", "pad", "=", "0.2", ")", "print", "(", "\"Plotting to '%s_zfz_relation.png'\"", "%", "(", "plotname", ")", ")", "fig", ".", "savefig", "(", "plotname", "+", "\"_zfz_relation.png\"", ",", "dpi", "=", "fig", ".", "dpi", "*", "5", ")", "else", ":", "plt", ".", "show", "(", ")", "# Plot the dM/dt-z relation for different masses (so always Mz=M0)", "xarray", "=", "10", "**", "(", "np", ".", "arange", "(", "0", ",", "1", ",", "0.05", ")", ")", "-", "1", "yval", "=", "'dMdt'", "# Specify the mass range", "zarray", "=", "10", "**", "np", ".", "arange", "(", "10", ",", "14", ",", "0.5", ")", "xtitle", "=", "r\"log$_{10}$ (1+z)\"", "ytitle", "=", "r\"log$_{10}$ Accretion Rate M$_{sol}$ yr$^{-1}$\"", "linelabel", "=", "r\"log$_{10}$ M$_z$(M$_{sol}$)=\"", "fig", "=", "plt", ".", "figure", "(", ")", "ax", "=", "fig", ".", "add_subplot", "(", "111", ")", "ax", ".", "set_xlabel", "(", "xtitle", ")", "ax", ".", "set_ylabel", "(", "ytitle", ")", "colors", "=", "cm", ".", "rainbow", "(", "np", ".", "linspace", "(", "0", ",", "1", ",", "len", "(", "zarray", ")", ")", ")", "cosmo", "=", "commah", ".", "getcosmo", "(", "cosmology", ")", "for", "zind", ",", "zval", "in", "enumerate", "(", "zarray", ")", ":", "output", "=", "commah", ".", "run", "(", "cosmology", "=", "cosmology", ",", "zi", "=", "xarray", ",", "Mi", "=", "zval", ",", "com", "=", "False", ",", "mah", "=", "True", ")", "yarray", "=", "output", "[", "yval", "]", ".", "flatten", "(", ")", "# Plot each 
line in turn with different colour", "ax", ".", "plot", "(", "np", ".", "log10", "(", "xarray", "+", "1.", ")", ",", "np", ".", "log10", "(", "yarray", ")", ",", "label", "=", "linelabel", "+", "\"{0:.1f}\"", ".", "format", "(", "np", ".", "log10", "(", "zval", ")", ")", ",", "color", "=", "colors", "[", "zind", "]", ",", ")", "# Plot the semi-analytic approximate formula from Correa et al 2015b", "semianalytic_approx", "=", "71.6", "*", "(", "zval", "/", "1e12", ")", "*", "(", "cosmo", "[", "'h'", "]", "/", "0.7", ")", "*", "(", "-", "0.24", "+", "0.75", "*", "(", "xarray", "+", "1", ")", ")", "*", "np", ".", "sqrt", "(", "cosmo", "[", "'omega_M_0'", "]", "*", "(", "xarray", "+", "1", ")", "**", "3", "+", "cosmo", "[", "'omega_lambda_0'", "]", ")", "ax", ".", "plot", "(", "np", ".", "log10", "(", "xarray", "+", "1", ")", ",", "np", ".", "log10", "(", "semianalytic_approx", ")", ",", "color", "=", "'black'", ")", "leg", "=", "ax", ".", "legend", "(", "loc", "=", "2", ")", "# Make box totally transparent", "leg", ".", "get_frame", "(", ")", ".", "set_alpha", "(", "0", ")", "leg", ".", "get_frame", "(", ")", ".", "set_edgecolor", "(", "'white'", ")", "for", "label", "in", "leg", ".", "get_texts", "(", ")", ":", "label", ".", "set_fontsize", "(", "'small'", ")", "# the font size", "for", "label", "in", "leg", ".", "get_lines", "(", ")", ":", "label", ".", "set_linewidth", "(", "4", ")", "# the legend line width", "if", "plotname", ":", "fig", ".", "tight_layout", "(", "pad", "=", "0.2", ")", "print", "(", "\"Plotting to '%s_dMdtz_relation.png'\"", "%", "(", "plotname", ")", ")", "fig", ".", "savefig", "(", "plotname", "+", "\"_dMdtz_relation.png\"", ",", "dpi", "=", "fig", ".", "dpi", "*", "5", ")", "else", ":", "plt", ".", "show", "(", ")", "# Plot the dMdt-M relation as a function of redshift", "xarray", "=", "10", "**", "(", "np", ".", "arange", "(", "10", ",", "14", ",", "0.5", ")", ")", "yval", "=", "'dMdt'", "# Specify the redshift range", "zarray", "=", "np", 
".", "arange", "(", "0", ",", "5", ",", "0.5", ")", "xtitle", "=", "r\"Halo Mass M$_{sol}$\"", "ytitle", "=", "r\"Accretion Rate M$_{sol}$ yr$^{-1}$\"", "linelabel", "=", "\"z=\"", "fig", "=", "plt", ".", "figure", "(", ")", "ax", "=", "fig", ".", "add_subplot", "(", "111", ")", "ax", ".", "set_xlabel", "(", "xtitle", ")", "ax", ".", "set_ylabel", "(", "ytitle", ")", "colors", "=", "cm", ".", "rainbow", "(", "np", ".", "linspace", "(", "0", ",", "1", ",", "len", "(", "zarray", ")", ")", ")", "for", "zind", ",", "zval", "in", "enumerate", "(", "zarray", ")", ":", "output", "=", "commah", ".", "run", "(", "cosmology", "=", "cosmology", ",", "zi", "=", "zval", ",", "Mi", "=", "xarray", ",", "com", "=", "False", ",", "mah", "=", "True", ")", "yarray", "=", "output", "[", "yval", "]", ".", "flatten", "(", ")", "# Plot each line in turn with different colour", "ax", ".", "plot", "(", "xarray", ",", "yarray", ",", "label", "=", "linelabel", "+", "str", "(", "zval", ")", ",", "color", "=", "colors", "[", "zind", "]", ",", ")", "ax", ".", "set_xscale", "(", "'log'", ")", "ax", ".", "set_yscale", "(", "'log'", ")", "leg", "=", "ax", ".", "legend", "(", "loc", "=", "2", ")", "# Make box totally transparent", "leg", ".", "get_frame", "(", ")", ".", "set_alpha", "(", "0", ")", "leg", ".", "get_frame", "(", ")", ".", "set_edgecolor", "(", "'white'", ")", "for", "label", "in", "leg", ".", "get_texts", "(", ")", ":", "label", ".", "set_fontsize", "(", "'small'", ")", "# the font size", "for", "label", "in", "leg", ".", "get_lines", "(", ")", ":", "label", ".", "set_linewidth", "(", "4", ")", "# the legend line width", "if", "plotname", ":", "fig", ".", "tight_layout", "(", "pad", "=", "0.2", ")", "print", "(", "\"Plotting to '%s_MAH_M_relation.png'\"", "%", "(", "plotname", ")", ")", "fig", ".", "savefig", "(", "plotname", "+", "\"_MAH_M_relation.png\"", ",", "dpi", "=", "fig", ".", "dpi", "*", "5", ")", "else", ":", "plt", ".", "show", "(", ")", "# Plot the (dM/M)dt-M relation as 
a function of redshift", "xarray", "=", "10", "**", "(", "np", ".", "arange", "(", "10", ",", "14", ",", "0.5", ")", ")", "yval", "=", "'dMdt'", "# Specify the redshift range", "zarray", "=", "np", ".", "arange", "(", "0", ",", "5", ",", "0.5", ")", "xtitle", "=", "r\"Halo Mass M$_{sol}$\"", "ytitle", "=", "r\"Specific Accretion Rate yr$^{-1}$\"", "linelabel", "=", "\"z=\"", "fig", "=", "plt", ".", "figure", "(", ")", "ax", "=", "fig", ".", "add_subplot", "(", "111", ")", "ax", ".", "set_xlabel", "(", "xtitle", ")", "ax", ".", "set_ylabel", "(", "ytitle", ")", "colors", "=", "cm", ".", "rainbow", "(", "np", ".", "linspace", "(", "0", ",", "1", ",", "len", "(", "zarray", ")", ")", ")", "for", "zind", ",", "zval", "in", "enumerate", "(", "zarray", ")", ":", "output", "=", "commah", ".", "run", "(", "cosmology", "=", "cosmology", ",", "zi", "=", "zval", ",", "Mi", "=", "xarray", ",", "mah", "=", "True", ",", "com", "=", "False", ")", "yarray", "=", "output", "[", "yval", "]", ".", "flatten", "(", ")", "# Plot each line in turn with different colour", "ax", ".", "plot", "(", "xarray", ",", "yarray", "/", "xarray", ",", "label", "=", "linelabel", "+", "str", "(", "zval", ")", ",", "color", "=", "colors", "[", "zind", "]", ",", ")", "ax", ".", "set_xscale", "(", "'log'", ")", "ax", ".", "set_yscale", "(", "'log'", ")", "leg", "=", "ax", ".", "legend", "(", "loc", "=", "1", ")", "# Make box totally transparent", "leg", ".", "get_frame", "(", ")", ".", "set_alpha", "(", "0", ")", "leg", ".", "get_frame", "(", ")", ".", "set_edgecolor", "(", "'white'", ")", "for", "label", "in", "leg", ".", "get_texts", "(", ")", ":", "label", ".", "set_fontsize", "(", "'small'", ")", "# the font size", "for", "label", "in", "leg", ".", "get_lines", "(", ")", ":", "label", ".", "set_linewidth", "(", "4", ")", "# the legend line width", "if", "plotname", ":", "fig", ".", "tight_layout", "(", "pad", "=", "0.2", ")", "print", "(", "\"Plotting to '%s_specificMAH_M_relation.png'\"", "%", "(", 
"plotname", ")", ")", "fig", ".", "savefig", "(", "plotname", "+", "\"_specificMAH_M_relation.png\"", ",", "dpi", "=", "fig", ".", "dpi", "*", "5", ")", "else", ":", "plt", ".", "show", "(", ")", "# Plot the Mz-z relation as a function of mass", "# (so mass is decreasing to zero as z-> inf)", "xarray", "=", "10", "**", "(", "np", ".", "arange", "(", "0", ",", "1", ",", "0.05", ")", ")", "-", "1", "yval", "=", "'Mz'", "# Specify the mass range", "zarray", "=", "10", "**", "np", ".", "arange", "(", "10", ",", "14", ",", "0.5", ")", "xtitle", "=", "r\"Redshift\"", "ytitle", "=", "r\"M(z) (M$_{sol}$)\"", "linelabel", "=", "r\"log$_{10}$ M$_{0}$(M$_{sol}$)=\"", "fig", "=", "plt", ".", "figure", "(", ")", "ax", "=", "fig", ".", "add_subplot", "(", "111", ")", "ax", ".", "set_xlabel", "(", "xtitle", ")", "ax", ".", "set_ylabel", "(", "ytitle", ")", "colors", "=", "cm", ".", "rainbow", "(", "np", ".", "linspace", "(", "0", ",", "1", ",", "len", "(", "zarray", ")", ")", ")", "for", "zind", ",", "zval", "in", "enumerate", "(", "zarray", ")", ":", "output", "=", "commah", ".", "run", "(", "cosmology", "=", "cosmology", ",", "zi", "=", "0", ",", "Mi", "=", "zval", ",", "z", "=", "xarray", ")", "yarray", "=", "output", "[", "yval", "]", ".", "flatten", "(", ")", "# Plot each line in turn with different colour", "ax", ".", "plot", "(", "xarray", ",", "yarray", ",", "label", "=", "linelabel", "+", "\"{0:.1f}\"", ".", "format", "(", "np", ".", "log10", "(", "zval", ")", ")", ",", "color", "=", "colors", "[", "zind", "]", ",", ")", "ax", ".", "set_yscale", "(", "'log'", ")", "leg", "=", "ax", ".", "legend", "(", "loc", "=", "1", ")", "# Make box totally transparent", "leg", ".", "get_frame", "(", ")", ".", "set_alpha", "(", "0", ")", "leg", ".", "get_frame", "(", ")", ".", "set_edgecolor", "(", "'white'", ")", "for", "label", "in", "leg", ".", "get_texts", "(", ")", ":", "label", ".", "set_fontsize", "(", "'small'", ")", "# the font size", "for", "label", "in", "leg", ".", 
"get_lines", "(", ")", ":", "label", ".", "set_linewidth", "(", "4", ")", "# the legend line width", "if", "plotname", ":", "fig", ".", "tight_layout", "(", "pad", "=", "0.2", ")", "print", "(", "\"Plotting to '%s_Mzz_relation.png'\"", "%", "(", "plotname", ")", ")", "fig", ".", "savefig", "(", "plotname", "+", "\"_Mzz_relation.png\"", ",", "dpi", "=", "fig", ".", "dpi", "*", "5", ")", "else", ":", "plt", ".", "show", "(", ")", "# Plot the Mz/M0-z relation as a function of mass", "xarray", "=", "10", "**", "(", "np", ".", "arange", "(", "0", ",", "1", ",", "0.02", ")", ")", "-", "1", "yval", "=", "'Mz'", "# Specify the mass range", "zarray", "=", "10", "**", "np", ".", "arange", "(", "10", ",", "14", ",", "0.5", ")", "xtitle", "=", "r\"Redshift\"", "ytitle", "=", "r\"log$_{10}$ M(z)/M$_{0}$\"", "linelabel", "=", "r\"log$_{10}$ M$_{0}$(M$_{sol}$)=\"", "fig", "=", "plt", ".", "figure", "(", ")", "ax", "=", "fig", ".", "add_subplot", "(", "111", ")", "ax", ".", "set_xlabel", "(", "xtitle", ")", "ax", ".", "set_ylabel", "(", "ytitle", ")", "colors", "=", "cm", ".", "rainbow", "(", "np", ".", "linspace", "(", "0", ",", "1", ",", "len", "(", "zarray", ")", ")", ")", "for", "zind", ",", "zval", "in", "enumerate", "(", "zarray", ")", ":", "output", "=", "commah", ".", "run", "(", "cosmology", "=", "cosmology", ",", "zi", "=", "0", ",", "Mi", "=", "zval", ",", "z", "=", "xarray", ")", "yarray", "=", "output", "[", "yval", "]", ".", "flatten", "(", ")", "# Plot each line in turn with different colour", "ax", ".", "plot", "(", "xarray", ",", "np", ".", "log10", "(", "yarray", "/", "zval", ")", ",", "label", "=", "linelabel", "+", "\"{0:.1f}\"", ".", "format", "(", "np", ".", "log10", "(", "zval", ")", ")", ",", "color", "=", "colors", "[", "zind", "]", ",", ")", "leg", "=", "ax", ".", "legend", "(", "loc", "=", "3", ")", "# Make box totally transparent", "leg", ".", "get_frame", "(", ")", ".", "set_alpha", "(", "0", ")", "leg", ".", "get_frame", "(", ")", ".", 
"set_edgecolor", "(", "'white'", ")", "for", "label", "in", "leg", ".", "get_texts", "(", ")", ":", "label", ".", "set_fontsize", "(", "'small'", ")", "# the font size", "for", "label", "in", "leg", ".", "get_lines", "(", ")", ":", "label", ".", "set_linewidth", "(", "4", ")", "# the legend line width", "if", "plotname", ":", "fig", ".", "tight_layout", "(", "pad", "=", "0.2", ")", "print", "(", "\"Plotting to '%s_MzM0z_relation.png'\"", "%", "(", "plotname", ")", ")", "fig", ".", "savefig", "(", "plotname", "+", "\"_MzM0z_relation.png\"", ",", "dpi", "=", "fig", ".", "dpi", "*", "5", ")", "else", ":", "plt", ".", "show", "(", ")", "return", "(", "\"Done\"", ")" ]
Example ways to interrogate the dataset and plot the commah output
[ "Example", "ways", "to", "interrogate", "the", "dataset", "and", "plot", "the", "commah", "output" ]
train
https://github.com/astroduff/commah/blob/3ec70338c5123a053c79ddcf2cb3beac26bc9137/examples.py#L93-L466
davidblaisonneau-orange/foreman
foreman/itemHostsGroup.py
ItemHostsGroup.enhance
def enhance(self): """ Function enhance Enhance the object with new item or enhanced items """ self.update({'puppetclasses': SubDict(self.api, self.objName, self.payloadObj, self.key, SubItemPuppetClasses)}) self.update({'parameters': SubDict(self.api, self.objName, self.payloadObj, self.key, SubItemParameter)}) self.update({'smart_class_parameters': SubDict(self.api, self.objName, self.payloadObj, self.key, ItemSmartClassParameter)})
python
def enhance(self): """ Function enhance Enhance the object with new item or enhanced items """ self.update({'puppetclasses': SubDict(self.api, self.objName, self.payloadObj, self.key, SubItemPuppetClasses)}) self.update({'parameters': SubDict(self.api, self.objName, self.payloadObj, self.key, SubItemParameter)}) self.update({'smart_class_parameters': SubDict(self.api, self.objName, self.payloadObj, self.key, ItemSmartClassParameter)})
[ "def", "enhance", "(", "self", ")", ":", "self", ".", "update", "(", "{", "'puppetclasses'", ":", "SubDict", "(", "self", ".", "api", ",", "self", ".", "objName", ",", "self", ".", "payloadObj", ",", "self", ".", "key", ",", "SubItemPuppetClasses", ")", "}", ")", "self", ".", "update", "(", "{", "'parameters'", ":", "SubDict", "(", "self", ".", "api", ",", "self", ".", "objName", ",", "self", ".", "payloadObj", ",", "self", ".", "key", ",", "SubItemParameter", ")", "}", ")", "self", ".", "update", "(", "{", "'smart_class_parameters'", ":", "SubDict", "(", "self", ".", "api", ",", "self", ".", "objName", ",", "self", ".", "payloadObj", ",", "self", ".", "key", ",", "ItemSmartClassParameter", ")", "}", ")" ]
Function enhance Enhance the object with new item or enhanced items
[ "Function", "enhance", "Enhance", "the", "object", "with", "new", "item", "or", "enhanced", "items" ]
train
https://github.com/davidblaisonneau-orange/foreman/blob/acb8fd8d74657cfac3b25c82e9c6028b93eb6c92/foreman/itemHostsGroup.py#L38-L53
aroberge/experimental
experimental/core/transforms.py
add_transformers
def add_transformers(line): '''Extract the transformers names from a line of code of the form from __experimental__ import transformer1 [,...] and adds them to the globally known dict ''' assert FROM_EXPERIMENTAL.match(line) line = FROM_EXPERIMENTAL.sub(' ', line) # we now have: " transformer1 [,...]" line = line.split("#")[0] # remove any end of line comments # and insert each transformer as an item in a list for trans in line.replace(' ', '').split(','): import_transformer(trans)
python
def add_transformers(line): '''Extract the transformers names from a line of code of the form from __experimental__ import transformer1 [,...] and adds them to the globally known dict ''' assert FROM_EXPERIMENTAL.match(line) line = FROM_EXPERIMENTAL.sub(' ', line) # we now have: " transformer1 [,...]" line = line.split("#")[0] # remove any end of line comments # and insert each transformer as an item in a list for trans in line.replace(' ', '').split(','): import_transformer(trans)
[ "def", "add_transformers", "(", "line", ")", ":", "assert", "FROM_EXPERIMENTAL", ".", "match", "(", "line", ")", "line", "=", "FROM_EXPERIMENTAL", ".", "sub", "(", "' '", ",", "line", ")", "# we now have: \" transformer1 [,...]\"", "line", "=", "line", ".", "split", "(", "\"#\"", ")", "[", "0", "]", "# remove any end of line comments", "# and insert each transformer as an item in a list", "for", "trans", "in", "line", ".", "replace", "(", "' '", ",", "''", ")", ".", "split", "(", "','", ")", ":", "import_transformer", "(", "trans", ")" ]
Extract the transformers names from a line of code of the form from __experimental__ import transformer1 [,...] and adds them to the globally known dict
[ "Extract", "the", "transformers", "names", "from", "a", "line", "of", "code", "of", "the", "form", "from", "__experimental__", "import", "transformer1", "[", "...", "]", "and", "adds", "them", "to", "the", "globally", "known", "dict" ]
train
https://github.com/aroberge/experimental/blob/031a9be10698b429998436da748b8fdb86f18b47/experimental/core/transforms.py#L19-L31
aroberge/experimental
experimental/core/transforms.py
import_transformer
def import_transformer(name): '''If needed, import a transformer, and adds it to the globally known dict The code inside a module where a transformer is defined should be standard Python code, which does not need any transformation. So, we disable the import hook, and let the normal module import do its job - which is faster and likely more reliable than our custom method. ''' if name in transformers: return transformers[name] # We are adding a transformer built from normal/standard Python code. # As we are not performing transformations, we temporarily disable # our import hook, both to avoid potential problems AND because we # found that this resulted in much faster code. hook = sys.meta_path[0] sys.meta_path = sys.meta_path[1:] try: transformers[name] = __import__(name) # Some transformers are not allowed in the console. # If an attempt is made to activate one of them in the console, # we replace it by a transformer that does nothing and print a # message specific to that transformer as written in its module. if CONSOLE_ACTIVE: if hasattr(transformers[name], "NO_CONSOLE"): print(transformers[name].NO_CONSOLE) transformers[name] = NullTransformer() except ImportError: sys.stderr.write("Warning: Import Error in add_transformers: %s not found\n" % name) transformers[name] = NullTransformer() except Exception as e: sys.stderr.write("Unexpected exception in transforms.import_transformer%s\n " % e.__class__.__name__) finally: sys.meta_path.insert(0, hook) # restore import hook return transformers[name]
python
def import_transformer(name): '''If needed, import a transformer, and adds it to the globally known dict The code inside a module where a transformer is defined should be standard Python code, which does not need any transformation. So, we disable the import hook, and let the normal module import do its job - which is faster and likely more reliable than our custom method. ''' if name in transformers: return transformers[name] # We are adding a transformer built from normal/standard Python code. # As we are not performing transformations, we temporarily disable # our import hook, both to avoid potential problems AND because we # found that this resulted in much faster code. hook = sys.meta_path[0] sys.meta_path = sys.meta_path[1:] try: transformers[name] = __import__(name) # Some transformers are not allowed in the console. # If an attempt is made to activate one of them in the console, # we replace it by a transformer that does nothing and print a # message specific to that transformer as written in its module. if CONSOLE_ACTIVE: if hasattr(transformers[name], "NO_CONSOLE"): print(transformers[name].NO_CONSOLE) transformers[name] = NullTransformer() except ImportError: sys.stderr.write("Warning: Import Error in add_transformers: %s not found\n" % name) transformers[name] = NullTransformer() except Exception as e: sys.stderr.write("Unexpected exception in transforms.import_transformer%s\n " % e.__class__.__name__) finally: sys.meta_path.insert(0, hook) # restore import hook return transformers[name]
[ "def", "import_transformer", "(", "name", ")", ":", "if", "name", "in", "transformers", ":", "return", "transformers", "[", "name", "]", "# We are adding a transformer built from normal/standard Python code.", "# As we are not performing transformations, we temporarily disable", "# our import hook, both to avoid potential problems AND because we", "# found that this resulted in much faster code.", "hook", "=", "sys", ".", "meta_path", "[", "0", "]", "sys", ".", "meta_path", "=", "sys", ".", "meta_path", "[", "1", ":", "]", "try", ":", "transformers", "[", "name", "]", "=", "__import__", "(", "name", ")", "# Some transformers are not allowed in the console.", "# If an attempt is made to activate one of them in the console,", "# we replace it by a transformer that does nothing and print a", "# message specific to that transformer as written in its module.", "if", "CONSOLE_ACTIVE", ":", "if", "hasattr", "(", "transformers", "[", "name", "]", ",", "\"NO_CONSOLE\"", ")", ":", "print", "(", "transformers", "[", "name", "]", ".", "NO_CONSOLE", ")", "transformers", "[", "name", "]", "=", "NullTransformer", "(", ")", "except", "ImportError", ":", "sys", ".", "stderr", ".", "write", "(", "\"Warning: Import Error in add_transformers: %s not found\\n\"", "%", "name", ")", "transformers", "[", "name", "]", "=", "NullTransformer", "(", ")", "except", "Exception", "as", "e", ":", "sys", ".", "stderr", ".", "write", "(", "\"Unexpected exception in transforms.import_transformer%s\\n \"", "%", "e", ".", "__class__", ".", "__name__", ")", "finally", ":", "sys", ".", "meta_path", ".", "insert", "(", "0", ",", "hook", ")", "# restore import hook", "return", "transformers", "[", "name", "]" ]
If needed, import a transformer, and adds it to the globally known dict The code inside a module where a transformer is defined should be standard Python code, which does not need any transformation. So, we disable the import hook, and let the normal module import do its job - which is faster and likely more reliable than our custom method.
[ "If", "needed", "import", "a", "transformer", "and", "adds", "it", "to", "the", "globally", "known", "dict", "The", "code", "inside", "a", "module", "where", "a", "transformer", "is", "defined", "should", "be", "standard", "Python", "code", "which", "does", "not", "need", "any", "transformation", ".", "So", "we", "disable", "the", "import", "hook", "and", "let", "the", "normal", "module", "import", "do", "its", "job", "-", "which", "is", "faster", "and", "likely", "more", "reliable", "than", "our", "custom", "method", "." ]
train
https://github.com/aroberge/experimental/blob/031a9be10698b429998436da748b8fdb86f18b47/experimental/core/transforms.py#L34-L70
aroberge/experimental
experimental/core/transforms.py
extract_transformers_from_source
def extract_transformers_from_source(source): '''Scan a source for lines of the form from __experimental__ import transformer1 [,...] identifying transformers to be used. Such line is passed to the add_transformer function, after which it is removed from the code to be executed. ''' lines = source.split('\n') linenumbers = [] for number, line in enumerate(lines): if FROM_EXPERIMENTAL.match(line): add_transformers(line) linenumbers.insert(0, number) # drop the "fake" import from the source code for number in linenumbers: del lines[number] return '\n'.join(lines)
python
def extract_transformers_from_source(source): '''Scan a source for lines of the form from __experimental__ import transformer1 [,...] identifying transformers to be used. Such line is passed to the add_transformer function, after which it is removed from the code to be executed. ''' lines = source.split('\n') linenumbers = [] for number, line in enumerate(lines): if FROM_EXPERIMENTAL.match(line): add_transformers(line) linenumbers.insert(0, number) # drop the "fake" import from the source code for number in linenumbers: del lines[number] return '\n'.join(lines)
[ "def", "extract_transformers_from_source", "(", "source", ")", ":", "lines", "=", "source", ".", "split", "(", "'\\n'", ")", "linenumbers", "=", "[", "]", "for", "number", ",", "line", "in", "enumerate", "(", "lines", ")", ":", "if", "FROM_EXPERIMENTAL", ".", "match", "(", "line", ")", ":", "add_transformers", "(", "line", ")", "linenumbers", ".", "insert", "(", "0", ",", "number", ")", "# drop the \"fake\" import from the source code", "for", "number", "in", "linenumbers", ":", "del", "lines", "[", "number", "]", "return", "'\\n'", ".", "join", "(", "lines", ")" ]
Scan a source for lines of the form from __experimental__ import transformer1 [,...] identifying transformers to be used. Such line is passed to the add_transformer function, after which it is removed from the code to be executed.
[ "Scan", "a", "source", "for", "lines", "of", "the", "form", "from", "__experimental__", "import", "transformer1", "[", "...", "]", "identifying", "transformers", "to", "be", "used", ".", "Such", "line", "is", "passed", "to", "the", "add_transformer", "function", "after", "which", "it", "is", "removed", "from", "the", "code", "to", "be", "executed", "." ]
train
https://github.com/aroberge/experimental/blob/031a9be10698b429998436da748b8fdb86f18b47/experimental/core/transforms.py#L72-L89
aroberge/experimental
experimental/core/transforms.py
remove_not_allowed_in_console
def remove_not_allowed_in_console(): '''This function should be called from the console, when it starts. Some transformers are not allowed in the console and they could have been loaded prior to the console being activated. We effectively remove them and print an information message specific to that transformer as written in the transformer module. ''' not_allowed_in_console = [] if CONSOLE_ACTIVE: for name in transformers: tr_module = import_transformer(name) if hasattr(tr_module, "NO_CONSOLE"): not_allowed_in_console.append((name, tr_module)) for name, tr_module in not_allowed_in_console: print(tr_module.NO_CONSOLE) # Note: we do not remove them, so as to avoid seeing the # information message displayed again if an attempt is # made to re-import them from a console instruction. transformers[name] = NullTransformer()
python
def remove_not_allowed_in_console(): '''This function should be called from the console, when it starts. Some transformers are not allowed in the console and they could have been loaded prior to the console being activated. We effectively remove them and print an information message specific to that transformer as written in the transformer module. ''' not_allowed_in_console = [] if CONSOLE_ACTIVE: for name in transformers: tr_module = import_transformer(name) if hasattr(tr_module, "NO_CONSOLE"): not_allowed_in_console.append((name, tr_module)) for name, tr_module in not_allowed_in_console: print(tr_module.NO_CONSOLE) # Note: we do not remove them, so as to avoid seeing the # information message displayed again if an attempt is # made to re-import them from a console instruction. transformers[name] = NullTransformer()
[ "def", "remove_not_allowed_in_console", "(", ")", ":", "not_allowed_in_console", "=", "[", "]", "if", "CONSOLE_ACTIVE", ":", "for", "name", "in", "transformers", ":", "tr_module", "=", "import_transformer", "(", "name", ")", "if", "hasattr", "(", "tr_module", ",", "\"NO_CONSOLE\"", ")", ":", "not_allowed_in_console", ".", "append", "(", "(", "name", ",", "tr_module", ")", ")", "for", "name", ",", "tr_module", "in", "not_allowed_in_console", ":", "print", "(", "tr_module", ".", "NO_CONSOLE", ")", "# Note: we do not remove them, so as to avoid seeing the", "# information message displayed again if an attempt is", "# made to re-import them from a console instruction.", "transformers", "[", "name", "]", "=", "NullTransformer", "(", ")" ]
This function should be called from the console, when it starts. Some transformers are not allowed in the console and they could have been loaded prior to the console being activated. We effectively remove them and print an information message specific to that transformer as written in the transformer module.
[ "This", "function", "should", "be", "called", "from", "the", "console", "when", "it", "starts", "." ]
train
https://github.com/aroberge/experimental/blob/031a9be10698b429998436da748b8fdb86f18b47/experimental/core/transforms.py#L91-L111
aroberge/experimental
experimental/core/transforms.py
transform
def transform(source): '''Used to convert the source code, making use of known transformers. "transformers" are modules which must contain a function transform_source(source) which returns a tranformed source. Some transformers (for example, those found in the standard library module lib2to3) cannot cope with non-standard syntax; as a result, they may fail during a first attempt. We keep track of all failing transformers and keep retrying them until either they all succeeded or a fixed set of them fails twice in a row. ''' source = extract_transformers_from_source(source) # Some transformer fail when multiple non-Python constructs # are present. So, we loop multiple times keeping track of # which transformations have been unsuccessfully performed. not_done = transformers while True: failed = {} for name in not_done: tr_module = import_transformer(name) try: source = tr_module.transform_source(source) except Exception as e: failed[name] = tr_module # from traceback import print_exc # print("Unexpected exception in transforms.transform", # e.__class__.__name__) # print_exc() if not failed: break # Insanity is doing the same Tting over and overaAgain and # expecting different results ... # If the exact same set of transformations are not performed # twice in a row, there is no point in trying out a third time. if failed == not_done: print("Warning: the following transforms could not be done:") for key in failed: print(key) break not_done = failed # attempt another pass return source
python
def transform(source): '''Used to convert the source code, making use of known transformers. "transformers" are modules which must contain a function transform_source(source) which returns a tranformed source. Some transformers (for example, those found in the standard library module lib2to3) cannot cope with non-standard syntax; as a result, they may fail during a first attempt. We keep track of all failing transformers and keep retrying them until either they all succeeded or a fixed set of them fails twice in a row. ''' source = extract_transformers_from_source(source) # Some transformer fail when multiple non-Python constructs # are present. So, we loop multiple times keeping track of # which transformations have been unsuccessfully performed. not_done = transformers while True: failed = {} for name in not_done: tr_module = import_transformer(name) try: source = tr_module.transform_source(source) except Exception as e: failed[name] = tr_module # from traceback import print_exc # print("Unexpected exception in transforms.transform", # e.__class__.__name__) # print_exc() if not failed: break # Insanity is doing the same Tting over and overaAgain and # expecting different results ... # If the exact same set of transformations are not performed # twice in a row, there is no point in trying out a third time. if failed == not_done: print("Warning: the following transforms could not be done:") for key in failed: print(key) break not_done = failed # attempt another pass return source
[ "def", "transform", "(", "source", ")", ":", "source", "=", "extract_transformers_from_source", "(", "source", ")", "# Some transformer fail when multiple non-Python constructs", "# are present. So, we loop multiple times keeping track of", "# which transformations have been unsuccessfully performed.", "not_done", "=", "transformers", "while", "True", ":", "failed", "=", "{", "}", "for", "name", "in", "not_done", ":", "tr_module", "=", "import_transformer", "(", "name", ")", "try", ":", "source", "=", "tr_module", ".", "transform_source", "(", "source", ")", "except", "Exception", "as", "e", ":", "failed", "[", "name", "]", "=", "tr_module", "# from traceback import print_exc", "# print(\"Unexpected exception in transforms.transform\",", "# e.__class__.__name__)", "# print_exc()", "if", "not", "failed", ":", "break", "# Insanity is doing the same Tting over and overaAgain and", "# expecting different results ...", "# If the exact same set of transformations are not performed", "# twice in a row, there is no point in trying out a third time.", "if", "failed", "==", "not_done", ":", "print", "(", "\"Warning: the following transforms could not be done:\"", ")", "for", "key", "in", "failed", ":", "print", "(", "key", ")", "break", "not_done", "=", "failed", "# attempt another pass", "return", "source" ]
Used to convert the source code, making use of known transformers. "transformers" are modules which must contain a function transform_source(source) which returns a tranformed source. Some transformers (for example, those found in the standard library module lib2to3) cannot cope with non-standard syntax; as a result, they may fail during a first attempt. We keep track of all failing transformers and keep retrying them until either they all succeeded or a fixed set of them fails twice in a row.
[ "Used", "to", "convert", "the", "source", "code", "making", "use", "of", "known", "transformers", "." ]
train
https://github.com/aroberge/experimental/blob/031a9be10698b429998436da748b8fdb86f18b47/experimental/core/transforms.py#L114-L160
bradmontgomery/django-chosenadmin
chosenadmin/middleware.py
ChosenAdminMiddleware._match
def _match(self, request, response): """Match all requests/responses that satisfy the following conditions: * An Admin App; i.e. the path is something like /admin/some_app/ * The ``include_flag`` is not in the response's content """ is_html = 'text/html' in response.get('Content-Type', '') if is_html and hasattr(response, 'rendered_content'): correct_path = PATH_MATCHER.match(request.path) is not None not_included = self.include_flag not in response.rendered_content return correct_path and not_included return False
python
def _match(self, request, response): """Match all requests/responses that satisfy the following conditions: * An Admin App; i.e. the path is something like /admin/some_app/ * The ``include_flag`` is not in the response's content """ is_html = 'text/html' in response.get('Content-Type', '') if is_html and hasattr(response, 'rendered_content'): correct_path = PATH_MATCHER.match(request.path) is not None not_included = self.include_flag not in response.rendered_content return correct_path and not_included return False
[ "def", "_match", "(", "self", ",", "request", ",", "response", ")", ":", "is_html", "=", "'text/html'", "in", "response", ".", "get", "(", "'Content-Type'", ",", "''", ")", "if", "is_html", "and", "hasattr", "(", "response", ",", "'rendered_content'", ")", ":", "correct_path", "=", "PATH_MATCHER", ".", "match", "(", "request", ".", "path", ")", "is", "not", "None", "not_included", "=", "self", ".", "include_flag", "not", "in", "response", ".", "rendered_content", "return", "correct_path", "and", "not_included", "return", "False" ]
Match all requests/responses that satisfy the following conditions: * An Admin App; i.e. the path is something like /admin/some_app/ * The ``include_flag`` is not in the response's content
[ "Match", "all", "requests", "/", "responses", "that", "satisfy", "the", "following", "conditions", ":" ]
train
https://github.com/bradmontgomery/django-chosenadmin/blob/23fad0151d1175d751599a7ee3a1ff35b2d61299/chosenadmin/middleware.py#L24-L36
bradmontgomery/django-chosenadmin
chosenadmin/middleware.py
ChosenAdminMiddleware._chosen_css
def _chosen_css(self): """Read the minified CSS file including STATIC_URL in the references to the sprite images.""" css = render_to_string(self.css_template, {}) for sprite in self.chosen_sprites: # rewrite path to sprites in the css css = css.replace(sprite, settings.STATIC_URL + "img/" + sprite) return css
python
def _chosen_css(self): """Read the minified CSS file including STATIC_URL in the references to the sprite images.""" css = render_to_string(self.css_template, {}) for sprite in self.chosen_sprites: # rewrite path to sprites in the css css = css.replace(sprite, settings.STATIC_URL + "img/" + sprite) return css
[ "def", "_chosen_css", "(", "self", ")", ":", "css", "=", "render_to_string", "(", "self", ".", "css_template", ",", "{", "}", ")", "for", "sprite", "in", "self", ".", "chosen_sprites", ":", "# rewrite path to sprites in the css", "css", "=", "css", ".", "replace", "(", "sprite", ",", "settings", ".", "STATIC_URL", "+", "\"img/\"", "+", "sprite", ")", "return", "css" ]
Read the minified CSS file including STATIC_URL in the references to the sprite images.
[ "Read", "the", "minified", "CSS", "file", "including", "STATIC_URL", "in", "the", "references", "to", "the", "sprite", "images", "." ]
train
https://github.com/bradmontgomery/django-chosenadmin/blob/23fad0151d1175d751599a7ee3a1ff35b2d61299/chosenadmin/middleware.py#L38-L44
bradmontgomery/django-chosenadmin
chosenadmin/middleware.py
ChosenAdminMiddleware._embed
def _embed(self, request, response): """Embed Chosen.js directly in html of the response.""" if self._match(request, response): # Render the <link> and the <script> tags to include Chosen. head = render_to_string( "chosenadmin/_head_css.html", {"chosen_css": self._chosen_css()} ) body = render_to_string( "chosenadmin/_script.html", {"chosen_js": self._chosen_js()} ) # Re-write the Response's content to include our new html content = response.rendered_content content = content.replace('</head>', head) content = content.replace('</body>', body) response.content = content return response
python
def _embed(self, request, response): """Embed Chosen.js directly in html of the response.""" if self._match(request, response): # Render the <link> and the <script> tags to include Chosen. head = render_to_string( "chosenadmin/_head_css.html", {"chosen_css": self._chosen_css()} ) body = render_to_string( "chosenadmin/_script.html", {"chosen_js": self._chosen_js()} ) # Re-write the Response's content to include our new html content = response.rendered_content content = content.replace('</head>', head) content = content.replace('</body>', body) response.content = content return response
[ "def", "_embed", "(", "self", ",", "request", ",", "response", ")", ":", "if", "self", ".", "_match", "(", "request", ",", "response", ")", ":", "# Render the <link> and the <script> tags to include Chosen.", "head", "=", "render_to_string", "(", "\"chosenadmin/_head_css.html\"", ",", "{", "\"chosen_css\"", ":", "self", ".", "_chosen_css", "(", ")", "}", ")", "body", "=", "render_to_string", "(", "\"chosenadmin/_script.html\"", ",", "{", "\"chosen_js\"", ":", "self", ".", "_chosen_js", "(", ")", "}", ")", "# Re-write the Response's content to include our new html", "content", "=", "response", ".", "rendered_content", "content", "=", "content", ".", "replace", "(", "'</head>'", ",", "head", ")", "content", "=", "content", ".", "replace", "(", "'</body>'", ",", "body", ")", "response", ".", "content", "=", "content", "return", "response" ]
Embed Chosen.js directly in html of the response.
[ "Embed", "Chosen", ".", "js", "directly", "in", "html", "of", "the", "response", "." ]
train
https://github.com/bradmontgomery/django-chosenadmin/blob/23fad0151d1175d751599a7ee3a1ff35b2d61299/chosenadmin/middleware.py#L50-L68
fchorney/rpI2C
rpI2C.py
I2C.clean_up
def clean_up(self): """ Close the I2C bus """ self.log.debug("Closing I2C bus for address: 0x%02X" % self.address) self.bus.close()
python
def clean_up(self): """ Close the I2C bus """ self.log.debug("Closing I2C bus for address: 0x%02X" % self.address) self.bus.close()
[ "def", "clean_up", "(", "self", ")", ":", "self", ".", "log", ".", "debug", "(", "\"Closing I2C bus for address: 0x%02X\"", "%", "self", ".", "address", ")", "self", ".", "bus", ".", "close", "(", ")" ]
Close the I2C bus
[ "Close", "the", "I2C", "bus" ]
train
https://github.com/fchorney/rpI2C/blob/7c60f82fa8c91496a74182373da0b95a13919d6e/rpI2C.py#L30-L35
fchorney/rpI2C
rpI2C.py
I2C.write_quick
def write_quick(self): """ Send only the read / write bit """ self.bus.write_quick(self.address) self.log.debug("write_quick: Sent the read / write bit")
python
def write_quick(self): """ Send only the read / write bit """ self.bus.write_quick(self.address) self.log.debug("write_quick: Sent the read / write bit")
[ "def", "write_quick", "(", "self", ")", ":", "self", ".", "bus", ".", "write_quick", "(", "self", ".", "address", ")", "self", ".", "log", ".", "debug", "(", "\"write_quick: Sent the read / write bit\"", ")" ]
Send only the read / write bit
[ "Send", "only", "the", "read", "/", "write", "bit" ]
train
https://github.com/fchorney/rpI2C/blob/7c60f82fa8c91496a74182373da0b95a13919d6e/rpI2C.py#L39-L44
fchorney/rpI2C
rpI2C.py
I2C.write_byte
def write_byte(self, cmd, value): """ Writes an 8-bit byte to the specified command register """ self.bus.write_byte_data(self.address, cmd, value) self.log.debug( "write_byte: Wrote 0x%02X to command register 0x%02X" % ( value, cmd ) )
python
def write_byte(self, cmd, value): """ Writes an 8-bit byte to the specified command register """ self.bus.write_byte_data(self.address, cmd, value) self.log.debug( "write_byte: Wrote 0x%02X to command register 0x%02X" % ( value, cmd ) )
[ "def", "write_byte", "(", "self", ",", "cmd", ",", "value", ")", ":", "self", ".", "bus", ".", "write_byte_data", "(", "self", ".", "address", ",", "cmd", ",", "value", ")", "self", ".", "log", ".", "debug", "(", "\"write_byte: Wrote 0x%02X to command register 0x%02X\"", "%", "(", "value", ",", "cmd", ")", ")" ]
Writes an 8-bit byte to the specified command register
[ "Writes", "an", "8", "-", "bit", "byte", "to", "the", "specified", "command", "register" ]
train
https://github.com/fchorney/rpI2C/blob/7c60f82fa8c91496a74182373da0b95a13919d6e/rpI2C.py#L46-L55
fchorney/rpI2C
rpI2C.py
I2C.write_word
def write_word(self, cmd, value): """ Writes a 16-bit word to the specified command register """ self.bus.write_word_data(self.address, cmd, value) self.log.debug( "write_word: Wrote 0x%04X to command register 0x%02X" % ( value, cmd ) )
python
def write_word(self, cmd, value): """ Writes a 16-bit word to the specified command register """ self.bus.write_word_data(self.address, cmd, value) self.log.debug( "write_word: Wrote 0x%04X to command register 0x%02X" % ( value, cmd ) )
[ "def", "write_word", "(", "self", ",", "cmd", ",", "value", ")", ":", "self", ".", "bus", ".", "write_word_data", "(", "self", ".", "address", ",", "cmd", ",", "value", ")", "self", ".", "log", ".", "debug", "(", "\"write_word: Wrote 0x%04X to command register 0x%02X\"", "%", "(", "value", ",", "cmd", ")", ")" ]
Writes a 16-bit word to the specified command register
[ "Writes", "a", "16", "-", "bit", "word", "to", "the", "specified", "command", "register" ]
train
https://github.com/fchorney/rpI2C/blob/7c60f82fa8c91496a74182373da0b95a13919d6e/rpI2C.py#L57-L66
fchorney/rpI2C
rpI2C.py
I2C.write_raw_byte
def write_raw_byte(self, value): """ Writes an 8-bit byte directly to the bus """ self.bus.write_byte(self.address, value) self.log.debug("write_raw_byte: Wrote 0x%02X" % value)
python
def write_raw_byte(self, value): """ Writes an 8-bit byte directly to the bus """ self.bus.write_byte(self.address, value) self.log.debug("write_raw_byte: Wrote 0x%02X" % value)
[ "def", "write_raw_byte", "(", "self", ",", "value", ")", ":", "self", ".", "bus", ".", "write_byte", "(", "self", ".", "address", ",", "value", ")", "self", ".", "log", ".", "debug", "(", "\"write_raw_byte: Wrote 0x%02X\"", "%", "value", ")" ]
Writes an 8-bit byte directly to the bus
[ "Writes", "an", "8", "-", "bit", "byte", "directly", "to", "the", "bus" ]
train
https://github.com/fchorney/rpI2C/blob/7c60f82fa8c91496a74182373da0b95a13919d6e/rpI2C.py#L68-L73
End of preview (truncated to 100 rows)

Dataset Card for CodeSearchNet corpus

Dataset Summary

CodeSearchNet corpus is a dataset of 2 million (comment, code) pairs from open-source libraries hosted on GitHub. It contains code and documentation for several programming languages.

CodeSearchNet corpus was gathered to support the CodeSearchNet challenge, to explore the problem of code retrieval using natural language.

Supported Tasks and Leaderboards

  • language-modeling: The dataset can be used to train a model for modelling programming languages, which consists in building language models for programming languages.

Languages

  • Go programming language
  • Java programming language
  • Javascript programming language
  • PHP programming language
  • Python programming language
  • Ruby programming language

Dataset Structure

Data Instances

A data point consists of a function code along with its documentation. Each data point also contains meta data on the function, such as the repository it was extracted from.

{
  'id': '0',
  'repository_name': 'organisation/repository',
  'func_path_in_repository': 'src/path/to/file.py',
  'func_name': 'func',
  'whole_func_string': 'def func(args):\n"""Docstring"""\n [...]',
  'language': 'python', 
  'func_code_string': '[...]',
  'func_code_tokens': ['def', 'func', '(', 'args', ')', ...],
  'func_documentation_string': 'Docstring',
  'func_documentation_string_tokens': ['Docstring'],
  'split_name': 'train',
  'func_code_url': 'https://github.com/<org>/<repo>/blob/<hash>/src/path/to/file.py#L111-L150'
}

Data Fields

  • id: Arbitrary number
  • repository_name: name of the GitHub repository
  • func_path_in_repository: path to the file which holds the function in the repository
  • func_name: name of the function in the file
  • whole_func_string: Code + documentation of the function
  • language: Programming language in which the function is written
  • func_code_string: Function code
  • func_code_tokens: Tokens yielded by Treesitter
  • func_documentation_string: Function documentation
  • func_documentation_string_tokens: Tokens yielded by Treesitter
  • split_name: Name of the split to which the example belongs (one of train, test or valid)
  • func_code_url: URL to the function code on Github

Data Splits

Three splits are available:

  • train
  • test
  • valid

Dataset Creation

Curation Rationale

[More Information Needed]

Source Data

Initial Data Collection and Normalization

All information can be found in the original technical report

Corpus collection:

Corpus has been collected from publicly available open-source non-fork GitHub repositories, using libraries.io to identify all projects which are used by at least one other project, and sort them by “popularity” as indicated by the number of stars and forks.

Then, any projects that do not have a license or whose license does not explicitly permit the re-distribution of parts of the project were removed. Treesitter - GitHub's universal parser - has been used to tokenize all Go, Java, JavaScript, Python, PHP and Ruby functions (or methods) and, where available, their respective documentation text using a heuristic regular expression.

Corpus filtering:

Functions without documentation are removed from the corpus. This yields a set of pairs ($c_i$, $d_i$) where $c_i$ is some function documented by $d_i$. Pairs ($c_i$, $d_i$) are passed through the following preprocessing tasks:

  • Documentation $d_i$ is truncated to the first full paragraph to remove in-depth discussion of function arguments and return values
  • Pairs in which $d_i$ is shorter than three tokens are removed
  • Functions $c_i$ whose implementation is shorter than three lines are removed
  • Functions whose name contains the substring “test” are removed
  • Constructors and standard extension methods (e.g. __str__ in Python or toString in Java) are removed
  • Duplicate and near-duplicate functions are removed, in order to keep only one version of each function

Who are the source language producers?

Open-source contributors produced the code and documentation.

The dataset was gathered and preprocessed automatically.

Annotations

Annotation process

[More Information Needed]

Who are the annotators?

[More Information Needed]

Personal and Sensitive Information

[More Information Needed]

Considerations for Using the Data

Social Impact of Dataset

[More Information Needed]

Discussion of Biases

[More Information Needed]

Other Known Limitations

[More Information Needed]

Additional Information

Dataset Curators

[More Information Needed]

Licensing Information

Each example in the dataset is extracted from a GitHub repository, and each repository has its own license. Example-wise license information is not (yet) included in this dataset: you will need to find out yourself which license the code is using.

Citation Information

@article{husain2019codesearchnet, title={{CodeSearchNet} challenge: Evaluating the state of semantic code search}, author={Husain, Hamel and Wu, Ho-Hsiang and Gazit, Tiferet and Allamanis, Miltiadis and Brockschmidt, Marc}, journal={arXiv preprint arXiv:1909.09436}, year={2019} }

Contributions

Thanks to @SBrandeis for adding this dataset.

Update on GitHub
Papers with Code

Models trained or fine-tuned on code_search_net

Spaces using code_search_net