Dataset columns: input (string, 11 to 7.65k characters), target (string, 22 to 8.26k characters).
def emit(self, level, message): raise NotImplementedError('Please implement an emit method')
def deeplift_grad(module, grad_input, grad_output): """The backward hook which computes the DeepLIFT gradient for an nn.Module.""" # first, get the module type module_type = module.__class__.__name__ # then, check whether the module is supported if module_type in op_handler: if op_handler[module_type].__name__ not in ['passthrough', 'linear_1d']: return op_handler[module_type](module, grad_input, grad_output) else: print('Warning: unrecognized nn.Module: {}'.format(module_type)) return grad_input
def open_pager(self): "Open the selected item with the system's pager" data = self.get_selected_item() if data['type'] == 'Submission': text = '\n\n'.join((data['permalink'], data['text'])) self.term.open_pager(text) elif data['type'] == 'Comment': text = '\n\n'.join((data['permalink'], data['body'])) self.term.open_pager(text) else: self.term.flash()
def add_interim_values(module, input, output): """The forward hook used to save interim tensors, detached from the graph. Used to calculate the multipliers """ try: del module.x except AttributeError: pass try: del module.y except AttributeError: pass module_type = module.__class__.__name__ if module_type in op_handler: func_name = op_handler[module_type].__name__ # First, check for cases where we don't need to save the x and y tensors if func_name == 'passthrough': pass else: # check only the 0th input varies for i in range(len(input)): if i != 0 and type(output) is tuple: assert input[i] == output[i], "Only the 0th input may vary!" # if a new method is added, it must be added here too. This ensures tensors # are only saved if necessary if func_name in ['maxpool', 'nonlinear_1d']: # only save tensors if necessary if type(input) is tuple: setattr(module, 'x', torch.nn.Parameter(input[0].detach())) else: setattr(module, 'x', torch.nn.Parameter(input.detach())) if type(output) is tuple: setattr(module, 'y', torch.nn.Parameter(output[0].detach())) else: setattr(module, 'y', torch.nn.Parameter(output.detach())) if module_type in failure_case_modules: input[0].register_hook(deeplift_tensor_grad)
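A minimal sketch of how these hooks are typically attached to a model's layers. The registration code is not part of the snippets above, so the wiring below (looping over modules and using the standard PyTorch hook API) is an assumption about the surrounding explainer:

handles = []
for child in model.modules():  # `model` is whatever torch.nn.Module is being explained
    if child.__class__.__name__ in op_handler:
        handles.append(child.register_forward_hook(add_interim_values))
        # newer PyTorch prefers register_full_backward_hook over register_backward_hook
        handles.append(child.register_backward_hook(deeplift_grad))

# ... run the forward/backward passes that compute the multipliers ...

for handle in handles:  # always remove the hooks once attributions are computed
    handle.remove()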
def add_comment(self): """ Submit a reply to the selected item. Selected item: Submission - add a top level comment Comment - add a comment reply """ data = self.get_selected_item() if data['type'] == 'Submission': body = data['text'] reply = data['object'].add_comment elif data['type'] == 'Comment': body = data['body'] reply = data['object'].reply else: self.term.flash() return # Construct the text that will be displayed in the editor file. # The post body will be commented out and added for reference lines = ['# |' + line for line in body.split('\n')] content = '\n'.join(lines) comment_info = docs.COMMENT_FILE.format( author=data['author'], type=data['type'].lower(), content=content) with self.term.open_editor(comment_info) as comment: if not comment: self.term.show_notification('Canceled') return with self.term.loader('Posting', delay=0): reply(comment) # Give reddit time to process the submission time.sleep(2.0) if self.term.loader.exception is None: self.refresh_content() else: raise TemporaryFileError()
def get_target_input(module, input, output): """A forward hook which saves the tensor - attached to its graph. Used if we want to explain the interim outputs of a model """ try: del module.target_input except AttributeError: pass setattr(module, 'target_input', input)
def delete_comment(self): "Delete the selected comment" if self.get_selected_item()['type'] == 'Comment': self.delete_item() else: self.term.flash()
def deeplift_tensor_grad(grad): return_grad = complex_module_gradients[-1] del complex_module_gradients[-1] return return_grad
def comment_urlview(self): data = self.get_selected_item() comment = data.get('body') or data.get('text') or data.get('url_full') if comment: self.term.open_urlview(comment) else: self.term.flash()
def passthrough(module, grad_input, grad_output): """No change made to gradients""" return None
def _draw_item(self, win, data, inverted): if data['type'] == 'MoreComments': return self._draw_more_comments(win, data) elif data['type'] == 'HiddenComment': return self._draw_more_comments(win, data) elif data['type'] == 'Comment': return self._draw_comment(win, data, inverted) else: return self._draw_submission(win, data)
def maxpool(module, grad_input, grad_output): pool_to_unpool = { 'MaxPool1d': torch.nn.functional.max_unpool1d, 'MaxPool2d': torch.nn.functional.max_unpool2d, 'MaxPool3d': torch.nn.functional.max_unpool3d } pool_to_function = { 'MaxPool1d': torch.nn.functional.max_pool1d, 'MaxPool2d': torch.nn.functional.max_pool2d, 'MaxPool3d': torch.nn.functional.max_pool3d } delta_in = module.x[: int(module.x.shape[0] / 2)] - module.x[int(module.x.shape[0] / 2):] dup0 = [2] + [1 for i in delta_in.shape[1:]] # we also need to check if the output is a tuple y, ref_output = torch.chunk(module.y, 2) cross_max = torch.max(y, ref_output) diffs = torch.cat([cross_max - ref_output, y - cross_max], 0) # all of this just to unpool the outputs with torch.no_grad(): _, indices = pool_to_function[module.__class__.__name__]( module.x, module.kernel_size, module.stride, module.padding, module.dilation, module.ceil_mode, True) xmax_pos, rmax_pos = torch.chunk(pool_to_unpool[module.__class__.__name__]( grad_output[0] * diffs, indices, module.kernel_size, module.stride, module.padding, list(module.x.shape)), 2) org_input_shape = grad_input[0].shape # saved for the MaxPool1d special case below grad_input = [None for _ in grad_input] grad_input[0] = torch.where(torch.abs(delta_in) < 1e-7, torch.zeros_like(delta_in), (xmax_pos + rmax_pos) / delta_in).repeat(dup0) if module.__class__.__name__ == 'MaxPool1d': complex_module_gradients.append(grad_input[0]) # the grad input that is returned doesn't matter, since it will immediately be overridden by the gradient stored in complex_module_gradients grad_input[0] = torch.ones(org_input_shape) return tuple(grad_input)
def _draw_comment(self, win, data, inverted): n_rows, n_cols = win.getmaxyx() n_cols -= 1 # Handle the case where the window is not large enough to fit the text. valid_rows = range(0, n_rows) offset = 0 if not inverted else -(data['n_rows'] - n_rows) # If there isn't enough space to fit the comment body on the screen, # replace the last line with a notification. split_body = data['split_body'] if data['n_rows'] > n_rows: # Only when there is a single comment on the page and not inverted if not inverted and len(self._subwindows) == 0: cutoff = data['n_rows'] - n_rows + 1 split_body = split_body[:-cutoff] split_body.append('(Not enough space to display)') row = offset if row in valid_rows: attr = curses.A_BOLD attr |= (Color.BLUE if not data['is_author'] else Color.GREEN) self.term.add_line(win, '{author} '.format(**data), row, 1, attr) if data['flair']: attr = curses.A_BOLD | Color.YELLOW self.term.add_line(win, '{flair} '.format(**data), attr=attr) text, attr = self.term.get_arrow(data['likes']) self.term.add_line(win, text, attr=attr) self.term.add_line(win, ' {score} {created} '.format(**data)) if data['gold']: text, attr = self.term.guilded self.term.add_line(win, text, attr=attr) if data['stickied']: text, attr = '[stickied]', Color.GREEN self.term.add_line(win, text, attr=attr) if data['saved']: text, attr = '[saved]', Color.GREEN self.term.add_line(win, text, attr=attr) for row, text in enumerate(split_body, start=offset+1): if row in valid_rows: self.term.add_line(win, text, row, 1) # Unfortunately vline() doesn't support custom color so we have to # build it one segment at a time. attr = Color.get_level(data['level']) x = 0 for y in range(n_rows): self.term.addch(win, y, x, self.term.vline, attr) return attr | self.term.vline
def linear_1d(module, grad_input, grad_output): """No change made to gradients.""" return None
def _draw_more_comments(self, win, data): n_rows, n_cols = win.getmaxyx() n_cols -= 1 self.term.add_line(win, '{body}'.format(**data), 0, 1) self.term.add_line( win, ' [{count}]'.format(**data), attr=curses.A_BOLD) attr = Color.get_level(data['level']) self.term.addch(win, 0, 0, self.term.vline, attr) return attr | self.term.vline
def nonlinear_1d(module, grad_input, grad_output): delta_out = module.y[: int(module.y.shape[0] / 2)] - module.y[int(module.y.shape[0] / 2):] delta_in = module.x[: int(module.x.shape[0] / 2)] - module.x[int(module.x.shape[0] / 2):] dup0 = [2] + [1 for i in delta_in.shape[1:]] # handles numerical instabilities where delta_in is very small by # just taking the gradient in those cases grads = [None for _ in grad_input] grads[0] = torch.where(torch.abs(delta_in.repeat(dup0)) < 1e-6, grad_input[0], grad_output[0] * (delta_out / delta_in).repeat(dup0)) return tuple(grads)
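A toy illustration (not from the source) of the rescale rule that nonlinear_1d implements: the gradient is replaced by delta_out / delta_in, computed between the actual and the reference activations, with a fallback to the ordinary gradient when delta_in is tiny.

import torch

x, x_ref = torch.tensor(2.0), torch.tensor(-1.0)     # actual and reference inputs to a ReLU
delta_in = x - x_ref                                  # 3.0
delta_out = torch.relu(x) - torch.relu(x_ref)         # 2.0
print((delta_out / delta_in).item())                  # 0.666..., used in place of the local gradient 1.0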
def wait_time_gen(): count = 0 while True: rand = random.randrange(round(interval.total_seconds())) tmp = round(start + interval.total_seconds() * count + rand - loop.time()) yield tmp count += 1
def every_day(job, loop=None): return every(job, timedelta=timedelta(days=1), loop=loop)
def every_week(job, loop=None): return every(job, timedelta=timedelta(days=7), loop=loop)
def _nearest_weekday(weekday): return datetime.now() + timedelta(days=(weekday - datetime.now().weekday()) % 7)
def _every_weekday(job, weekday, loop=None): return every(job, timedelta=timedelta(days=7), start_at=_nearest_weekday(weekday), loop=loop)
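A hedged usage sketch for the scheduling helpers above. The underlying every(...) function is not shown here, so its exact contract (sync vs. async jobs, return value) is assumed; _nearest_weekday simply moves today forward by (weekday - today.weekday()) % 7 days, returning today when today is already the requested weekday.

import asyncio

async def backup():
    print("running backup")

loop = asyncio.get_event_loop()
every_day(backup, loop=loop)    # run roughly once a day, starting now
every_week(backup, loop=loop)   # run roughly once a week, starting now
loop.run_forever()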
def test_dummy_request(): from rasa.nlu.emulators.no_emulator import NoEmulator em = NoEmulator() norm = em.normalise_request_json({"text": ["arb text"]}) assert norm == {"text": "arb text", "time": None} norm = em.normalise_request_json({"text": ["arb text"], "time": "1499279161658"}) assert norm == {"text": "arb text", "time": "1499279161658"}
def __init__(self): ApiCli.__init__(self) self.path = "v1/account/sources/" self.method = "GET"
def test_dummy_response(): from rasa.nlu.emulators.no_emulator import NoEmulator em = NoEmulator() data = {"intent": "greet", "text": "hi", "entities": {}, "confidence": 1.0} assert em.normalise_response_json(data) == data
def __init__(self, root, transforms=None): super().__init__(root=root) self.transforms = transforms self._flow_list = [] self._image_list = []
def _read_img(self, file_name): img = Image.open(file_name) if img.mode != "RGB": img = img.convert("RGB") return img
def _read_flow(self, file_name): """Return the flow, or a tuple (flow, valid_flow_mask) when _has_builtin_flow_mask is True.""" pass
def __getitem__(self, index): img1 = self._read_img(self._image_list[index][0]) img2 = self._read_img(self._image_list[index][1]) if self._flow_list: # it will be empty for some dataset when split="test" flow = self._read_flow(self._flow_list[index]) if self._has_builtin_flow_mask: flow, valid_flow_mask = flow else: valid_flow_mask = None else: flow = valid_flow_mask = None if self.transforms is not None: img1, img2, flow, valid_flow_mask = self.transforms(img1, img2, flow, valid_flow_mask) if self._has_builtin_flow_mask or valid_flow_mask is not None: # The `or valid_flow_mask is not None` part is here because the mask can be generated within a transform return img1, img2, flow, valid_flow_mask else: return img1, img2, flow
def __len__(self): return len(self._image_list)
def __rmul__(self, v): return torch.utils.data.ConcatDataset([self] * v)
def __init__(self, root, split="train", pass_name="clean", transforms=None): super().__init__(root=root, transforms=transforms) verify_str_arg(split, "split", valid_values=("train", "test")) verify_str_arg(pass_name, "pass_name", valid_values=("clean", "final", "both")) passes = ["clean", "final"] if pass_name == "both" else [pass_name] root = Path(root) / "Sintel" flow_root = root / "training" / "flow" for pass_name in passes: split_dir = "training" if split == "train" else split image_root = root / split_dir / pass_name for scene in os.listdir(image_root): image_list = sorted(glob(str(image_root / scene / "*.png"))) for i in range(len(image_list) - 1): self._image_list += [[image_list[i], image_list[i + 1]]] if split == "train": self._flow_list += sorted(glob(str(flow_root / scene / "*.flo")))
def __getitem__(self, index): """Return example at given index. Args: index(int): The index of the example to retrieve Returns: tuple: A 3-tuple with ``(img1, img2, flow)``. The flow is a numpy array of shape (2, H, W) and the images are PIL images. ``flow`` is None if ``split="test"``. If a valid flow mask is generated within the ``transforms`` parameter, a 4-tuple with ``(img1, img2, flow, valid_flow_mask)`` is returned. """ return super().__getitem__(index)
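A minimal usage sketch for the Sintel-style dataset built above, assuming the class is exposed as Sintel and the data has been downloaded to ./data/Sintel (both assumptions; the class name and download step are not shown here):

dataset = Sintel(root="./data", split="train", pass_name="clean")
img1, img2, flow = dataset[0]   # two PIL images and a (2, H, W) numpy flow array
print(len(dataset), flow.shape)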
def _read_flow(self, file_name): return _read_flo(file_name)
def __init__(self, root, split="train", transforms=None): super().__init__(root=root, transforms=transforms) verify_str_arg(split, "split", valid_values=("train", "test")) root = Path(root) / "KittiFlow" / (split + "ing") images1 = sorted(glob(str(root / "image_2" / "*_10.png"))) images2 = sorted(glob(str(root / "image_2" / "*_11.png"))) if not images1 or not images2: raise FileNotFoundError( "Could not find the Kitti flow images. Please make sure the directory structure is correct." ) for img1, img2 in zip(images1, images2): self._image_list += [[img1, img2]] if split == "train": self._flow_list = sorted(glob(str(root / "flow_occ" / "*_10.png")))
def __getitem__(self, index): """Return example at given index. Args: index(int): The index of the example to retrieve Returns: tuple: A 4-tuple with ``(img1, img2, flow, valid_flow_mask)`` where ``valid_flow_mask`` is a numpy boolean mask of shape (H, W) indicating which flow values are valid. The flow is a numpy array of shape (2, H, W) and the images are PIL images. ``flow`` and ``valid_flow_mask`` are None if ``split="test"``. """ return super().__getitem__(index)
def _read_flow(self, file_name): return _read_16bits_png_with_flow_and_valid_mask(file_name)
def __init__(self, root, split="train", transforms=None): super().__init__(root=root, transforms=transforms) verify_str_arg(split, "split", valid_values=("train", "val")) root = Path(root) / "FlyingChairs" images = sorted(glob(str(root / "data" / "*.ppm"))) flows = sorted(glob(str(root / "data" / "*.flo"))) split_file_name = "FlyingChairs_train_val.txt" if not os.path.exists(root / split_file_name): raise FileNotFoundError( "The FlyingChairs_train_val.txt file was not found - please download it from the dataset page (see docstring)." ) split_list = np.loadtxt(str(root / split_file_name), dtype=np.int32) for i in range(len(flows)): split_id = split_list[i] if (split == "train" and split_id == 1) or (split == "val" and split_id == 2): self._flow_list += [flows[i]] self._image_list += [[images[2 * i], images[2 * i + 1]]]
def __init__(self, root, split="train", pass_name="clean", camera="left", transforms=None): super().__init__(root=root, transforms=transforms) verify_str_arg(split, "split", valid_values=("train", "test")) split = split.upper() verify_str_arg(pass_name, "pass_name", valid_values=("clean", "final", "both")) passes = { "clean": ["frames_cleanpass"], "final": ["frames_finalpass"], "both": ["frames_cleanpass", "frames_finalpass"], }[pass_name] verify_str_arg(camera, "camera", valid_values=("left", "right", "both")) cameras = ["left", "right"] if camera == "both" else [camera] root = Path(root) / "FlyingThings3D" directions = ("into_future", "into_past") for pass_name, camera, direction in itertools.product(passes, cameras, directions): image_dirs = sorted(glob(str(root / pass_name / split / "*/*"))) image_dirs = sorted(Path(image_dir) / camera for image_dir in image_dirs) flow_dirs = sorted(glob(str(root / "optical_flow" / split / "*/*"))) flow_dirs = sorted(Path(flow_dir) / direction / camera for flow_dir in flow_dirs) if not image_dirs or not flow_dirs: raise FileNotFoundError( "Could not find the FlyingThings3D flow images. " "Please make sure the directory structure is correct." ) for image_dir, flow_dir in zip(image_dirs, flow_dirs): images = sorted(glob(str(image_dir / "*.png"))) flows = sorted(glob(str(flow_dir / "*.pfm"))) for i in range(len(flows) - 1): if direction == "into_future": self._image_list += [[images[i], images[i + 1]]] self._flow_list += [flows[i]] elif direction == "into_past": self._image_list += [[images[i + 1], images[i]]] self._flow_list += [flows[i + 1]]
def _read_flow(self, file_name): return _read_pfm(file_name)
def __init__(self, root, split="train", transforms=None): super().__init__(root=root, transforms=transforms) verify_str_arg(split, "split", valid_values=("train", "test")) root = Path(root) / "hd1k" if split == "train": # There are 36 "sequences" and we don't want seq i to overlap with seq i + 1, so we need this for loop for seq_idx in range(36): flows = sorted(glob(str(root / "hd1k_flow_gt" / "flow_occ" / f"{seq_idx:06d}_*.png"))) images = sorted(glob(str(root / "hd1k_input" / "image_2" / f"{seq_idx:06d}_*.png"))) for i in range(len(flows) - 1): self._flow_list += [flows[i]] self._image_list += [[images[i], images[i + 1]]] else: images1 = sorted(glob(str(root / "hd1k_challenge" / "image_2" / "*10.png"))) images2 = sorted(glob(str(root / "hd1k_challenge" / "image_2" / "*11.png"))) for image1, image2 in zip(images1, images2): self._image_list += [[image1, image2]] if not self._image_list: raise FileNotFoundError( "Could not find the HD1K images. Please make sure the directory structure is correct." )
def _read_flow(self, file_name): return _read_16bits_png_with_flow_and_valid_mask(file_name)
def _read_flo(file_name): """Read .flo file in Middlebury format""" # Code adapted from: # http://stackoverflow.com/questions/28013200/reading-middlebury-flow-files-with-python-bytes-array-numpy # Everything needs to be in little Endian according to # https://vision.middlebury.edu/flow/code/flow-code/README.txt with open(file_name, "rb") as f: magic = np.fromfile(f, "c", count=4).tobytes() if magic != b"PIEH": raise ValueError("Magic number incorrect. Invalid .flo file") w = int(np.fromfile(f, "<i4", count=1)) h = int(np.fromfile(f, "<i4", count=1)) data = np.fromfile(f, "<f4", count=2 * w * h) return data.reshape(h, w, 2).transpose(2, 0, 1)
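For round-trip testing, a small companion sketch (not part of the original code) that writes the same Middlebury .flo layout the reader above expects: the b"PIEH" magic, little-endian int32 width and height, then interleaved float32 (u, v) values.

import numpy as np

def write_flo(file_name, flow):
    # flow: float32 array of shape (2, H, W), as returned by _read_flo
    _, h, w = flow.shape
    with open(file_name, "wb") as f:
        f.write(b"PIEH")                                  # magic number
        np.array([w, h], dtype="<i4").tofile(f)           # little-endian width, height
        flow.transpose(1, 2, 0).astype("<f4").tofile(f)   # per-pixel (u, v), row-major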
def _read_16bits_png_with_flow_and_valid_mask(file_name): flow_and_valid = _read_png_16(file_name).to(torch.float32) flow, valid_flow_mask = flow_and_valid[:2, :, :], flow_and_valid[2, :, :] flow = (flow - 2 ** 15) / 64 # KITTI stores flow as uint16 with value = flow * 64 + 2 ** 15 (see the KITTI flow devkit readme), so invert that here valid_flow_mask = valid_flow_mask.bool() # For consistency with other datasets, we convert to numpy return flow.numpy(), valid_flow_mask.numpy()
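For reference, a sketch of the inverse encoding (how a KITTI-style 16-bit flow PNG would be built); the function name and the use of a generic 16-bit PNG writer are illustrative assumptions.

import numpy as np

def encode_kitti_flow(flow, valid_mask):
    # flow: float array of shape (2, H, W); valid_mask: bool array of shape (H, W)
    png = np.zeros((flow.shape[1], flow.shape[2], 3), dtype=np.uint16)
    png[..., 0] = np.clip(flow[0] * 64 + 2 ** 15, 0, 2 ** 16 - 1).astype(np.uint16)
    png[..., 1] = np.clip(flow[1] * 64 + 2 ** 15, 0, 2 ** 16 - 1).astype(np.uint16)
    png[..., 2] = valid_mask.astype(np.uint16)
    return png  # save with any 16-bit-capable PNG writer, e.g. imageio.imwrite(path, png)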
def LogPABotMessage(message): _pabotlog.info(message)
def test_engine_module_name(): engine = salt.engines.Engine({}, "foobar.start", {}, {}, {}, {}, name="foobar") assert engine.name == "foobar"
def resource_setup(cls): super(VolumesActionsTest, cls).resource_setup() # Create a test shared volume for attach/detach tests cls.volume = cls.create_volume()
def test_attach_detach_volume_to_instance(self): """Test attaching and detaching volume to instance""" # Create a server server = self.create_server() # Volume is attached and detached successfully from an instance self.volumes_client.attach_volume(self.volume['id'], instance_uuid=server['id'], mountpoint='/dev/%s' % CONF.compute.volume_device_name) waiters.wait_for_volume_resource_status(self.volumes_client, self.volume['id'], 'in-use') self.volumes_client.detach_volume(self.volume['id']) waiters.wait_for_volume_resource_status(self.volumes_client, self.volume['id'], 'available')
def test_volume_bootable(self): """Test setting and retrieving bootable flag of a volume""" for bool_bootable in [True, False]: self.volumes_client.set_bootable_volume(self.volume['id'], bootable=bool_bootable) fetched_volume = self.volumes_client.show_volume( self.volume['id'])['volume'] # Get Volume information # NOTE(masayukig): 'bootable' is "true" or "false" in the current # cinder implementation. So we need to cast boolean values to str # and make it lower to compare here. self.assertEqual(str(bool_bootable).lower(), fetched_volume['bootable'])
def test_get_volume_attachment(self): """Test getting volume attachments Attach a volume to a server, and then retrieve volume's attachments info. """ # Create a server server = self.create_server() # Verify that a volume's attachment information is retrieved self.volumes_client.attach_volume(self.volume['id'], instance_uuid=server['id'], mountpoint='/dev/%s' % CONF.compute.volume_device_name) waiters.wait_for_volume_resource_status(self.volumes_client, self.volume['id'], 'in-use') self.addCleanup(waiters.wait_for_volume_resource_status, self.volumes_client, self.volume['id'], 'available') self.addCleanup(self.volumes_client.detach_volume, self.volume['id']) volume = self.volumes_client.show_volume(self.volume['id'])['volume'] attachment = volume['attachments'][0] self.assertEqual('/dev/%s' % CONF.compute.volume_device_name, attachment['device']) self.assertEqual(server['id'], attachment['server_id']) self.assertEqual(self.volume['id'], attachment['id']) self.assertEqual(self.volume['id'], attachment['volume_id'])
def test_volume_upload(self): """Test uploading volume to create an image""" # NOTE(gfidente): the volume uploaded in Glance comes from setUpClass, # it is shared with the other tests. After it is uploaded in Glance, # there is no way to delete it from Cinder, so we delete it from Glance # using the Glance images_client and from Cinder via tearDownClass. image_name = data_utils.rand_name(self.__class__.__name__ + '-Image') body = self.volumes_client.upload_volume( self.volume['id'], image_name=image_name, disk_format=CONF.volume.disk_format)['os-volume_upload_image'] image_id = body["image_id"] self.addCleanup(test_utils.call_and_ignore_notfound_exc, self.images_client.delete_image, image_id) waiters.wait_for_image_status(self.images_client, image_id, 'active') waiters.wait_for_volume_resource_status(self.volumes_client, self.volume['id'], 'available') image_info = self.images_client.show_image(image_id) self.assertEqual(image_name, image_info['name']) self.assertEqual(CONF.volume.disk_format, image_info['disk_format'])
def test_reserve_unreserve_volume(self): """Test reserving and unreserving volume""" # Mark volume as reserved. self.volumes_client.reserve_volume(self.volume['id']) # To get the volume info body = self.volumes_client.show_volume(self.volume['id'])['volume'] self.assertIn('attaching', body['status']) # Unmark volume as reserved. self.volumes_client.unreserve_volume(self.volume['id']) # To get the volume info body = self.volumes_client.show_volume(self.volume['id'])['volume'] self.assertIn('available', body['status'])
def setup_loader(request): setup_loader_modules = {pdbedit: {}} with pytest.helpers.loader_mock(request, setup_loader_modules) as loader_mock: yield loader_mock
def test_disk_usage_sensor_is_stateless(): sensor = disk_usage.DiskUsage() ok_([] != sensor.measure())
def test_when_no_users_returned_no_data_should_be_returned(verbose): expected_users = {} if verbose else [] with patch.dict( pdbedit.__salt__, { "cmd.run_all": MagicMock( return_value={"stdout": "", "stderr": "", "retcode": 0} ) }, ): actual_users = pdbedit.list_users(verbose=verbose) assert actual_users == expected_users
def test_when_verbose_and_retcode_is_nonzero_output_should_be_had(): expected_stderr = "this is something fnord" with patch.dict( pdbedit.__salt__, { "cmd.run_all": MagicMock( return_value={"stdout": "", "stderr": expected_stderr, "retcode": 1} ) }, ), patch("salt.modules.pdbedit.log.error", autospec=True) as fake_error_log: pdbedit.list_users(verbose=True) actual_error = fake_error_log.mock_calls[0].args[0] assert actual_error == expected_stderr
def test_when_verbose_and_single_good_output_expected_data_should_be_parsed(): expected_data = { "roscivs": { "unix username": "roscivs", "nt username": "bottia", "full name": "Roscivs Bottia", "user sid": "42", "primary group sid": "99", "home directory": r"\\samba\roscivs", "account desc": "separators! xxx so long and thanks for all the fish", "logoff time": "Sat, 14 Aug 2010 15:06:39 UTC", "kickoff time": "Sat, 14 Aug 2010 15:06:39 UTC", "password must change": "never", } } pdb_output = dedent( r""" Unix username: roscivs NT username: bottia User SID: 42 Primary Group SID: 99 Full Name: Roscivs Bottia Home Directory: \\samba\roscivs Account desc: separators! xxx so long and thanks for all the fish Logoff time: Sat, 14 Aug 2010 15:06:39 UTC Kickoff time: Sat, 14 Aug 2010 15:06:39 UTC Password must change: never """ ).strip() with patch.dict( pdbedit.__salt__, { "cmd.run_all": MagicMock( return_value={"stdout": pdb_output, "stderr": "", "retcode": 0} ) }, ): actual_data = pdbedit.list_users(verbose=True) assert actual_data == expected_data
def parse_record(self, metadata, line): factors = line.split('|') if len(factors) < 7: return registry, cc, type_, start, value, date, status = factors[:7] if type_ not in ('ipv4', 'ipv6'): return return Record(metadata, start, type_, value, cc)
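For context, parse_record expects '|'-separated lines in the RIR extended delegation format (registry|cc|type|start|value|date|status). An illustrative line (the values are made up):

line = "apnic|JP|ipv4|1.0.16.0|4096|20110412|allocated"
registry, cc, type_, start, value, date, status = line.split('|')[:7]
# start = '1.0.16.0' (first address of the block), value = '4096' (number of addresses)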
def setup_args(parser=None): if parser is None: parser = ParlaiParser(True, True, 'Check tasks for common errors') # Get command line arguments parser.add_argument('-ltim', '--log-every-n-secs', type=float, default=2) parser.add_argument('-d', '--display-examples', type='bool', default=False) parser.set_defaults(datatype='train:stream:ordered') return parser
def get_list(arg=None): """get list of messages""" frappe.form_dict['limit_start'] = int(frappe.form_dict['limit_start']) frappe.form_dict['limit_page_length'] = int(frappe.form_dict['limit_page_length']) frappe.form_dict['user'] = frappe.session['user'] # set all messages as read frappe.db.begin() frappe.db.sql("""UPDATE `tabCommunication` set seen = 1 where communication_type in ('Chat', 'Notification') and reference_doctype = 'User' and reference_name = %s""", frappe.session.user) delete_notification_count_for("Messages") frappe.local.flags.commit = True if frappe.form_dict['contact'] == frappe.session['user']: # return messages return frappe.db.sql("""select * from `tabCommunication` where communication_type in ('Chat', 'Notification') and reference_doctype ='User' and (owner=%(contact)s or reference_name=%(user)s or owner=reference_name) order by creation desc limit %(limit_start)s, %(limit_page_length)s""", frappe.local.form_dict, as_dict=1) else: return frappe.db.sql("""select * from `tabCommunication` where communication_type in ('Chat', 'Notification') and reference_doctype ='User' and ((owner=%(contact)s and reference_name=%(user)s) or (owner=%(contact)s and reference_name=%(contact)s)) order by creation desc limit %(limit_start)s, %(limit_page_length)s""", frappe.local.form_dict, as_dict=1)
def report(world, counts, log_time): report = world.report() log = { 'missing_text': counts['missing_text'], 'missing_labels': counts['missing_labels'], 'missing_label_candidates': counts['missing_label_candidates'], 'empty_string_label_candidates': counts['empty_string_label_candidates'], 'label_candidates_with_missing_label': counts[ 'label_candidates_with_missing_label' ], 'did_not_return_message': counts['did_not_return_message'], } text, log = log_time.log(report['exs'], world.num_examples(), log) return text, log
def get_active_users(): data = frappe.db.sql("""select name, (select count(*) from tabSessions where user=tabUser.name and timediff(now(), lastupdate) < time("01:00:00")) as has_session from tabUser where enabled=1 and ifnull(user_type, '')!='Website User' and name not in ({}) order by first_name""".format(", ".join(["%s"]*len(STANDARD_USERS))), STANDARD_USERS, as_dict=1) # make sure current user is at the top, using has_session = 100 users = [d.name for d in data] if frappe.session.user in users: data[users.index(frappe.session.user)]["has_session"] = 100 else: # in case of administrator data.append({"name": frappe.session.user, "has_session": 100}) return data
def warn(txt, act, opt): if opt.get('display_examples'): print(txt + ":\n" + str(act)) else: warn_once(txt)
def post(txt, contact, parenttype=None, notify=False, subject=None): """post message""" d = frappe.new_doc('Communication') d.communication_type = 'Notification' if parenttype else 'Chat' d.subject = subject d.content = txt d.reference_doctype = 'User' d.reference_name = contact d.sender = frappe.session.user d.insert(ignore_permissions=True) delete_notification_count_for("Messages") if notify and cint(notify): if contact==frappe.session.user: _notify([user.name for user in get_enabled_system_users()], txt) else: _notify(contact, txt, subject) return d
def verify(opt): if opt['datatype'] == 'train': logging.warning("changing datatype from train to train:ordered") opt['datatype'] = 'train:ordered' opt.log() # create repeat label agent and assign it to the specified task agent = RepeatLabelAgent(opt) world = create_task(opt, agent) log_every_n_secs = opt.get('log_every_n_secs', -1) if log_every_n_secs <= 0: log_every_n_secs = float('inf') log_time = TimeLogger() counts = {} counts['missing_text'] = 0 counts['missing_labels'] = 0 counts['missing_label_candidates'] = 0 counts['empty_string_label_candidates'] = 0 counts['label_candidates_with_missing_label'] = 0 counts['did_not_return_message'] = 0 # Show some example dialogs. while not world.epoch_done(): world.parley() act = world.acts[0] if not isinstance(act, Message): counts['did_not_return_message'] += 1 if 'text' not in act and 'image' not in act: warn("warning: missing text field:\n", act, opt) counts['missing_text'] += 1 if 'labels' not in act and 'eval_labels' not in act: warn("warning: missing labels/eval_labels field:\n", act, opt) counts['missing_labels'] += 1 else: if 'label_candidates' not in act: counts['missing_label_candidates'] += 1 else: labels = act.get('labels', act.get('eval_labels')) is_label_cand = {} for l in labels: is_label_cand[l] = False for c in act['label_candidates']: if c == '': warn("warning: empty string label_candidate:\n", act, opt) counts['empty_string_label_candidates'] += 1 if c in is_label_cand: if is_label_cand[c] is True: warn( "warning: label mentioned twice in candidate_labels:\n", act, opt, ) is_label_cand[c] = True for _, has in is_label_cand.items(): if has is False: warn("warning: label missing in candidate_labels:\n", act, opt) counts['label_candidates_with_missing_label'] += 1 if log_time.time() > log_every_n_secs: text, log = report(world, counts, log_time) print(text) try: # print dataset size if available logging.info( f'Loaded {world.num_episodes()} episodes with a ' f'total of {world.num_examples()} examples' ) except AttributeError: pass counts['exs'] = int(world.report()['exs']) return counts
def delete(arg=None): frappe.get_doc("Communication", frappe.form_dict['name']).delete()
def verify_data(opt): counts = verify(opt) print(counts) return counts
def setup_args(cls): return setup_args()
def run(self): return verify_data(self.opt)
def block_size_filter(entity): return ( entity.size[0] * 2 >= entity.size[1] * 2 and entity.size[1] <= 16 and entity.size[3] <= 4 )
def __init__(self, auth_provider): super(SnapshotsClientJSON, self).__init__(auth_provider) self.service = CONF.volume.catalog_type self.build_interval = CONF.volume.build_interval self.build_timeout = CONF.volume.build_timeout
def list_snapshots(self, params=None): """List all the snapshot.""" url = 'snapshots' if params: url += '?%s' % urllib.urlencode(params) resp, body = self.get(url) body = json.loads(body) return resp, body['snapshots']
def list_snapshots_with_detail(self, params=None): """List the details of all snapshots.""" url = 'snapshots/detail' if params: url += '?%s' % urllib.urlencode(params) resp, body = self.get(url) body = json.loads(body) return resp, body['snapshots']
def get_snapshot(self, snapshot_id): """Returns the details of a single snapshot.""" url = "snapshots/%s" % str(snapshot_id) resp, body = self.get(url) body = json.loads(body) return resp, body['snapshot']
def create_snapshot(self, volume_id, **kwargs): """ Creates a new snapshot. volume_id(Required): id of the volume. force: Create a snapshot even if the volume attached (Default=False) display_name: Optional snapshot Name. display_description: User friendly snapshot description. """ post_body = {'volume_id': volume_id} post_body.update(kwargs) post_body = json.dumps({'snapshot': post_body}) resp, body = self.post('snapshots', post_body) body = json.loads(body) return resp, body['snapshot']
def update_snapshot(self, snapshot_id, **kwargs): """Updates a snapshot.""" put_body = json.dumps({'snapshot': kwargs}) resp, body = self.put('snapshots/%s' % snapshot_id, put_body) body = json.loads(body) return resp, body['snapshot']
def _get_snapshot_status(self, snapshot_id): resp, body = self.get_snapshot(snapshot_id) status = body['status'] # NOTE(afazekas): snapshot can reach an "error" # state in a "normal" lifecycle if (status == 'error'): raise exceptions.SnapshotBuildErrorException( snapshot_id=snapshot_id) return status
def wait_for_snapshot_status(self, snapshot_id, status): """Waits for a Snapshot to reach a given status.""" start_time = time.time() old_value = value = self._get_snapshot_status(snapshot_id) while True: dtime = time.time() - start_time if value != old_value: LOG.info('Value transition from "%s" to "%s" ' 'in %d second(s).', old_value, value, dtime) if (value == status): return value if dtime > self.build_timeout: message = ('Time Limit Exceeded! (%ds) ' 'while waiting for %s, ' 'but we got %s.' % (self.build_timeout, status, value)) raise exceptions.TimeoutException(message) time.sleep(self.build_interval) old_value = value value = self._get_snapshot_status(snapshot_id)
def delete_snapshot(self, snapshot_id): """Delete Snapshot.""" return self.delete("snapshots/%s" % str(snapshot_id))
def is_resource_deleted(self, id): try: self.get_snapshot(id) except exceptions.NotFound: return True return False
def reset_snapshot_status(self, snapshot_id, status): """Reset the specified snapshot's status.""" post_body = json.dumps({'os-reset_status': {"status": status}}) resp, body = self.post('snapshots/%s/action' % snapshot_id, post_body) return resp, body
def update_snapshot_status(self, snapshot_id, status, progress): """Update the specified snapshot's status.""" post_body = { 'status': status, 'progress': progress } post_body = json.dumps({'os-update_snapshot_status': post_body}) url = 'snapshots/%s/action' % str(snapshot_id) resp, body = self.post(url, post_body) return resp, body
def create_snapshot_metadata(self, snapshot_id, metadata): """Create metadata for the snapshot.""" put_body = json.dumps({'metadata': metadata}) url = "snapshots/%s/metadata" % str(snapshot_id) resp, body = self.post(url, put_body) body = json.loads(body) return resp, body['metadata']
def get_snapshot_metadata(self, snapshot_id): """Get metadata of the snapshot.""" url = "snapshots/%s/metadata" % str(snapshot_id) resp, body = self.get(url) body = json.loads(body) return resp, body['metadata']
def update_snapshot_metadata(self, snapshot_id, metadata): """Update metadata for the snapshot.""" put_body = json.dumps({'metadata': metadata}) url = "snapshots/%s/metadata" % str(snapshot_id) resp, body = self.put(url, put_body) body = json.loads(body) return resp, body['metadata']
def update_snapshot_metadata_item(self, snapshot_id, id, meta_item): """Update metadata item for the snapshot.""" put_body = json.dumps({'meta': meta_item}) url = "snapshots/%s/metadata/%s" % (str(snapshot_id), str(id)) resp, body = self.put(url, put_body) body = json.loads(body) return resp, body['meta']
def delete_snapshot_metadata_item(self, snapshot_id, id): """Delete metadata item for the snapshot.""" url = "snapshots/%s/metadata/%s" % (str(snapshot_id), str(id)) resp, body = self.delete(url) return resp, body
def test_stmt_simplify(): ib = tvm.tir.ir_builder.create() A = ib.pointer("float32", name="A") C = ib.pointer("float32", name="C") n = te.size_var("n") with ib.for_range(0, n, name="i") as i: with ib.if_scope(i < 12): A[i] = C[i] body = tvm.tir.LetStmt(n, 10, ib.get()) mod = tvm.IRModule.from_expr(tvm.tir.PrimFunc([A, C, n], body)) body = tvm.tir.transform.Simplify()(mod)["main"].body assert isinstance(body.body, tvm.tir.Store)
def test_thread_extent_simplify(): ib = tvm.tir.ir_builder.create() A = ib.pointer("float32", name="A") C = ib.pointer("float32", name="C") n = te.size_var("n") tx = te.thread_axis("threadIdx.x") ty = te.thread_axis("threadIdx.y") ib.scope_attr(tx, "thread_extent", n) ib.scope_attr(tx, "thread_extent", n) ib.scope_attr(ty, "thread_extent", 1) with ib.if_scope(tx + ty < 12): A[tx] = C[tx + ty] body = tvm.tir.LetStmt(n, 10, ib.get()) mod = tvm.IRModule.from_expr(tvm.tir.PrimFunc([A, C, n], body)) body = tvm.tir.transform.Simplify()(mod)["main"].body assert isinstance(body.body.body.body, tvm.tir.Store)
def test_if_likely(): ib = tvm.tir.ir_builder.create() A = ib.pointer("float32", name="A") C = ib.pointer("float32", name="C") n = te.size_var("n") tx = te.thread_axis("threadIdx.x") ty = te.thread_axis("threadIdx.y") ib.scope_attr(tx, "thread_extent", 32) ib.scope_attr(ty, "thread_extent", 32) with ib.if_scope(ib.likely(tx * 32 + ty < n)): with ib.if_scope(ib.likely(tx * 32 + ty < n)): A[tx] = C[tx * 32 + ty] body = ib.get() mod = tvm.IRModule.from_expr(tvm.tir.PrimFunc([A, C, n], body)) body = tvm.tir.transform.Simplify()(mod)["main"].body assert isinstance(body.body.body, tvm.tir.IfThenElse) assert not isinstance(body.body.body.then_case, tvm.tir.IfThenElse)
def name(self): if self._values['name'] is None: return None name = str(self._values['name']).strip() if name == '': raise F5ModuleError( "You must specify a name for this module" ) return name
def f(i): start = W[i] extent = W[i + 1] - W[i] rv = te.reduce_axis((0, extent)) return te.sum(X[rv + start], axis=rv)
def __init__(self, client): self.client = client self.have = None self.want = Parameters(self.client.module.params) self.changes = Parameters()
def cumsum(X): """ Y[i] = sum(X[:i]) """ (m,) = X.shape s_state = te.placeholder((m + 1,), dtype="int32", name="state") s_init = te.compute((1,), lambda _: tvm.tir.const(0, "int32")) s_update = te.compute((m + 1,), lambda l: s_state[l - 1] + X[l - 1]) return tvm.te.scan(s_init, s_update, s_state, inputs=[X], name="cumsum")
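A small usage sketch for the scan above, assuming a concrete input length and the standard TVM build-and-run flow (illustrative, not taken from the original tests):

import numpy as np
import tvm
from tvm import te

n = 8
X = te.placeholder((n,), dtype="int32", name="X")
Y = cumsum(X)                                   # shape (n + 1,), with Y[0] = 0
s = te.create_schedule(Y.op)
f = tvm.build(s, [X, Y], target="llvm")

x_nd = tvm.nd.array(np.arange(1, n + 1, dtype="int32"))
y_nd = tvm.nd.array(np.zeros(n + 1, dtype="int32"))
f(x_nd, y_nd)
# y_nd.numpy() -> [0, 1, 3, 6, 10, 15, 21, 28, 36]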
def _set_changed_options(self): changed = {} for key in Parameters.returnables: if getattr(self.want, key) is not None: changed[key] = getattr(self.want, key) if changed: self.changes = Parameters(changed)
def sls(n, d): gg = te.reduce_axis((0, lengths[n])) indices_idx = length_offsets[n] + gg data_idx = indices[indices_idx] data_val = data[data_idx, d] return te.sum(data_val, axis=gg)
def _update_changed_options(self): changed = {} for key in Parameters.updatables: if getattr(self.want, key) is not None: attr1 = getattr(self.want, key) attr2 = getattr(self.have, key) if attr1 != attr2: changed[key] = attr1 if changed: self.changes = Parameters(changed) return True return False
def _pool_is_licensed(self): if self.have.state == 'LICENSED': return True return False
def _pool_is_unlicensed_eula_unaccepted(self, current): if current.state != 'LICENSED' and not self.want.accept_eula: return True return False
def exec_module(self): changed = False result = dict() state = self.want.state try: if state == "present": changed = self.present() elif state == "absent": changed = self.absent() except iControlUnexpectedHTTPError as e: raise F5ModuleError(str(e)) result.update(**self.changes.to_return()) result.update(dict(changed=changed)) return result
def exists(self): collection = self.client.api.cm.shared.licensing.pools_s.get_collection( requests_params=dict( params="$filter=name+eq+'{0}'".format(self.want.name) ) ) if len(collection) == 1: return True elif len(collection) == 0: return False else: raise F5ModuleError( "Multiple license pools with the provided name were found!" )
def should_update(self): if self._pool_is_licensed(): return False if self._pool_is_unlicensed_eula_unaccepted(self.have): return False return True # _pool_is_unlicensed_eula_unaccepted expects the current (have) state, so it is passed explicitly