import torch

class DifferentialDiffusion():
    @classmethod
    def INPUT_TYPES(s):
        return {"required": {"model": ("MODEL", ),
                             }}
    RETURN_TYPES = ("MODEL",)
    FUNCTION = "apply"
    CATEGORY = "_for_testing"
    INIT = False

    def apply(self, model):
        model = model.clone()
        model.set_model_denoise_mask_function(self.forward)
        return (model,)

    def forward(self, sigma: torch.Tensor, denoise_mask: torch.Tensor, extra_options: dict):
        model = extra_options["model"]
        step_sigmas = extra_options["sigmas"]
        sigma_to = model.inner_model.model_sampling.sigma_min
        if step_sigmas[-1] > sigma_to:
            sigma_to = step_sigmas[-1]
        sigma_from = step_sigmas[0]

        ts_from = model.inner_model.model_sampling.timestep(sigma_from)
        ts_to = model.inner_model.model_sampling.timestep(sigma_to)
        current_ts = model.inner_model.model_sampling.timestep(sigma[0])

        threshold = (current_ts - ts_to) / (ts_from - ts_to)

        return (denoise_mask >= threshold).to(denoise_mask.dtype)


NODE_CLASS_MAPPINGS = {
    "DifferentialDiffusion": DifferentialDiffusion,
}

NODE_DISPLAY_NAME_MAPPINGS = {
    "DifferentialDiffusion": "Differential Diffusion",
}
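A minimal standalone sketch of the thresholding rule used in forward() above, with made-up timestep values; it depends only on torch and is not part of the node itself.

import torch

# Hypothetical timestep values standing in for model_sampling.timestep(...) results.
ts_from, ts_to, current_ts = 999.0, 0.0, 750.0

# Same threshold formula as DifferentialDiffusion.forward: the fraction of the schedule still remaining.
threshold = (current_ts - ts_to) / (ts_from - ts_to)   # 0.75 here

# A soft mask in [0, 1]; entries whose mask value is >= threshold are denoised at this step.
denoise_mask = torch.tensor([[0.1, 0.5], [0.8, 1.0]])
binary_mask = (denoise_mask >= threshold).to(denoise_mask.dtype)
print(binary_mask)   # tensor([[0., 0.], [1., 1.]])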
import torch
import logging

def Fourier_filter(x, threshold, scale):
    # FFT
    x_freq = torch.fft.fftn(x.float(), dim=(-2, -1))
    x_freq = torch.fft.fftshift(x_freq, dim=(-2, -1))

    B, C, H, W = x_freq.shape
    mask = torch.ones((B, C, H, W), device=x.device)

    # Scale the low-frequency band around the spectrum center.
    crow, ccol = H // 2, W // 2
    mask[..., crow - threshold:crow + threshold, ccol - threshold:ccol + threshold] = scale
    x_freq = x_freq * mask

    # IFFT
    x_freq = torch.fft.ifftshift(x_freq, dim=(-2, -1))
    x_filtered = torch.fft.ifftn(x_freq, dim=(-2, -1)).real

    return x_filtered.to(x.dtype)


class FreeU:
    @classmethod
    def INPUT_TYPES(s):
        return {"required": {"model": ("MODEL",),
                             "b1": ("FLOAT", {"default": 1.1, "min": 0.0, "max": 10.0, "step": 0.01}),
                             "b2": ("FLOAT", {"default": 1.2, "min": 0.0, "max": 10.0, "step": 0.01}),
                             "s1": ("FLOAT", {"default": 0.9, "min": 0.0, "max": 10.0, "step": 0.01}),
                             "s2": ("FLOAT", {"default": 0.2, "min": 0.0, "max": 10.0, "step": 0.01}),
                             }}
    RETURN_TYPES = ("MODEL",)
    FUNCTION = "patch"

    CATEGORY = "model_patches"

    def patch(self, model, b1, b2, s1, s2):
        model_channels = model.model.model_config.unet_config["model_channels"]
        scale_dict = {model_channels * 4: (b1, s1), model_channels * 2: (b2, s2)}
        on_cpu_devices = {}

        def output_block_patch(h, hsp, transformer_options):
            scale = scale_dict.get(int(h.shape[1]), None)
            if scale is not None:
                # Scale the first half of the backbone channels.
                h[:,:h.shape[1] // 2] = h[:,:h.shape[1] // 2] * scale[0]
                if hsp.device not in on_cpu_devices:
                    try:
                        hsp = Fourier_filter(hsp, threshold=1, scale=scale[1])
                    except:
                        logging.warning("Device {} does not support the torch.fft functions used in the FreeU node, switching to CPU.".format(hsp.device))
                        on_cpu_devices[hsp.device] = True
                        hsp = Fourier_filter(hsp.cpu(), threshold=1, scale=scale[1]).to(hsp.device)
                else:
                    hsp = Fourier_filter(hsp.cpu(), threshold=1, scale=scale[1]).to(hsp.device)

            return h, hsp

        m = model.clone()
        m.set_model_output_block_patch(output_block_patch)
        return (m, )


class FreeU_V2:
    @classmethod
    def INPUT_TYPES(s):
        return {"required": {"model": ("MODEL",),
                             "b1": ("FLOAT", {"default": 1.3, "min": 0.0, "max": 10.0, "step": 0.01}),
                             "b2": ("FLOAT", {"default": 1.4, "min": 0.0, "max": 10.0, "step": 0.01}),
                             "s1": ("FLOAT", {"default": 0.9, "min": 0.0, "max": 10.0, "step": 0.01}),
                             "s2": ("FLOAT", {"default": 0.2, "min": 0.0, "max": 10.0, "step": 0.01}),
                             }}
    RETURN_TYPES = ("MODEL",)
    FUNCTION = "patch"

    CATEGORY = "model_patches"

    def patch(self, model, b1, b2, s1, s2):
        model_channels = model.model.model_config.unet_config["model_channels"]
        scale_dict = {model_channels * 4: (b1, s1), model_channels * 2: (b2, s2)}
        on_cpu_devices = {}

        def output_block_patch(h, hsp, transformer_options):
            scale = scale_dict.get(int(h.shape[1]), None)
            if scale is not None:
                # Normalize the channel-mean feature map to [0, 1] per sample.
                hidden_mean = h.mean(1).unsqueeze(1)
                B = hidden_mean.shape[0]
                hidden_max, _ = torch.max(hidden_mean.view(B, -1), dim=-1, keepdim=True)
                hidden_min, _ = torch.min(hidden_mean.view(B, -1), dim=-1, keepdim=True)
                hidden_mean = (hidden_mean - hidden_min.unsqueeze(2).unsqueeze(3)) / (hidden_max - hidden_min).unsqueeze(2).unsqueeze(3)

                h[:,:h.shape[1] // 2] = h[:,:h.shape[1] // 2] * ((scale[0] - 1) * hidden_mean + 1)

                if hsp.device not in on_cpu_devices:
                    try:
                        hsp = Fourier_filter(hsp, threshold=1, scale=scale[1])
                    except:
                        logging.warning("Device {} does not support the torch.fft functions used in the FreeU node, switching to CPU.".format(hsp.device))
                        on_cpu_devices[hsp.device] = True
                        hsp = Fourier_filter(hsp.cpu(), threshold=1, scale=scale[1]).to(hsp.device)
                else:
                    hsp = Fourier_filter(hsp.cpu(), threshold=1, scale=scale[1]).to(hsp.device)

            return h, hsp

        m = model.clone()
        m.set_model_output_block_patch(output_block_patch)
        return (m, )


NODE_CLASS_MAPPINGS = {
    "FreeU": FreeU,
    "FreeU_V2": FreeU_V2,
}
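A small sketch showing what Fourier_filter (defined above) does to a random feature map; the shapes and values are arbitrary and it assumes the module above is in scope (e.g. imported from the saved file).

import torch

feat = torch.randn(1, 4, 16, 16)

# scale < 1 attenuates the lowest spatial frequencies (a 2*threshold band around the spectrum center),
# which is how the FreeU patch dampens the skip-connection features hsp.
filtered = Fourier_filter(feat, threshold=1, scale=0.5)

print(feat.shape == filtered.shape)     # True: the filter is shape-preserving
print(torch.allclose(feat, filtered))   # False: low-frequency content was scaled down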
import numpy as np import torch def loglinear_interp(t_steps, num_steps): """ Performs log-linear interpolation of a given array of decreasing numbers. """ xs = np.linspace(0, 1, len(t_steps)) ys = np.log(t_steps[::-1]) new_xs = np.linspace(0, 1, num_steps) new_ys = np.interp(new_xs, xs, ys) interped_ys = np.exp(new_ys)[::-1].copy() return interped_ys NOISE_LEVELS = { 0.80: [ [14.61464119, 7.49001646, 0.02916753], [14.61464119, 11.54541874, 6.77309084, 0.02916753], [14.61464119, 11.54541874, 7.49001646, 3.07277966, 0.02916753], [14.61464119, 11.54541874, 7.49001646, 5.85520077, 2.05039096, 0.02916753], [14.61464119, 12.2308979, 8.75849152, 7.49001646, 5.85520077, 2.05039096, 0.02916753], [14.61464119, 12.2308979, 8.75849152, 7.49001646, 5.85520077, 3.07277966, 1.56271636, 0.02916753], [14.61464119, 12.96784878, 11.54541874, 8.75849152, 7.49001646, 5.85520077, 3.07277966, 1.56271636, 0.02916753], [14.61464119, 13.76078796, 12.2308979, 10.90732002, 8.75849152, 7.49001646, 5.85520077, 3.07277966, 1.56271636, 0.02916753], [14.61464119, 13.76078796, 12.96784878, 12.2308979, 10.90732002, 8.75849152, 7.49001646, 5.85520077, 3.07277966, 1.56271636, 0.02916753], [14.61464119, 13.76078796, 12.96784878, 12.2308979, 10.90732002, 9.24142551, 8.30717278, 7.49001646, 5.85520077, 3.07277966, 1.56271636, 0.02916753], [14.61464119, 13.76078796, 12.96784878, 12.2308979, 10.90732002, 9.24142551, 8.30717278, 7.49001646, 6.14220476, 4.86714602, 3.07277966, 1.56271636, 0.02916753], [14.61464119, 13.76078796, 12.96784878, 12.2308979, 11.54541874, 10.31284904, 9.24142551, 8.30717278, 7.49001646, 6.14220476, 4.86714602, 3.07277966, 1.56271636, 0.02916753], [14.61464119, 13.76078796, 12.96784878, 12.2308979, 11.54541874, 10.90732002, 10.31284904, 9.24142551, 8.30717278, 7.49001646, 6.14220476, 4.86714602, 3.07277966, 1.56271636, 0.02916753], [14.61464119, 13.76078796, 12.96784878, 12.2308979, 11.54541874, 10.90732002, 10.31284904, 9.24142551, 8.75849152, 8.30717278, 7.49001646, 6.14220476, 4.86714602, 3.07277966, 1.56271636, 0.02916753], [14.61464119, 13.76078796, 12.96784878, 12.2308979, 11.54541874, 10.90732002, 10.31284904, 9.24142551, 8.75849152, 8.30717278, 7.49001646, 6.14220476, 4.86714602, 3.1956799, 1.98035145, 0.86115354, 0.02916753], [14.61464119, 13.76078796, 12.96784878, 12.2308979, 11.54541874, 10.90732002, 10.31284904, 9.75859547, 9.24142551, 8.75849152, 8.30717278, 7.49001646, 6.14220476, 4.86714602, 3.1956799, 1.98035145, 0.86115354, 0.02916753], [14.61464119, 13.76078796, 12.96784878, 12.2308979, 11.54541874, 10.90732002, 10.31284904, 9.75859547, 9.24142551, 8.75849152, 8.30717278, 7.49001646, 6.77309084, 5.85520077, 4.65472794, 3.07277966, 1.84880662, 0.83188516, 0.02916753], [14.61464119, 13.76078796, 12.96784878, 12.2308979, 11.54541874, 10.90732002, 10.31284904, 9.75859547, 9.24142551, 8.75849152, 8.30717278, 7.88507891, 7.49001646, 6.77309084, 5.85520077, 4.65472794, 3.07277966, 1.84880662, 0.83188516, 0.02916753], [14.61464119, 13.76078796, 12.96784878, 12.2308979, 11.54541874, 10.90732002, 10.31284904, 9.75859547, 9.24142551, 8.75849152, 8.30717278, 7.88507891, 7.49001646, 6.77309084, 5.85520077, 4.86714602, 3.75677586, 2.84484982, 1.78698075, 0.803307, 0.02916753], ], 0.85: [ [14.61464119, 7.49001646, 0.02916753], [14.61464119, 7.49001646, 1.84880662, 0.02916753], [14.61464119, 11.54541874, 6.77309084, 1.56271636, 0.02916753], [14.61464119, 11.54541874, 7.11996698, 3.07277966, 1.24153244, 0.02916753], [14.61464119, 11.54541874, 7.49001646, 5.09240818, 2.84484982, 0.95350921, 0.02916753], 
[14.61464119, 12.2308979, 8.75849152, 7.49001646, 5.09240818, 2.84484982, 0.95350921, 0.02916753], [14.61464119, 12.2308979, 8.75849152, 7.49001646, 5.58536053, 3.1956799, 1.84880662, 0.803307, 0.02916753], [14.61464119, 12.96784878, 11.54541874, 8.75849152, 7.49001646, 5.58536053, 3.1956799, 1.84880662, 0.803307, 0.02916753], [14.61464119, 12.96784878, 11.54541874, 8.75849152, 7.49001646, 6.14220476, 4.65472794, 3.07277966, 1.84880662, 0.803307, 0.02916753], [14.61464119, 13.76078796, 12.2308979, 10.90732002, 8.75849152, 7.49001646, 6.14220476, 4.65472794, 3.07277966, 1.84880662, 0.803307, 0.02916753], [14.61464119, 13.76078796, 12.2308979, 10.90732002, 9.24142551, 8.30717278, 7.49001646, 6.14220476, 4.65472794, 3.07277966, 1.84880662, 0.803307, 0.02916753], [14.61464119, 13.76078796, 12.96784878, 12.2308979, 10.90732002, 9.24142551, 8.30717278, 7.49001646, 6.14220476, 4.65472794, 3.07277966, 1.84880662, 0.803307, 0.02916753], [14.61464119, 13.76078796, 12.96784878, 12.2308979, 11.54541874, 10.31284904, 9.24142551, 8.30717278, 7.49001646, 6.14220476, 4.65472794, 3.07277966, 1.84880662, 0.803307, 0.02916753], [14.61464119, 13.76078796, 12.96784878, 12.2308979, 11.54541874, 10.31284904, 9.24142551, 8.30717278, 7.49001646, 6.14220476, 4.86714602, 3.60512662, 2.6383388, 1.56271636, 0.72133851, 0.02916753], [14.61464119, 13.76078796, 12.96784878, 12.2308979, 11.54541874, 10.31284904, 9.24142551, 8.30717278, 7.49001646, 6.77309084, 5.85520077, 4.65472794, 3.46139455, 2.45070267, 1.56271636, 0.72133851, 0.02916753], [14.61464119, 13.76078796, 12.96784878, 12.2308979, 11.54541874, 10.31284904, 9.24142551, 8.75849152, 8.30717278, 7.49001646, 6.77309084, 5.85520077, 4.65472794, 3.46139455, 2.45070267, 1.56271636, 0.72133851, 0.02916753], [14.61464119, 13.76078796, 12.96784878, 12.2308979, 11.54541874, 10.90732002, 10.31284904, 9.24142551, 8.75849152, 8.30717278, 7.49001646, 6.77309084, 5.85520077, 4.65472794, 3.46139455, 2.45070267, 1.56271636, 0.72133851, 0.02916753], [14.61464119, 13.76078796, 12.96784878, 12.2308979, 11.54541874, 10.90732002, 10.31284904, 9.75859547, 9.24142551, 8.75849152, 8.30717278, 7.49001646, 6.77309084, 5.85520077, 4.65472794, 3.46139455, 2.45070267, 1.56271636, 0.72133851, 0.02916753], [14.61464119, 13.76078796, 12.96784878, 12.2308979, 11.54541874, 10.90732002, 10.31284904, 9.75859547, 9.24142551, 8.75849152, 8.30717278, 7.88507891, 7.49001646, 6.77309084, 5.85520077, 4.65472794, 3.46139455, 2.45070267, 1.56271636, 0.72133851, 0.02916753], ], 0.90: [ [14.61464119, 6.77309084, 0.02916753], [14.61464119, 7.49001646, 1.56271636, 0.02916753], [14.61464119, 7.49001646, 3.07277966, 0.95350921, 0.02916753], [14.61464119, 7.49001646, 4.86714602, 2.54230714, 0.89115214, 0.02916753], [14.61464119, 11.54541874, 7.49001646, 4.86714602, 2.54230714, 0.89115214, 0.02916753], [14.61464119, 11.54541874, 7.49001646, 5.09240818, 3.07277966, 1.61558151, 0.69515091, 0.02916753], [14.61464119, 12.2308979, 8.75849152, 7.11996698, 4.86714602, 3.07277966, 1.61558151, 0.69515091, 0.02916753], [14.61464119, 12.2308979, 8.75849152, 7.49001646, 5.85520077, 4.45427561, 2.95596409, 1.61558151, 0.69515091, 0.02916753], [14.61464119, 12.2308979, 8.75849152, 7.49001646, 5.85520077, 4.45427561, 3.1956799, 2.19988537, 1.24153244, 0.57119018, 0.02916753], [14.61464119, 12.96784878, 10.90732002, 8.75849152, 7.49001646, 5.85520077, 4.45427561, 3.1956799, 2.19988537, 1.24153244, 0.57119018, 0.02916753], [14.61464119, 12.96784878, 11.54541874, 9.24142551, 8.30717278, 7.49001646, 5.85520077, 4.45427561, 
3.1956799, 2.19988537, 1.24153244, 0.57119018, 0.02916753], [14.61464119, 12.96784878, 11.54541874, 9.24142551, 8.30717278, 7.49001646, 6.14220476, 4.86714602, 3.75677586, 2.84484982, 1.84880662, 1.08895338, 0.52423614, 0.02916753], [14.61464119, 13.76078796, 12.2308979, 10.90732002, 9.24142551, 8.30717278, 7.49001646, 6.14220476, 4.86714602, 3.75677586, 2.84484982, 1.84880662, 1.08895338, 0.52423614, 0.02916753], [14.61464119, 13.76078796, 12.2308979, 10.90732002, 9.24142551, 8.30717278, 7.49001646, 6.44769001, 5.58536053, 4.45427561, 3.32507086, 2.45070267, 1.61558151, 0.95350921, 0.45573691, 0.02916753], [14.61464119, 13.76078796, 12.96784878, 12.2308979, 10.90732002, 9.24142551, 8.30717278, 7.49001646, 6.44769001, 5.58536053, 4.45427561, 3.32507086, 2.45070267, 1.61558151, 0.95350921, 0.45573691, 0.02916753], [14.61464119, 13.76078796, 12.96784878, 12.2308979, 10.90732002, 9.24142551, 8.30717278, 7.49001646, 6.77309084, 5.85520077, 4.86714602, 3.91689563, 3.07277966, 2.27973175, 1.56271636, 0.95350921, 0.45573691, 0.02916753], [14.61464119, 13.76078796, 12.96784878, 12.2308979, 11.54541874, 10.31284904, 9.24142551, 8.30717278, 7.49001646, 6.77309084, 5.85520077, 4.86714602, 3.91689563, 3.07277966, 2.27973175, 1.56271636, 0.95350921, 0.45573691, 0.02916753], [14.61464119, 13.76078796, 12.96784878, 12.2308979, 11.54541874, 10.31284904, 9.24142551, 8.75849152, 8.30717278, 7.49001646, 6.77309084, 5.85520077, 4.86714602, 3.91689563, 3.07277966, 2.27973175, 1.56271636, 0.95350921, 0.45573691, 0.02916753], [14.61464119, 13.76078796, 12.96784878, 12.2308979, 11.54541874, 10.31284904, 9.24142551, 8.75849152, 8.30717278, 7.49001646, 6.77309084, 5.85520077, 5.09240818, 4.45427561, 3.60512662, 2.95596409, 2.19988537, 1.51179266, 0.89115214, 0.43325692, 0.02916753], ], 0.95: [ [14.61464119, 6.77309084, 0.02916753], [14.61464119, 6.77309084, 1.56271636, 0.02916753], [14.61464119, 7.49001646, 2.84484982, 0.89115214, 0.02916753], [14.61464119, 7.49001646, 4.86714602, 2.36326075, 0.803307, 0.02916753], [14.61464119, 7.49001646, 4.86714602, 2.95596409, 1.56271636, 0.64427125, 0.02916753], [14.61464119, 11.54541874, 7.49001646, 4.86714602, 2.95596409, 1.56271636, 0.64427125, 0.02916753], [14.61464119, 11.54541874, 7.49001646, 4.86714602, 3.07277966, 1.91321158, 1.08895338, 0.50118381, 0.02916753], [14.61464119, 11.54541874, 7.49001646, 5.85520077, 4.45427561, 3.07277966, 1.91321158, 1.08895338, 0.50118381, 0.02916753], [14.61464119, 12.2308979, 8.75849152, 7.49001646, 5.85520077, 4.45427561, 3.07277966, 1.91321158, 1.08895338, 0.50118381, 0.02916753], [14.61464119, 12.2308979, 8.75849152, 7.49001646, 5.85520077, 4.45427561, 3.1956799, 2.19988537, 1.41535246, 0.803307, 0.38853383, 0.02916753], [14.61464119, 12.2308979, 8.75849152, 7.49001646, 5.85520077, 4.65472794, 3.46139455, 2.6383388, 1.84880662, 1.24153244, 0.72133851, 0.34370604, 0.02916753], [14.61464119, 12.96784878, 10.90732002, 8.75849152, 7.49001646, 5.85520077, 4.65472794, 3.46139455, 2.6383388, 1.84880662, 1.24153244, 0.72133851, 0.34370604, 0.02916753], [14.61464119, 12.96784878, 10.90732002, 8.75849152, 7.49001646, 6.14220476, 4.86714602, 3.75677586, 2.95596409, 2.19988537, 1.56271636, 1.05362725, 0.64427125, 0.32104823, 0.02916753], [14.61464119, 12.96784878, 10.90732002, 8.75849152, 7.49001646, 6.44769001, 5.58536053, 4.65472794, 3.60512662, 2.95596409, 2.19988537, 1.56271636, 1.05362725, 0.64427125, 0.32104823, 0.02916753], [14.61464119, 12.96784878, 11.54541874, 9.24142551, 8.30717278, 7.49001646, 6.44769001, 5.58536053, 4.65472794, 
3.60512662, 2.95596409, 2.19988537, 1.56271636, 1.05362725, 0.64427125, 0.32104823, 0.02916753], [14.61464119, 12.96784878, 11.54541874, 9.24142551, 8.30717278, 7.49001646, 6.44769001, 5.58536053, 4.65472794, 3.75677586, 3.07277966, 2.45070267, 1.78698075, 1.24153244, 0.83188516, 0.50118381, 0.22545385, 0.02916753], [14.61464119, 12.96784878, 11.54541874, 9.24142551, 8.30717278, 7.49001646, 6.77309084, 5.85520077, 5.09240818, 4.45427561, 3.60512662, 2.95596409, 2.36326075, 1.72759056, 1.24153244, 0.83188516, 0.50118381, 0.22545385, 0.02916753], [14.61464119, 13.76078796, 12.2308979, 10.90732002, 9.24142551, 8.30717278, 7.49001646, 6.77309084, 5.85520077, 5.09240818, 4.45427561, 3.60512662, 2.95596409, 2.36326075, 1.72759056, 1.24153244, 0.83188516, 0.50118381, 0.22545385, 0.02916753], [14.61464119, 13.76078796, 12.2308979, 10.90732002, 9.24142551, 8.30717278, 7.49001646, 6.77309084, 5.85520077, 5.09240818, 4.45427561, 3.75677586, 3.07277966, 2.45070267, 1.91321158, 1.46270394, 1.05362725, 0.72133851, 0.43325692, 0.19894916, 0.02916753], ], 1.00: [ [14.61464119, 1.56271636, 0.02916753], [14.61464119, 6.77309084, 0.95350921, 0.02916753], [14.61464119, 6.77309084, 2.36326075, 0.803307, 0.02916753], [14.61464119, 7.11996698, 3.07277966, 1.56271636, 0.59516323, 0.02916753], [14.61464119, 7.49001646, 4.86714602, 2.84484982, 1.41535246, 0.57119018, 0.02916753], [14.61464119, 7.49001646, 4.86714602, 2.84484982, 1.61558151, 0.86115354, 0.38853383, 0.02916753], [14.61464119, 11.54541874, 7.49001646, 4.86714602, 2.84484982, 1.61558151, 0.86115354, 0.38853383, 0.02916753], [14.61464119, 11.54541874, 7.49001646, 4.86714602, 3.07277966, 1.98035145, 1.24153244, 0.72133851, 0.34370604, 0.02916753], [14.61464119, 11.54541874, 7.49001646, 5.85520077, 4.45427561, 3.07277966, 1.98035145, 1.24153244, 0.72133851, 0.34370604, 0.02916753], [14.61464119, 11.54541874, 7.49001646, 5.85520077, 4.45427561, 3.1956799, 2.27973175, 1.51179266, 0.95350921, 0.54755926, 0.25053367, 0.02916753], [14.61464119, 11.54541874, 7.49001646, 5.85520077, 4.45427561, 3.1956799, 2.36326075, 1.61558151, 1.08895338, 0.72133851, 0.41087446, 0.17026083, 0.02916753], [14.61464119, 11.54541874, 8.75849152, 7.49001646, 5.85520077, 4.45427561, 3.1956799, 2.36326075, 1.61558151, 1.08895338, 0.72133851, 0.41087446, 0.17026083, 0.02916753], [14.61464119, 11.54541874, 8.75849152, 7.49001646, 5.85520077, 4.65472794, 3.60512662, 2.84484982, 2.12350607, 1.56271636, 1.08895338, 0.72133851, 0.41087446, 0.17026083, 0.02916753], [14.61464119, 11.54541874, 8.75849152, 7.49001646, 5.85520077, 4.65472794, 3.60512662, 2.84484982, 2.19988537, 1.61558151, 1.162866, 0.803307, 0.50118381, 0.27464288, 0.09824532, 0.02916753], [14.61464119, 11.54541874, 8.75849152, 7.49001646, 5.85520077, 4.65472794, 3.75677586, 3.07277966, 2.45070267, 1.84880662, 1.36964464, 1.01931262, 0.72133851, 0.45573691, 0.25053367, 0.09824532, 0.02916753], [14.61464119, 11.54541874, 8.75849152, 7.49001646, 6.14220476, 5.09240818, 4.26497746, 3.46139455, 2.84484982, 2.19988537, 1.67050016, 1.24153244, 0.92192322, 0.64427125, 0.43325692, 0.25053367, 0.09824532, 0.02916753], [14.61464119, 11.54541874, 8.75849152, 7.49001646, 6.14220476, 5.09240818, 4.26497746, 3.60512662, 2.95596409, 2.45070267, 1.91321158, 1.51179266, 1.12534678, 0.83188516, 0.59516323, 0.38853383, 0.22545385, 0.09824532, 0.02916753], [14.61464119, 12.2308979, 9.24142551, 8.30717278, 7.49001646, 6.14220476, 5.09240818, 4.26497746, 3.60512662, 2.95596409, 2.45070267, 1.91321158, 1.51179266, 1.12534678, 0.83188516, 0.59516323, 
0.38853383, 0.22545385, 0.09824532, 0.02916753], [14.61464119, 12.2308979, 9.24142551, 8.30717278, 7.49001646, 6.77309084, 5.85520077, 5.09240818, 4.26497746, 3.60512662, 2.95596409, 2.45070267, 1.91321158, 1.51179266, 1.12534678, 0.83188516, 0.59516323, 0.38853383, 0.22545385, 0.09824532, 0.02916753], ], 1.05: [ [14.61464119, 0.95350921, 0.02916753], [14.61464119, 6.77309084, 0.89115214, 0.02916753], [14.61464119, 6.77309084, 2.05039096, 0.72133851, 0.02916753], [14.61464119, 6.77309084, 2.84484982, 1.28281462, 0.52423614, 0.02916753], [14.61464119, 6.77309084, 3.07277966, 1.61558151, 0.803307, 0.34370604, 0.02916753], [14.61464119, 7.49001646, 4.86714602, 2.84484982, 1.56271636, 0.803307, 0.34370604, 0.02916753], [14.61464119, 7.49001646, 4.86714602, 2.84484982, 1.61558151, 0.95350921, 0.52423614, 0.22545385, 0.02916753], [14.61464119, 7.49001646, 4.86714602, 3.07277966, 1.98035145, 1.24153244, 0.74807048, 0.41087446, 0.17026083, 0.02916753], [14.61464119, 7.49001646, 4.86714602, 3.1956799, 2.27973175, 1.51179266, 0.95350921, 0.59516323, 0.34370604, 0.13792117, 0.02916753], [14.61464119, 7.49001646, 5.09240818, 3.46139455, 2.45070267, 1.61558151, 1.08895338, 0.72133851, 0.45573691, 0.25053367, 0.09824532, 0.02916753], [14.61464119, 11.54541874, 7.49001646, 5.09240818, 3.46139455, 2.45070267, 1.61558151, 1.08895338, 0.72133851, 0.45573691, 0.25053367, 0.09824532, 0.02916753], [14.61464119, 11.54541874, 7.49001646, 5.85520077, 4.45427561, 3.1956799, 2.36326075, 1.61558151, 1.08895338, 0.72133851, 0.45573691, 0.25053367, 0.09824532, 0.02916753], [14.61464119, 11.54541874, 7.49001646, 5.85520077, 4.45427561, 3.1956799, 2.45070267, 1.72759056, 1.24153244, 0.86115354, 0.59516323, 0.38853383, 0.22545385, 0.09824532, 0.02916753], [14.61464119, 11.54541874, 7.49001646, 5.85520077, 4.65472794, 3.60512662, 2.84484982, 2.19988537, 1.61558151, 1.162866, 0.83188516, 0.59516323, 0.38853383, 0.22545385, 0.09824532, 0.02916753], [14.61464119, 11.54541874, 7.49001646, 5.85520077, 4.65472794, 3.60512662, 2.84484982, 2.19988537, 1.67050016, 1.28281462, 0.95350921, 0.72133851, 0.52423614, 0.34370604, 0.19894916, 0.09824532, 0.02916753], [14.61464119, 11.54541874, 7.49001646, 5.85520077, 4.65472794, 3.60512662, 2.95596409, 2.36326075, 1.84880662, 1.41535246, 1.08895338, 0.83188516, 0.61951244, 0.45573691, 0.32104823, 0.19894916, 0.09824532, 0.02916753], [14.61464119, 11.54541874, 7.49001646, 5.85520077, 4.65472794, 3.60512662, 2.95596409, 2.45070267, 1.91321158, 1.51179266, 1.20157266, 0.95350921, 0.74807048, 0.57119018, 0.43325692, 0.29807833, 0.19894916, 0.09824532, 0.02916753], [14.61464119, 11.54541874, 8.30717278, 7.11996698, 5.85520077, 4.65472794, 3.60512662, 2.95596409, 2.45070267, 1.91321158, 1.51179266, 1.20157266, 0.95350921, 0.74807048, 0.57119018, 0.43325692, 0.29807833, 0.19894916, 0.09824532, 0.02916753], [14.61464119, 11.54541874, 8.30717278, 7.11996698, 5.85520077, 4.65472794, 3.60512662, 2.95596409, 2.45070267, 1.98035145, 1.61558151, 1.32549286, 1.08895338, 0.86115354, 0.69515091, 0.54755926, 0.41087446, 0.29807833, 0.19894916, 0.09824532, 0.02916753], ], 1.10: [ [14.61464119, 0.89115214, 0.02916753], [14.61464119, 2.36326075, 0.72133851, 0.02916753], [14.61464119, 5.85520077, 1.61558151, 0.57119018, 0.02916753], [14.61464119, 6.77309084, 2.45070267, 1.08895338, 0.45573691, 0.02916753], [14.61464119, 6.77309084, 2.95596409, 1.56271636, 0.803307, 0.34370604, 0.02916753], [14.61464119, 6.77309084, 3.07277966, 1.61558151, 0.89115214, 0.4783645, 0.19894916, 0.02916753], [14.61464119, 
6.77309084, 3.07277966, 1.84880662, 1.08895338, 0.64427125, 0.34370604, 0.13792117, 0.02916753], [14.61464119, 7.49001646, 4.86714602, 2.84484982, 1.61558151, 0.95350921, 0.54755926, 0.27464288, 0.09824532, 0.02916753], [14.61464119, 7.49001646, 4.86714602, 2.95596409, 1.91321158, 1.24153244, 0.803307, 0.4783645, 0.25053367, 0.09824532, 0.02916753], [14.61464119, 7.49001646, 4.86714602, 3.07277966, 2.05039096, 1.41535246, 0.95350921, 0.64427125, 0.41087446, 0.22545385, 0.09824532, 0.02916753], [14.61464119, 7.49001646, 4.86714602, 3.1956799, 2.27973175, 1.61558151, 1.12534678, 0.803307, 0.54755926, 0.36617002, 0.22545385, 0.09824532, 0.02916753], [14.61464119, 7.49001646, 4.86714602, 3.32507086, 2.45070267, 1.72759056, 1.24153244, 0.89115214, 0.64427125, 0.45573691, 0.32104823, 0.19894916, 0.09824532, 0.02916753], [14.61464119, 7.49001646, 5.09240818, 3.60512662, 2.84484982, 2.05039096, 1.51179266, 1.08895338, 0.803307, 0.59516323, 0.43325692, 0.29807833, 0.19894916, 0.09824532, 0.02916753], [14.61464119, 7.49001646, 5.09240818, 3.60512662, 2.84484982, 2.12350607, 1.61558151, 1.24153244, 0.95350921, 0.72133851, 0.54755926, 0.41087446, 0.29807833, 0.19894916, 0.09824532, 0.02916753], [14.61464119, 7.49001646, 5.85520077, 4.45427561, 3.1956799, 2.45070267, 1.84880662, 1.41535246, 1.08895338, 0.83188516, 0.64427125, 0.50118381, 0.36617002, 0.25053367, 0.17026083, 0.09824532, 0.02916753], [14.61464119, 7.49001646, 5.85520077, 4.45427561, 3.1956799, 2.45070267, 1.91321158, 1.51179266, 1.20157266, 0.95350921, 0.74807048, 0.59516323, 0.45573691, 0.34370604, 0.25053367, 0.17026083, 0.09824532, 0.02916753], [14.61464119, 7.49001646, 5.85520077, 4.45427561, 3.46139455, 2.84484982, 2.19988537, 1.72759056, 1.36964464, 1.08895338, 0.86115354, 0.69515091, 0.54755926, 0.43325692, 0.34370604, 0.25053367, 0.17026083, 0.09824532, 0.02916753], [14.61464119, 11.54541874, 7.49001646, 5.85520077, 4.45427561, 3.46139455, 2.84484982, 2.19988537, 1.72759056, 1.36964464, 1.08895338, 0.86115354, 0.69515091, 0.54755926, 0.43325692, 0.34370604, 0.25053367, 0.17026083, 0.09824532, 0.02916753], [14.61464119, 11.54541874, 7.49001646, 5.85520077, 4.45427561, 3.46139455, 2.84484982, 2.19988537, 1.72759056, 1.36964464, 1.08895338, 0.89115214, 0.72133851, 0.59516323, 0.4783645, 0.38853383, 0.29807833, 0.22545385, 0.17026083, 0.09824532, 0.02916753], ], 1.15: [ [14.61464119, 0.83188516, 0.02916753], [14.61464119, 1.84880662, 0.59516323, 0.02916753], [14.61464119, 5.85520077, 1.56271636, 0.52423614, 0.02916753], [14.61464119, 5.85520077, 1.91321158, 0.83188516, 0.34370604, 0.02916753], [14.61464119, 5.85520077, 2.45070267, 1.24153244, 0.59516323, 0.25053367, 0.02916753], [14.61464119, 5.85520077, 2.84484982, 1.51179266, 0.803307, 0.41087446, 0.17026083, 0.02916753], [14.61464119, 5.85520077, 2.84484982, 1.56271636, 0.89115214, 0.50118381, 0.25053367, 0.09824532, 0.02916753], [14.61464119, 6.77309084, 3.07277966, 1.84880662, 1.12534678, 0.72133851, 0.43325692, 0.22545385, 0.09824532, 0.02916753], [14.61464119, 6.77309084, 3.07277966, 1.91321158, 1.24153244, 0.803307, 0.52423614, 0.34370604, 0.19894916, 0.09824532, 0.02916753], [14.61464119, 7.49001646, 4.86714602, 2.95596409, 1.91321158, 1.24153244, 0.803307, 0.52423614, 0.34370604, 0.19894916, 0.09824532, 0.02916753], [14.61464119, 7.49001646, 4.86714602, 3.07277966, 2.05039096, 1.36964464, 0.95350921, 0.69515091, 0.4783645, 0.32104823, 0.19894916, 0.09824532, 0.02916753], [14.61464119, 7.49001646, 4.86714602, 3.07277966, 2.12350607, 1.51179266, 1.08895338, 0.803307, 
0.59516323, 0.43325692, 0.29807833, 0.19894916, 0.09824532, 0.02916753], [14.61464119, 7.49001646, 4.86714602, 3.07277966, 2.12350607, 1.51179266, 1.08895338, 0.803307, 0.59516323, 0.45573691, 0.34370604, 0.25053367, 0.17026083, 0.09824532, 0.02916753], [14.61464119, 7.49001646, 4.86714602, 3.07277966, 2.19988537, 1.61558151, 1.24153244, 0.95350921, 0.74807048, 0.59516323, 0.45573691, 0.34370604, 0.25053367, 0.17026083, 0.09824532, 0.02916753], [14.61464119, 7.49001646, 4.86714602, 3.1956799, 2.45070267, 1.78698075, 1.32549286, 1.01931262, 0.803307, 0.64427125, 0.50118381, 0.38853383, 0.29807833, 0.22545385, 0.17026083, 0.09824532, 0.02916753], [14.61464119, 7.49001646, 4.86714602, 3.1956799, 2.45070267, 1.78698075, 1.32549286, 1.01931262, 0.803307, 0.64427125, 0.52423614, 0.41087446, 0.32104823, 0.25053367, 0.19894916, 0.13792117, 0.09824532, 0.02916753], [14.61464119, 7.49001646, 4.86714602, 3.1956799, 2.45070267, 1.84880662, 1.41535246, 1.12534678, 0.89115214, 0.72133851, 0.59516323, 0.4783645, 0.38853383, 0.32104823, 0.25053367, 0.19894916, 0.13792117, 0.09824532, 0.02916753], [14.61464119, 7.49001646, 4.86714602, 3.1956799, 2.45070267, 1.84880662, 1.41535246, 1.12534678, 0.89115214, 0.72133851, 0.59516323, 0.50118381, 0.41087446, 0.34370604, 0.27464288, 0.22545385, 0.17026083, 0.13792117, 0.09824532, 0.02916753], [14.61464119, 7.49001646, 4.86714602, 3.1956799, 2.45070267, 1.84880662, 1.41535246, 1.12534678, 0.89115214, 0.72133851, 0.59516323, 0.50118381, 0.41087446, 0.34370604, 0.29807833, 0.25053367, 0.19894916, 0.17026083, 0.13792117, 0.09824532, 0.02916753], ], 1.20: [ [14.61464119, 0.803307, 0.02916753], [14.61464119, 1.56271636, 0.52423614, 0.02916753], [14.61464119, 2.36326075, 0.92192322, 0.36617002, 0.02916753], [14.61464119, 2.84484982, 1.24153244, 0.59516323, 0.25053367, 0.02916753], [14.61464119, 5.85520077, 2.05039096, 0.95350921, 0.45573691, 0.17026083, 0.02916753], [14.61464119, 5.85520077, 2.45070267, 1.24153244, 0.64427125, 0.29807833, 0.09824532, 0.02916753], [14.61464119, 5.85520077, 2.45070267, 1.36964464, 0.803307, 0.45573691, 0.25053367, 0.09824532, 0.02916753], [14.61464119, 5.85520077, 2.84484982, 1.61558151, 0.95350921, 0.59516323, 0.36617002, 0.19894916, 0.09824532, 0.02916753], [14.61464119, 5.85520077, 2.84484982, 1.67050016, 1.08895338, 0.74807048, 0.50118381, 0.32104823, 0.19894916, 0.09824532, 0.02916753], [14.61464119, 5.85520077, 2.95596409, 1.84880662, 1.24153244, 0.83188516, 0.59516323, 0.41087446, 0.27464288, 0.17026083, 0.09824532, 0.02916753], [14.61464119, 5.85520077, 3.07277966, 1.98035145, 1.36964464, 0.95350921, 0.69515091, 0.50118381, 0.36617002, 0.25053367, 0.17026083, 0.09824532, 0.02916753], [14.61464119, 6.77309084, 3.46139455, 2.36326075, 1.56271636, 1.08895338, 0.803307, 0.59516323, 0.45573691, 0.34370604, 0.25053367, 0.17026083, 0.09824532, 0.02916753], [14.61464119, 6.77309084, 3.46139455, 2.45070267, 1.61558151, 1.162866, 0.86115354, 0.64427125, 0.50118381, 0.38853383, 0.29807833, 0.22545385, 0.17026083, 0.09824532, 0.02916753], [14.61464119, 7.49001646, 4.65472794, 3.07277966, 2.12350607, 1.51179266, 1.08895338, 0.83188516, 0.64427125, 0.50118381, 0.38853383, 0.29807833, 0.22545385, 0.17026083, 0.09824532, 0.02916753], [14.61464119, 7.49001646, 4.65472794, 3.07277966, 2.12350607, 1.51179266, 1.08895338, 0.83188516, 0.64427125, 0.50118381, 0.41087446, 0.32104823, 0.25053367, 0.19894916, 0.13792117, 0.09824532, 0.02916753], [14.61464119, 7.49001646, 4.65472794, 3.07277966, 2.12350607, 1.51179266, 1.08895338, 0.83188516, 0.64427125, 
0.50118381, 0.41087446, 0.34370604, 0.27464288, 0.22545385, 0.17026083, 0.13792117, 0.09824532, 0.02916753], [14.61464119, 7.49001646, 4.65472794, 3.07277966, 2.19988537, 1.61558151, 1.20157266, 0.92192322, 0.72133851, 0.57119018, 0.45573691, 0.36617002, 0.29807833, 0.25053367, 0.19894916, 0.17026083, 0.13792117, 0.09824532, 0.02916753], [14.61464119, 7.49001646, 4.65472794, 3.07277966, 2.19988537, 1.61558151, 1.24153244, 0.95350921, 0.74807048, 0.59516323, 0.4783645, 0.38853383, 0.32104823, 0.27464288, 0.22545385, 0.19894916, 0.17026083, 0.13792117, 0.09824532, 0.02916753], [14.61464119, 7.49001646, 4.65472794, 3.07277966, 2.19988537, 1.61558151, 1.24153244, 0.95350921, 0.74807048, 0.59516323, 0.50118381, 0.41087446, 0.34370604, 0.29807833, 0.25053367, 0.22545385, 0.19894916, 0.17026083, 0.13792117, 0.09824532, 0.02916753], ], 1.25: [ [14.61464119, 0.72133851, 0.02916753], [14.61464119, 1.56271636, 0.50118381, 0.02916753], [14.61464119, 2.05039096, 0.803307, 0.32104823, 0.02916753], [14.61464119, 2.36326075, 0.95350921, 0.43325692, 0.17026083, 0.02916753], [14.61464119, 2.84484982, 1.24153244, 0.59516323, 0.27464288, 0.09824532, 0.02916753], [14.61464119, 3.07277966, 1.51179266, 0.803307, 0.43325692, 0.22545385, 0.09824532, 0.02916753], [14.61464119, 5.85520077, 2.36326075, 1.24153244, 0.72133851, 0.41087446, 0.22545385, 0.09824532, 0.02916753], [14.61464119, 5.85520077, 2.45070267, 1.36964464, 0.83188516, 0.52423614, 0.34370604, 0.19894916, 0.09824532, 0.02916753], [14.61464119, 5.85520077, 2.84484982, 1.61558151, 0.98595673, 0.64427125, 0.43325692, 0.27464288, 0.17026083, 0.09824532, 0.02916753], [14.61464119, 5.85520077, 2.84484982, 1.67050016, 1.08895338, 0.74807048, 0.52423614, 0.36617002, 0.25053367, 0.17026083, 0.09824532, 0.02916753], [14.61464119, 5.85520077, 2.84484982, 1.72759056, 1.162866, 0.803307, 0.59516323, 0.45573691, 0.34370604, 0.25053367, 0.17026083, 0.09824532, 0.02916753], [14.61464119, 5.85520077, 2.95596409, 1.84880662, 1.24153244, 0.86115354, 0.64427125, 0.4783645, 0.36617002, 0.27464288, 0.19894916, 0.13792117, 0.09824532, 0.02916753], [14.61464119, 5.85520077, 2.95596409, 1.84880662, 1.28281462, 0.92192322, 0.69515091, 0.52423614, 0.41087446, 0.32104823, 0.25053367, 0.19894916, 0.13792117, 0.09824532, 0.02916753], [14.61464119, 5.85520077, 2.95596409, 1.91321158, 1.32549286, 0.95350921, 0.72133851, 0.54755926, 0.43325692, 0.34370604, 0.27464288, 0.22545385, 0.17026083, 0.13792117, 0.09824532, 0.02916753], [14.61464119, 5.85520077, 2.95596409, 1.91321158, 1.32549286, 0.95350921, 0.72133851, 0.57119018, 0.45573691, 0.36617002, 0.29807833, 0.25053367, 0.19894916, 0.17026083, 0.13792117, 0.09824532, 0.02916753], [14.61464119, 5.85520077, 2.95596409, 1.91321158, 1.32549286, 0.95350921, 0.74807048, 0.59516323, 0.4783645, 0.38853383, 0.32104823, 0.27464288, 0.22545385, 0.19894916, 0.17026083, 0.13792117, 0.09824532, 0.02916753], [14.61464119, 5.85520077, 3.07277966, 2.05039096, 1.41535246, 1.05362725, 0.803307, 0.61951244, 0.50118381, 0.41087446, 0.34370604, 0.29807833, 0.25053367, 0.22545385, 0.19894916, 0.17026083, 0.13792117, 0.09824532, 0.02916753], [14.61464119, 5.85520077, 3.07277966, 2.05039096, 1.41535246, 1.05362725, 0.803307, 0.64427125, 0.52423614, 0.43325692, 0.36617002, 0.32104823, 0.27464288, 0.25053367, 0.22545385, 0.19894916, 0.17026083, 0.13792117, 0.09824532, 0.02916753], [14.61464119, 5.85520077, 3.07277966, 2.05039096, 1.46270394, 1.08895338, 0.83188516, 0.66947293, 0.54755926, 0.45573691, 0.38853383, 0.34370604, 0.29807833, 0.27464288, 0.25053367, 
0.22545385, 0.19894916, 0.17026083, 0.13792117, 0.09824532, 0.02916753], ], 1.30: [ [14.61464119, 0.72133851, 0.02916753], [14.61464119, 1.24153244, 0.43325692, 0.02916753], [14.61464119, 1.56271636, 0.59516323, 0.22545385, 0.02916753], [14.61464119, 1.84880662, 0.803307, 0.36617002, 0.13792117, 0.02916753], [14.61464119, 2.36326075, 1.01931262, 0.52423614, 0.25053367, 0.09824532, 0.02916753], [14.61464119, 2.84484982, 1.36964464, 0.74807048, 0.41087446, 0.22545385, 0.09824532, 0.02916753], [14.61464119, 3.07277966, 1.56271636, 0.89115214, 0.54755926, 0.34370604, 0.19894916, 0.09824532, 0.02916753], [14.61464119, 3.07277966, 1.61558151, 0.95350921, 0.61951244, 0.41087446, 0.27464288, 0.17026083, 0.09824532, 0.02916753], [14.61464119, 5.85520077, 2.45070267, 1.36964464, 0.83188516, 0.54755926, 0.36617002, 0.25053367, 0.17026083, 0.09824532, 0.02916753], [14.61464119, 5.85520077, 2.45070267, 1.41535246, 0.92192322, 0.64427125, 0.45573691, 0.34370604, 0.25053367, 0.17026083, 0.09824532, 0.02916753], [14.61464119, 5.85520077, 2.6383388, 1.56271636, 1.01931262, 0.72133851, 0.50118381, 0.36617002, 0.27464288, 0.19894916, 0.13792117, 0.09824532, 0.02916753], [14.61464119, 5.85520077, 2.84484982, 1.61558151, 1.05362725, 0.74807048, 0.54755926, 0.41087446, 0.32104823, 0.25053367, 0.19894916, 0.13792117, 0.09824532, 0.02916753], [14.61464119, 5.85520077, 2.84484982, 1.61558151, 1.08895338, 0.77538133, 0.57119018, 0.43325692, 0.34370604, 0.27464288, 0.22545385, 0.17026083, 0.13792117, 0.09824532, 0.02916753], [14.61464119, 5.85520077, 2.84484982, 1.61558151, 1.08895338, 0.803307, 0.59516323, 0.45573691, 0.36617002, 0.29807833, 0.25053367, 0.19894916, 0.17026083, 0.13792117, 0.09824532, 0.02916753], [14.61464119, 5.85520077, 2.84484982, 1.61558151, 1.08895338, 0.803307, 0.59516323, 0.4783645, 0.38853383, 0.32104823, 0.27464288, 0.22545385, 0.19894916, 0.17026083, 0.13792117, 0.09824532, 0.02916753], [14.61464119, 5.85520077, 2.84484982, 1.72759056, 1.162866, 0.83188516, 0.64427125, 0.50118381, 0.41087446, 0.34370604, 0.29807833, 0.25053367, 0.22545385, 0.19894916, 0.17026083, 0.13792117, 0.09824532, 0.02916753], [14.61464119, 5.85520077, 2.84484982, 1.72759056, 1.162866, 0.83188516, 0.64427125, 0.52423614, 0.43325692, 0.36617002, 0.32104823, 0.27464288, 0.25053367, 0.22545385, 0.19894916, 0.17026083, 0.13792117, 0.09824532, 0.02916753], [14.61464119, 5.85520077, 2.84484982, 1.78698075, 1.24153244, 0.92192322, 0.72133851, 0.57119018, 0.45573691, 0.38853383, 0.34370604, 0.29807833, 0.27464288, 0.25053367, 0.22545385, 0.19894916, 0.17026083, 0.13792117, 0.09824532, 0.02916753], [14.61464119, 5.85520077, 2.84484982, 1.78698075, 1.24153244, 0.92192322, 0.72133851, 0.57119018, 0.4783645, 0.41087446, 0.36617002, 0.32104823, 0.29807833, 0.27464288, 0.25053367, 0.22545385, 0.19894916, 0.17026083, 0.13792117, 0.09824532, 0.02916753], ], 1.35: [ [14.61464119, 0.69515091, 0.02916753], [14.61464119, 0.95350921, 0.34370604, 0.02916753], [14.61464119, 1.56271636, 0.57119018, 0.19894916, 0.02916753], [14.61464119, 1.61558151, 0.69515091, 0.29807833, 0.09824532, 0.02916753], [14.61464119, 1.84880662, 0.83188516, 0.43325692, 0.22545385, 0.09824532, 0.02916753], [14.61464119, 2.45070267, 1.162866, 0.64427125, 0.36617002, 0.19894916, 0.09824532, 0.02916753], [14.61464119, 2.84484982, 1.36964464, 0.803307, 0.50118381, 0.32104823, 0.19894916, 0.09824532, 0.02916753], [14.61464119, 2.84484982, 1.41535246, 0.83188516, 0.54755926, 0.36617002, 0.25053367, 0.17026083, 0.09824532, 0.02916753], [14.61464119, 2.84484982, 
1.56271636, 0.95350921, 0.64427125, 0.45573691, 0.32104823, 0.22545385, 0.17026083, 0.09824532, 0.02916753], [14.61464119, 2.84484982, 1.56271636, 0.95350921, 0.64427125, 0.45573691, 0.34370604, 0.25053367, 0.19894916, 0.13792117, 0.09824532, 0.02916753], [14.61464119, 3.07277966, 1.61558151, 1.01931262, 0.72133851, 0.52423614, 0.38853383, 0.29807833, 0.22545385, 0.17026083, 0.13792117, 0.09824532, 0.02916753], [14.61464119, 3.07277966, 1.61558151, 1.01931262, 0.72133851, 0.52423614, 0.41087446, 0.32104823, 0.25053367, 0.19894916, 0.17026083, 0.13792117, 0.09824532, 0.02916753], [14.61464119, 3.07277966, 1.61558151, 1.05362725, 0.74807048, 0.54755926, 0.43325692, 0.34370604, 0.27464288, 0.22545385, 0.19894916, 0.17026083, 0.13792117, 0.09824532, 0.02916753], [14.61464119, 3.07277966, 1.72759056, 1.12534678, 0.803307, 0.59516323, 0.45573691, 0.36617002, 0.29807833, 0.25053367, 0.22545385, 0.19894916, 0.17026083, 0.13792117, 0.09824532, 0.02916753], [14.61464119, 3.07277966, 1.72759056, 1.12534678, 0.803307, 0.59516323, 0.4783645, 0.38853383, 0.32104823, 0.27464288, 0.25053367, 0.22545385, 0.19894916, 0.17026083, 0.13792117, 0.09824532, 0.02916753], [14.61464119, 5.85520077, 2.45070267, 1.51179266, 1.01931262, 0.74807048, 0.57119018, 0.45573691, 0.36617002, 0.32104823, 0.27464288, 0.25053367, 0.22545385, 0.19894916, 0.17026083, 0.13792117, 0.09824532, 0.02916753], [14.61464119, 5.85520077, 2.6383388, 1.61558151, 1.08895338, 0.803307, 0.61951244, 0.50118381, 0.41087446, 0.34370604, 0.29807833, 0.27464288, 0.25053367, 0.22545385, 0.19894916, 0.17026083, 0.13792117, 0.09824532, 0.02916753], [14.61464119, 5.85520077, 2.6383388, 1.61558151, 1.08895338, 0.803307, 0.64427125, 0.52423614, 0.43325692, 0.36617002, 0.32104823, 0.29807833, 0.27464288, 0.25053367, 0.22545385, 0.19894916, 0.17026083, 0.13792117, 0.09824532, 0.02916753], [14.61464119, 5.85520077, 2.6383388, 1.61558151, 1.08895338, 0.803307, 0.64427125, 0.52423614, 0.45573691, 0.38853383, 0.34370604, 0.32104823, 0.29807833, 0.27464288, 0.25053367, 0.22545385, 0.19894916, 0.17026083, 0.13792117, 0.09824532, 0.02916753], ], 1.40: [ [14.61464119, 0.59516323, 0.02916753], [14.61464119, 0.95350921, 0.34370604, 0.02916753], [14.61464119, 1.08895338, 0.43325692, 0.13792117, 0.02916753], [14.61464119, 1.56271636, 0.64427125, 0.27464288, 0.09824532, 0.02916753], [14.61464119, 1.61558151, 0.803307, 0.43325692, 0.22545385, 0.09824532, 0.02916753], [14.61464119, 2.05039096, 0.95350921, 0.54755926, 0.34370604, 0.19894916, 0.09824532, 0.02916753], [14.61464119, 2.45070267, 1.24153244, 0.72133851, 0.43325692, 0.27464288, 0.17026083, 0.09824532, 0.02916753], [14.61464119, 2.45070267, 1.24153244, 0.74807048, 0.50118381, 0.34370604, 0.25053367, 0.17026083, 0.09824532, 0.02916753], [14.61464119, 2.45070267, 1.28281462, 0.803307, 0.52423614, 0.36617002, 0.27464288, 0.19894916, 0.13792117, 0.09824532, 0.02916753], [14.61464119, 2.45070267, 1.28281462, 0.803307, 0.54755926, 0.38853383, 0.29807833, 0.22545385, 0.17026083, 0.13792117, 0.09824532, 0.02916753], [14.61464119, 2.84484982, 1.41535246, 0.86115354, 0.59516323, 0.43325692, 0.32104823, 0.25053367, 0.19894916, 0.17026083, 0.13792117, 0.09824532, 0.02916753], [14.61464119, 2.84484982, 1.51179266, 0.95350921, 0.64427125, 0.45573691, 0.34370604, 0.27464288, 0.22545385, 0.19894916, 0.17026083, 0.13792117, 0.09824532, 0.02916753], [14.61464119, 2.84484982, 1.51179266, 0.95350921, 0.64427125, 0.4783645, 0.36617002, 0.29807833, 0.25053367, 0.22545385, 0.19894916, 0.17026083, 0.13792117, 0.09824532, 0.02916753], 
[14.61464119, 2.84484982, 1.56271636, 0.98595673, 0.69515091, 0.52423614, 0.41087446, 0.34370604, 0.29807833, 0.25053367, 0.22545385, 0.19894916, 0.17026083, 0.13792117, 0.09824532, 0.02916753], [14.61464119, 2.84484982, 1.56271636, 1.01931262, 0.72133851, 0.54755926, 0.43325692, 0.36617002, 0.32104823, 0.27464288, 0.25053367, 0.22545385, 0.19894916, 0.17026083, 0.13792117, 0.09824532, 0.02916753], [14.61464119, 2.84484982, 1.61558151, 1.05362725, 0.74807048, 0.57119018, 0.45573691, 0.38853383, 0.34370604, 0.29807833, 0.27464288, 0.25053367, 0.22545385, 0.19894916, 0.17026083, 0.13792117, 0.09824532, 0.02916753], [14.61464119, 2.84484982, 1.61558151, 1.08895338, 0.803307, 0.61951244, 0.50118381, 0.41087446, 0.36617002, 0.32104823, 0.29807833, 0.27464288, 0.25053367, 0.22545385, 0.19894916, 0.17026083, 0.13792117, 0.09824532, 0.02916753], [14.61464119, 2.84484982, 1.61558151, 1.08895338, 0.803307, 0.61951244, 0.50118381, 0.43325692, 0.38853383, 0.34370604, 0.32104823, 0.29807833, 0.27464288, 0.25053367, 0.22545385, 0.19894916, 0.17026083, 0.13792117, 0.09824532, 0.02916753], [14.61464119, 2.84484982, 1.61558151, 1.08895338, 0.803307, 0.64427125, 0.52423614, 0.45573691, 0.41087446, 0.36617002, 0.34370604, 0.32104823, 0.29807833, 0.27464288, 0.25053367, 0.22545385, 0.19894916, 0.17026083, 0.13792117, 0.09824532, 0.02916753], ], 1.45: [ [14.61464119, 0.59516323, 0.02916753], [14.61464119, 0.803307, 0.25053367, 0.02916753], [14.61464119, 0.95350921, 0.34370604, 0.09824532, 0.02916753], [14.61464119, 1.24153244, 0.54755926, 0.25053367, 0.09824532, 0.02916753], [14.61464119, 1.56271636, 0.72133851, 0.36617002, 0.19894916, 0.09824532, 0.02916753], [14.61464119, 1.61558151, 0.803307, 0.45573691, 0.27464288, 0.17026083, 0.09824532, 0.02916753], [14.61464119, 1.91321158, 0.95350921, 0.57119018, 0.36617002, 0.25053367, 0.17026083, 0.09824532, 0.02916753], [14.61464119, 2.19988537, 1.08895338, 0.64427125, 0.41087446, 0.27464288, 0.19894916, 0.13792117, 0.09824532, 0.02916753], [14.61464119, 2.45070267, 1.24153244, 0.74807048, 0.50118381, 0.34370604, 0.25053367, 0.19894916, 0.13792117, 0.09824532, 0.02916753], [14.61464119, 2.45070267, 1.24153244, 0.74807048, 0.50118381, 0.36617002, 0.27464288, 0.22545385, 0.17026083, 0.13792117, 0.09824532, 0.02916753], [14.61464119, 2.45070267, 1.28281462, 0.803307, 0.54755926, 0.41087446, 0.32104823, 0.25053367, 0.19894916, 0.17026083, 0.13792117, 0.09824532, 0.02916753], [14.61464119, 2.45070267, 1.28281462, 0.803307, 0.57119018, 0.43325692, 0.34370604, 0.27464288, 0.22545385, 0.19894916, 0.17026083, 0.13792117, 0.09824532, 0.02916753], [14.61464119, 2.45070267, 1.28281462, 0.83188516, 0.59516323, 0.45573691, 0.36617002, 0.29807833, 0.25053367, 0.22545385, 0.19894916, 0.17026083, 0.13792117, 0.09824532, 0.02916753], [14.61464119, 2.45070267, 1.28281462, 0.83188516, 0.59516323, 0.45573691, 0.36617002, 0.32104823, 0.27464288, 0.25053367, 0.22545385, 0.19894916, 0.17026083, 0.13792117, 0.09824532, 0.02916753], [14.61464119, 2.84484982, 1.51179266, 0.95350921, 0.69515091, 0.52423614, 0.41087446, 0.34370604, 0.29807833, 0.27464288, 0.25053367, 0.22545385, 0.19894916, 0.17026083, 0.13792117, 0.09824532, 0.02916753], [14.61464119, 2.84484982, 1.51179266, 0.95350921, 0.69515091, 0.52423614, 0.43325692, 0.36617002, 0.32104823, 0.29807833, 0.27464288, 0.25053367, 0.22545385, 0.19894916, 0.17026083, 0.13792117, 0.09824532, 0.02916753], [14.61464119, 2.84484982, 1.56271636, 0.98595673, 0.72133851, 0.54755926, 0.45573691, 0.38853383, 0.34370604, 0.32104823, 0.29807833, 
0.27464288, 0.25053367, 0.22545385, 0.19894916, 0.17026083, 0.13792117, 0.09824532, 0.02916753], [14.61464119, 2.84484982, 1.56271636, 1.01931262, 0.74807048, 0.57119018, 0.4783645, 0.41087446, 0.36617002, 0.34370604, 0.32104823, 0.29807833, 0.27464288, 0.25053367, 0.22545385, 0.19894916, 0.17026083, 0.13792117, 0.09824532, 0.02916753], [14.61464119, 2.84484982, 1.56271636, 1.01931262, 0.74807048, 0.59516323, 0.50118381, 0.43325692, 0.38853383, 0.36617002, 0.34370604, 0.32104823, 0.29807833, 0.27464288, 0.25053367, 0.22545385, 0.19894916, 0.17026083, 0.13792117, 0.09824532, 0.02916753], ], 1.50: [ [14.61464119, 0.54755926, 0.02916753], [14.61464119, 0.803307, 0.25053367, 0.02916753], [14.61464119, 0.86115354, 0.32104823, 0.09824532, 0.02916753], [14.61464119, 1.24153244, 0.54755926, 0.25053367, 0.09824532, 0.02916753], [14.61464119, 1.56271636, 0.72133851, 0.36617002, 0.19894916, 0.09824532, 0.02916753], [14.61464119, 1.61558151, 0.803307, 0.45573691, 0.27464288, 0.17026083, 0.09824532, 0.02916753], [14.61464119, 1.61558151, 0.83188516, 0.52423614, 0.34370604, 0.25053367, 0.17026083, 0.09824532, 0.02916753], [14.61464119, 1.84880662, 0.95350921, 0.59516323, 0.38853383, 0.27464288, 0.19894916, 0.13792117, 0.09824532, 0.02916753], [14.61464119, 1.84880662, 0.95350921, 0.59516323, 0.41087446, 0.29807833, 0.22545385, 0.17026083, 0.13792117, 0.09824532, 0.02916753], [14.61464119, 1.84880662, 0.95350921, 0.61951244, 0.43325692, 0.32104823, 0.25053367, 0.19894916, 0.17026083, 0.13792117, 0.09824532, 0.02916753], [14.61464119, 2.19988537, 1.12534678, 0.72133851, 0.50118381, 0.36617002, 0.27464288, 0.22545385, 0.19894916, 0.17026083, 0.13792117, 0.09824532, 0.02916753], [14.61464119, 2.19988537, 1.12534678, 0.72133851, 0.50118381, 0.36617002, 0.29807833, 0.25053367, 0.22545385, 0.19894916, 0.17026083, 0.13792117, 0.09824532, 0.02916753], [14.61464119, 2.36326075, 1.24153244, 0.803307, 0.57119018, 0.43325692, 0.34370604, 0.29807833, 0.25053367, 0.22545385, 0.19894916, 0.17026083, 0.13792117, 0.09824532, 0.02916753], [14.61464119, 2.36326075, 1.24153244, 0.803307, 0.57119018, 0.43325692, 0.34370604, 0.29807833, 0.27464288, 0.25053367, 0.22545385, 0.19894916, 0.17026083, 0.13792117, 0.09824532, 0.02916753], [14.61464119, 2.36326075, 1.24153244, 0.803307, 0.59516323, 0.45573691, 0.36617002, 0.32104823, 0.29807833, 0.27464288, 0.25053367, 0.22545385, 0.19894916, 0.17026083, 0.13792117, 0.09824532, 0.02916753], [14.61464119, 2.36326075, 1.24153244, 0.803307, 0.59516323, 0.45573691, 0.38853383, 0.34370604, 0.32104823, 0.29807833, 0.27464288, 0.25053367, 0.22545385, 0.19894916, 0.17026083, 0.13792117, 0.09824532, 0.02916753], [14.61464119, 2.45070267, 1.32549286, 0.86115354, 0.64427125, 0.50118381, 0.41087446, 0.36617002, 0.34370604, 0.32104823, 0.29807833, 0.27464288, 0.25053367, 0.22545385, 0.19894916, 0.17026083, 0.13792117, 0.09824532, 0.02916753], [14.61464119, 2.45070267, 1.36964464, 0.92192322, 0.69515091, 0.54755926, 0.45573691, 0.41087446, 0.36617002, 0.34370604, 0.32104823, 0.29807833, 0.27464288, 0.25053367, 0.22545385, 0.19894916, 0.17026083, 0.13792117, 0.09824532, 0.02916753], [14.61464119, 2.45070267, 1.41535246, 0.95350921, 0.72133851, 0.57119018, 0.4783645, 0.43325692, 0.38853383, 0.36617002, 0.34370604, 0.32104823, 0.29807833, 0.27464288, 0.25053367, 0.22545385, 0.19894916, 0.17026083, 0.13792117, 0.09824532, 0.02916753], ], } class GITSScheduler: @classmethod def INPUT_TYPES(s): return {"required": {"coeff": ("FLOAT", {"default": 1.20, "min": 0.80, "max": 1.50, "step": 0.05}), "steps": 
("INT", {"default": 10, "min": 2, "max": 1000}), "denoise": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 1.0, "step": 0.01}), } } RETURN_TYPES = ("SIGMAS",) CATEGORY = "sampling/custom_sampling/schedulers" FUNCTION = "get_sigmas" def get_sigmas(self, coeff, steps, denoise): total_steps = steps if denoise < 1.0: if denoise <= 0.0: return (torch.FloatTensor([]),) total_steps = round(steps * denoise) if steps <= 20: sigmas = NOISE_LEVELS[round(coeff, 2)][steps-2][:] else: sigmas = NOISE_LEVELS[round(coeff, 2)][-1][:] sigmas = loglinear_interp(sigmas, steps + 1) sigmas = sigmas[-(total_steps + 1):] sigmas[-1] = 0 return (torch.FloatTensor(sigmas), ) NODE_CLASS_MAPPINGS = { "GITSScheduler": GITSScheduler, }
import comfy.utils import folder_paths import torch import logging def load_hypernetwork_patch(path, strength): sd = comfy.utils.load_torch_file(path, safe_load=True) activation_func = sd.get('activation_func', 'linear') is_layer_norm = sd.get('is_layer_norm', False) use_dropout = sd.get('use_dropout', False) activate_output = sd.get('activate_output', False) last_layer_dropout = sd.get('last_layer_dropout', False) valid_activation = { "linear": torch.nn.Identity, "relu": torch.nn.ReLU, "leakyrelu": torch.nn.LeakyReLU, "elu": torch.nn.ELU, "swish": torch.nn.Hardswish, "tanh": torch.nn.Tanh, "sigmoid": torch.nn.Sigmoid, "softsign": torch.nn.Softsign, "mish": torch.nn.Mish, } if activation_func not in valid_activation: logging.error("Unsupported Hypernetwork format, if you report it I might implement it. {} {} {} {} {} {}".format(path, activation_func, is_layer_norm, use_dropout, activate_output, last_layer_dropout)) return None out = {} for d in sd: try: dim = int(d) except: continue output = [] for index in [0, 1]: attn_weights = sd[dim][index] keys = attn_weights.keys() linears = filter(lambda a: a.endswith(".weight"), keys) linears = list(map(lambda a: a[:-len(".weight")], linears)) layers = [] i = 0 while i < len(linears): lin_name = linears[i] last_layer = (i == (len(linears) - 1)) penultimate_layer = (i == (len(linears) - 2)) lin_weight = attn_weights['{}.weight'.format(lin_name)] lin_bias = attn_weights['{}.bias'.format(lin_name)] layer = torch.nn.Linear(lin_weight.shape[1], lin_weight.shape[0]) layer.load_state_dict({"weight": lin_weight, "bias": lin_bias}) layers.append(layer) if activation_func != "linear": if (not last_layer) or (activate_output): layers.append(valid_activation[activation_func]()) if is_layer_norm: i += 1 ln_name = linears[i] ln_weight = attn_weights['{}.weight'.format(ln_name)] ln_bias = attn_weights['{}.bias'.format(ln_name)] ln = torch.nn.LayerNorm(ln_weight.shape[0]) ln.load_state_dict({"weight": ln_weight, "bias": ln_bias}) layers.append(ln) if use_dropout: if (not last_layer) and (not penultimate_layer or last_layer_dropout): layers.append(torch.nn.Dropout(p=0.3)) i += 1 output.append(torch.nn.Sequential(*layers)) out[dim] = torch.nn.ModuleList(output) class hypernetwork_patch: def __init__(self, hypernet, strength): self.hypernet = hypernet self.strength = strength def __call__(self, q, k, v, extra_options): dim = k.shape[-1] if dim in self.hypernet: hn = self.hypernet[dim] k = k + hn[0](k) * self.strength v = v + hn[1](v) * self.strength return q, k, v def to(self, device): for d in self.hypernet.keys(): self.hypernet[d] = self.hypernet[d].to(device) return self return hypernetwork_patch(out, strength) class HypernetworkLoader: @classmethod def INPUT_TYPES(s): return {"required": { "model": ("MODEL",), "hypernetwork_name": (folder_paths.get_filename_list("hypernetworks"), ), "strength": ("FLOAT", {"default": 1.0, "min": -10.0, "max": 10.0, "step": 0.01}), }} RETURN_TYPES = ("MODEL",) FUNCTION = "load_hypernetwork" CATEGORY = "loaders" def load_hypernetwork(self, model, hypernetwork_name, strength): hypernetwork_path = folder_paths.get_full_path("hypernetworks", hypernetwork_name) model_hypernetwork = model.clone() patch = load_hypernetwork_patch(hypernetwork_path, strength) if patch is not None: model_hypernetwork.set_model_attn1_patch(patch) model_hypernetwork.set_model_attn2_patch(patch) return (model_hypernetwork,) NODE_CLASS_MAPPINGS = { "HypernetworkLoader": HypernetworkLoader }
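A toy sketch of the attention update that hypernetwork_patch.__call__ applies above: k and v each get an additive, strength-scaled term from their per-dimension module. The two-layer MLPs here are invented stand-ins for what load_hypernetwork_patch builds from the checkpoint, purely for illustration.

import torch

dim, strength = 8, 0.5
hn_k = torch.nn.Sequential(torch.nn.Linear(dim, dim), torch.nn.ReLU(), torch.nn.Linear(dim, dim))
hn_v = torch.nn.Sequential(torch.nn.Linear(dim, dim), torch.nn.ReLU(), torch.nn.Linear(dim, dim))

k = torch.randn(2, 16, dim)
v = torch.randn(2, 16, dim)

# Same update rule as hypernetwork_patch.__call__ (q passes through unchanged).
k = k + hn_k(k) * strength
v = v + hn_v(v) * strength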
import math
from einops import rearrange
from torch import randint

def random_divisor(value: int, min_value: int, /, max_options: int = 1) -> int:
    min_value = min(min_value, value)

    # All divisors of value that are >= min_value (inclusive).
    divisors = [i for i in range(min_value, value + 1) if value % i == 0]

    ns = [value // i for i in divisors[:max_options]]  # has at least 1 element

    if len(ns) - 1 > 0:
        idx = randint(low=0, high=len(ns) - 1, size=(1,)).item()
    else:
        idx = 0

    return ns[idx]

class HyperTile:
    @classmethod
    def INPUT_TYPES(s):
        return {"required": {"model": ("MODEL",),
                             "tile_size": ("INT", {"default": 256, "min": 1, "max": 2048}),
                             "swap_size": ("INT", {"default": 2, "min": 1, "max": 128}),
                             "max_depth": ("INT", {"default": 0, "min": 0, "max": 10}),
                             "scale_depth": ("BOOLEAN", {"default": False}),
                             }}
    RETURN_TYPES = ("MODEL",)
    FUNCTION = "patch"

    CATEGORY = "model_patches"

    def patch(self, model, tile_size, swap_size, max_depth, scale_depth):
        model_channels = model.model.model_config.unet_config["model_channels"]

        latent_tile_size = max(32, tile_size) // 8
        self.temp = None

        def hypertile_in(q, k, v, extra_options):
            model_chans = q.shape[-2]
            orig_shape = extra_options['original_shape']
            apply_to = []
            for i in range(max_depth + 1):
                apply_to.append((orig_shape[-2] / (2 ** i)) * (orig_shape[-1] / (2 ** i)))

            if model_chans in apply_to:
                shape = extra_options["original_shape"]
                aspect_ratio = shape[-1] / shape[-2]

                hw = q.size(1)
                h, w = round(math.sqrt(hw * aspect_ratio)), round(math.sqrt(hw / aspect_ratio))

                factor = (2 ** apply_to.index(model_chans)) if scale_depth else 1
                nh = random_divisor(h, latent_tile_size * factor, swap_size)
                nw = random_divisor(w, latent_tile_size * factor, swap_size)

                if nh * nw > 1:
                    q = rearrange(q, "b (nh h nw w) c -> (b nh nw) (h w) c", h=h // nh, w=w // nw, nh=nh, nw=nw)
                    self.temp = (nh, nw, h, w)
                return q, k, v

            return q, k, v

        def hypertile_out(out, extra_options):
            if self.temp is not None:
                nh, nw, h, w = self.temp
                self.temp = None
                out = rearrange(out, "(b nh nw) hw c -> b nh nw hw c", nh=nh, nw=nw)
                out = rearrange(out, "b nh nw (h w) c -> b (nh h nw w) c", h=h // nh, w=w // nw)
            return out

        m = model.clone()
        m.set_model_attn1_patch(hypertile_in)
        m.set_model_attn1_output_patch(hypertile_out)
        return (m, )

NODE_CLASS_MAPPINGS = {
    "HyperTile": HyperTile,
}
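A worked example of random_divisor (defined above), which picks how many tiles to split one spatial axis into; the numbers are arbitrary and the call assumes the module above is in scope.

# Divisors of 96 that are >= 32 are [32, 48, 96]; with max_options=2 the candidate tile counts are
# ns = [96 // 32, 96 // 48] = [3, 2]. torch.randint's upper bound is exclusive, so idx can only be 0
# here and the call returns 3 (i.e. split that axis into 3 tiles).
n_tiles = random_divisor(96, 32, 2)
print(n_tiles)   # 3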
import nodes import folder_paths from comfy.cli_args import args from PIL import Image from PIL.PngImagePlugin import PngInfo import numpy as np import json import os MAX_RESOLUTION = nodes.MAX_RESOLUTION class ImageCrop: @classmethod def INPUT_TYPES(s): return {"required": { "image": ("IMAGE",), "width": ("INT", {"default": 512, "min": 1, "max": MAX_RESOLUTION, "step": 1}), "height": ("INT", {"default": 512, "min": 1, "max": MAX_RESOLUTION, "step": 1}), "x": ("INT", {"default": 0, "min": 0, "max": MAX_RESOLUTION, "step": 1}), "y": ("INT", {"default": 0, "min": 0, "max": MAX_RESOLUTION, "step": 1}), }} RETURN_TYPES = ("IMAGE",) FUNCTION = "crop" CATEGORY = "image/transform" def crop(self, image, width, height, x, y): x = min(x, image.shape[2] - 1) y = min(y, image.shape[1] - 1) to_x = width + x to_y = height + y img = image[:,y:to_y, x:to_x, :] return (img,) class RepeatImageBatch: @classmethod def INPUT_TYPES(s): return {"required": { "image": ("IMAGE",), "amount": ("INT", {"default": 1, "min": 1, "max": 4096}), }} RETURN_TYPES = ("IMAGE",) FUNCTION = "repeat" CATEGORY = "image/batch" def repeat(self, image, amount): s = image.repeat((amount, 1,1,1)) return (s,) class ImageFromBatch: @classmethod def INPUT_TYPES(s): return {"required": { "image": ("IMAGE",), "batch_index": ("INT", {"default": 0, "min": 0, "max": 4095}), "length": ("INT", {"default": 1, "min": 1, "max": 4096}), }} RETURN_TYPES = ("IMAGE",) FUNCTION = "frombatch" CATEGORY = "image/batch" def frombatch(self, image, batch_index, length): s_in = image batch_index = min(s_in.shape[0] - 1, batch_index) length = min(s_in.shape[0] - batch_index, length) s = s_in[batch_index:batch_index + length].clone() return (s,) class SaveAnimatedWEBP: def __init__(self): self.output_dir = folder_paths.get_output_directory() self.type = "output" self.prefix_append = "" methods = {"default": 4, "fastest": 0, "slowest": 6} @classmethod def INPUT_TYPES(s): return {"required": {"images": ("IMAGE", ), "filename_prefix": ("STRING", {"default": "ComfyUI"}), "fps": ("FLOAT", {"default": 6.0, "min": 0.01, "max": 1000.0, "step": 0.01}), "lossless": ("BOOLEAN", {"default": True}), "quality": ("INT", {"default": 80, "min": 0, "max": 100}), "method": (list(s.methods.keys()),), }, "hidden": {"prompt": "PROMPT", "extra_pnginfo": "EXTRA_PNGINFO"}, } RETURN_TYPES = () FUNCTION = "save_images" OUTPUT_NODE = True CATEGORY = "image/animation" def save_images(self, images, fps, filename_prefix, lossless, quality, method, num_frames=0, prompt=None, extra_pnginfo=None): method = self.methods.get(method) filename_prefix += self.prefix_append full_output_folder, filename, counter, subfolder, filename_prefix = folder_paths.get_save_image_path(filename_prefix, self.output_dir, images[0].shape[1], images[0].shape[0]) results = list() pil_images = [] for image in images: i = 255. 
* image.cpu().numpy() img = Image.fromarray(np.clip(i, 0, 255).astype(np.uint8)) pil_images.append(img) metadata = pil_images[0].getexif() if not args.disable_metadata: if prompt is not None: metadata[0x0110] = "prompt:{}".format(json.dumps(prompt)) if extra_pnginfo is not None: inital_exif = 0x010f for x in extra_pnginfo: metadata[inital_exif] = "{}:{}".format(x, json.dumps(extra_pnginfo[x])) inital_exif -= 1 if num_frames == 0: num_frames = len(pil_images) c = len(pil_images) for i in range(0, c, num_frames): file = f"{filename}_{counter:05}_.webp" pil_images[i].save(os.path.join(full_output_folder, file), save_all=True, duration=int(1000.0/fps), append_images=pil_images[i + 1:i + num_frames], exif=metadata, lossless=lossless, quality=quality, method=method) results.append({ "filename": file, "subfolder": subfolder, "type": self.type }) counter += 1 animated = num_frames != 1 return { "ui": { "images": results, "animated": (animated,) } } class SaveAnimatedPNG: def __init__(self): self.output_dir = folder_paths.get_output_directory() self.type = "output" self.prefix_append = "" @classmethod def INPUT_TYPES(s): return {"required": {"images": ("IMAGE", ), "filename_prefix": ("STRING", {"default": "ComfyUI"}), "fps": ("FLOAT", {"default": 6.0, "min": 0.01, "max": 1000.0, "step": 0.01}), "compress_level": ("INT", {"default": 4, "min": 0, "max": 9}) }, "hidden": {"prompt": "PROMPT", "extra_pnginfo": "EXTRA_PNGINFO"}, } RETURN_TYPES = () FUNCTION = "save_images" OUTPUT_NODE = True CATEGORY = "image/animation" def save_images(self, images, fps, compress_level, filename_prefix="ComfyUI", prompt=None, extra_pnginfo=None): filename_prefix += self.prefix_append full_output_folder, filename, counter, subfolder, filename_prefix = folder_paths.get_save_image_path(filename_prefix, self.output_dir, images[0].shape[1], images[0].shape[0]) results = list() pil_images = [] for image in images: i = 255. * image.cpu().numpy() img = Image.fromarray(np.clip(i, 0, 255).astype(np.uint8)) pil_images.append(img) metadata = None if not args.disable_metadata: metadata = PngInfo() if prompt is not None: metadata.add(b"comf", "prompt".encode("latin-1", "strict") + b"\0" + json.dumps(prompt).encode("latin-1", "strict"), after_idat=True) if extra_pnginfo is not None: for x in extra_pnginfo: metadata.add(b"comf", x.encode("latin-1", "strict") + b"\0" + json.dumps(extra_pnginfo[x]).encode("latin-1", "strict"), after_idat=True) file = f"{filename}_{counter:05}_.png" pil_images[0].save(os.path.join(full_output_folder, file), pnginfo=metadata, compress_level=compress_level, save_all=True, duration=int(1000.0/fps), append_images=pil_images[1:]) results.append({ "filename": file, "subfolder": subfolder, "type": self.type }) return { "ui": { "images": results, "animated": (True,)} } NODE_CLASS_MAPPINGS = { "ImageCrop": ImageCrop, "RepeatImageBatch": RepeatImageBatch, "ImageFromBatch": ImageFromBatch, "SaveAnimatedWEBP": SaveAnimatedWEBP, "SaveAnimatedPNG": SaveAnimatedPNG, }
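Both save nodes above convert each IMAGE tensor (floats in [0, 1], shape HxWxC) to a PIL image with the same few lines; here is that conversion in isolation on a dummy tensor.

import numpy as np
import torch
from PIL import Image

image = torch.rand(64, 64, 3)                 # dummy HxWxC float image in [0, 1]
i = 255. * image.cpu().numpy()                # scale to 0..255
img = Image.fromarray(np.clip(i, 0, 255).astype(np.uint8))
print(img.size, img.mode)                     # (64, 64) RGB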
import torch

class InstructPixToPixConditioning:
    @classmethod
    def INPUT_TYPES(s):
        return {"required": {"positive": ("CONDITIONING", ),
                             "negative": ("CONDITIONING", ),
                             "vae": ("VAE", ),
                             "pixels": ("IMAGE", ),
                             }}

    RETURN_TYPES = ("CONDITIONING", "CONDITIONING", "LATENT")
    RETURN_NAMES = ("positive", "negative", "latent")
    FUNCTION = "encode"

    CATEGORY = "conditioning/instructpix2pix"

    def encode(self, positive, negative, pixels, vae):
        # round the image size down to a multiple of 8 and centre-crop to it before VAE encoding
        x = (pixels.shape[1] // 8) * 8
        y = (pixels.shape[2] // 8) * 8

        if pixels.shape[1] != x or pixels.shape[2] != y:
            x_offset = (pixels.shape[1] % 8) // 2
            y_offset = (pixels.shape[2] % 8) // 2
            pixels = pixels[:, x_offset:x + x_offset, y_offset:y + y_offset, :]

        concat_latent = vae.encode(pixels)

        out_latent = {}
        out_latent["samples"] = torch.zeros_like(concat_latent)

        out = []
        for conditioning in [positive, negative]:
            c = []
            for t in conditioning:
                d = t[1].copy()
                d["concat_latent_image"] = concat_latent
                n = [t[0], d]
                c.append(n)
            out.append(c)
        return (out[0], out[1], out_latent)

NODE_CLASS_MAPPINGS = {
    "InstructPixToPixConditioning": InstructPixToPixConditioning,
}
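# Illustrative sketch (not part of the node files): the centre-crop-to-a-multiple-of-8 step that
# InstructPixToPixConditioning.encode performs before VAE encoding, shown standalone on a random
# tensor. The 517x389 size is invented for the demo.
import torch

def crop_to_multiple_of_8(pixels):
    x = (pixels.shape[1] // 8) * 8
    y = (pixels.shape[2] // 8) * 8
    if pixels.shape[1] != x or pixels.shape[2] != y:
        x_offset = (pixels.shape[1] % 8) // 2
        y_offset = (pixels.shape[2] % 8) // 2
        pixels = pixels[:, x_offset:x + x_offset, y_offset:y + y_offset, :]
    return pixels

print(crop_to_multiple_of_8(torch.rand(1, 517, 389, 3)).shape)  # torch.Size([1, 512, 384, 3])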
import comfy.utils import torch def reshape_latent_to(target_shape, latent): if latent.shape[1:] != target_shape[1:]: latent = comfy.utils.common_upscale(latent, target_shape[3], target_shape[2], "bilinear", "center") return comfy.utils.repeat_to_batch_size(latent, target_shape[0]) class LatentAdd: @classmethod def INPUT_TYPES(s): return {"required": { "samples1": ("LATENT",), "samples2": ("LATENT",)}} RETURN_TYPES = ("LATENT",) FUNCTION = "op" CATEGORY = "latent/advanced" def op(self, samples1, samples2): samples_out = samples1.copy() s1 = samples1["samples"] s2 = samples2["samples"] s2 = reshape_latent_to(s1.shape, s2) samples_out["samples"] = s1 + s2 return (samples_out,) class LatentSubtract: @classmethod def INPUT_TYPES(s): return {"required": { "samples1": ("LATENT",), "samples2": ("LATENT",)}} RETURN_TYPES = ("LATENT",) FUNCTION = "op" CATEGORY = "latent/advanced" def op(self, samples1, samples2): samples_out = samples1.copy() s1 = samples1["samples"] s2 = samples2["samples"] s2 = reshape_latent_to(s1.shape, s2) samples_out["samples"] = s1 - s2 return (samples_out,) class LatentMultiply: @classmethod def INPUT_TYPES(s): return {"required": { "samples": ("LATENT",), "multiplier": ("FLOAT", {"default": 1.0, "min": -10.0, "max": 10.0, "step": 0.01}), }} RETURN_TYPES = ("LATENT",) FUNCTION = "op" CATEGORY = "latent/advanced" def op(self, samples, multiplier): samples_out = samples.copy() s1 = samples["samples"] samples_out["samples"] = s1 * multiplier return (samples_out,) class LatentInterpolate: @classmethod def INPUT_TYPES(s): return {"required": { "samples1": ("LATENT",), "samples2": ("LATENT",), "ratio": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 1.0, "step": 0.01}), }} RETURN_TYPES = ("LATENT",) FUNCTION = "op" CATEGORY = "latent/advanced" def op(self, samples1, samples2, ratio): samples_out = samples1.copy() s1 = samples1["samples"] s2 = samples2["samples"] s2 = reshape_latent_to(s1.shape, s2) m1 = torch.linalg.vector_norm(s1, dim=(1)) m2 = torch.linalg.vector_norm(s2, dim=(1)) s1 = torch.nan_to_num(s1 / m1) s2 = torch.nan_to_num(s2 / m2) t = (s1 * ratio + s2 * (1.0 - ratio)) mt = torch.linalg.vector_norm(t, dim=(1)) st = torch.nan_to_num(t / mt) samples_out["samples"] = st * (m1 * ratio + m2 * (1.0 - ratio)) return (samples_out,) class LatentBatch: @classmethod def INPUT_TYPES(s): return {"required": { "samples1": ("LATENT",), "samples2": ("LATENT",)}} RETURN_TYPES = ("LATENT",) FUNCTION = "batch" CATEGORY = "latent/batch" def batch(self, samples1, samples2): samples_out = samples1.copy() s1 = samples1["samples"] s2 = samples2["samples"] if s1.shape[1:] != s2.shape[1:]: s2 = comfy.utils.common_upscale(s2, s1.shape[3], s1.shape[2], "bilinear", "center") s = torch.cat((s1, s2), dim=0) samples_out["samples"] = s samples_out["batch_index"] = samples1.get("batch_index", [x for x in range(0, s1.shape[0])]) + samples2.get("batch_index", [x for x in range(0, s2.shape[0])]) return (samples_out,) class LatentBatchSeedBehavior: @classmethod def INPUT_TYPES(s): return {"required": { "samples": ("LATENT",), "seed_behavior": (["random", "fixed"],{"default": "fixed"}),}} RETURN_TYPES = ("LATENT",) FUNCTION = "op" CATEGORY = "latent/advanced" def op(self, samples, seed_behavior): samples_out = samples.copy() latent = samples["samples"] if seed_behavior == "random": if 'batch_index' in samples_out: samples_out.pop('batch_index') elif seed_behavior == "fixed": batch_number = samples_out.get("batch_index", [0])[0] samples_out["batch_index"] = [batch_number] * latent.shape[0] return 
(samples_out,) NODE_CLASS_MAPPINGS = { "LatentAdd": LatentAdd, "LatentSubtract": LatentSubtract, "LatentMultiply": LatentMultiply, "LatentInterpolate": LatentInterpolate, "LatentBatch": LatentBatch, "LatentBatchSeedBehavior": LatentBatchSeedBehavior, }
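# Illustrative sketch (not part of the node files): the norm-preserving interpolation that
# LatentInterpolate applies, run on random tensors with an invented 1x4x8x8 latent shape.
import torch

def nlerp_demo(s1, s2, ratio):
    m1 = torch.linalg.vector_norm(s1, dim=(1,))
    m2 = torch.linalg.vector_norm(s2, dim=(1,))
    s1n = torch.nan_to_num(s1 / m1)
    s2n = torch.nan_to_num(s2 / m2)
    t = s1n * ratio + s2n * (1.0 - ratio)
    st = torch.nan_to_num(t / torch.linalg.vector_norm(t, dim=(1,)))
    return st * (m1 * ratio + m2 * (1.0 - ratio))  # restore the interpolated magnitude

a, b = torch.randn(1, 4, 8, 8), torch.randn(1, 4, 8, 8)
print(nlerp_demo(a, b, 0.5).shape)  # torch.Size([1, 4, 8, 8])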
import numpy as np
import scipy.ndimage
import torch
import comfy.utils

from nodes import MAX_RESOLUTION

def composite(destination, source, x, y, mask = None, multiplier = 8, resize_source = False):
    source = source.to(destination.device)
    if resize_source:
        source = torch.nn.functional.interpolate(source, size=(destination.shape[2], destination.shape[3]), mode="bilinear")

    source = comfy.utils.repeat_to_batch_size(source, destination.shape[0])

    x = max(-source.shape[3] * multiplier, min(x, destination.shape[3] * multiplier))
    y = max(-source.shape[2] * multiplier, min(y, destination.shape[2] * multiplier))

    # convert pixel-space offsets to the destination grid (latents use multiplier=8, images 1)
    left, top = (x // multiplier, y // multiplier,)
    right, bottom = (left + source.shape[3], top + source.shape[2],)

    if mask is None:
        mask = torch.ones_like(source)
    else:
        mask = mask.to(destination.device, copy=True)
        mask = torch.nn.functional.interpolate(mask.reshape((-1, 1, mask.shape[-2], mask.shape[-1])), size=(source.shape[2], source.shape[3]), mode="bilinear")
        mask = comfy.utils.repeat_to_batch_size(mask, source.shape[0])

    # only the part of the source that overlaps the destination is composited
    visible_width, visible_height = (destination.shape[3] - left + min(0, x), destination.shape[2] - top + min(0, y),)

    mask = mask[:, :, :visible_height, :visible_width]
    inverse_mask = torch.ones_like(mask) - mask

    source_portion = mask * source[:, :, :visible_height, :visible_width]
    destination_portion = inverse_mask * destination[:, :, top:bottom, left:right]

    destination[:, :, top:bottom, left:right] = source_portion + destination_portion
    return destination

class LatentCompositeMasked:
    @classmethod
    def INPUT_TYPES(s):
        return {
            "required": {
                "destination": ("LATENT",),
                "source": ("LATENT",),
                "x": ("INT", {"default": 0, "min": 0, "max": MAX_RESOLUTION, "step": 8}),
                "y": ("INT", {"default": 0, "min": 0, "max": MAX_RESOLUTION, "step": 8}),
                "resize_source": ("BOOLEAN", {"default": False}),
            },
            "optional": {
                "mask": ("MASK",),
            }
        }
    RETURN_TYPES = ("LATENT",)
    FUNCTION = "composite"

    CATEGORY = "latent"

    def composite(self, destination, source, x, y, resize_source, mask = None):
        output = destination.copy()
        destination = destination["samples"].clone()
        source = source["samples"]
        output["samples"] = composite(destination, source, x, y, mask, 8, resize_source)
        return (output,)

class ImageCompositeMasked:
    @classmethod
    def INPUT_TYPES(s):
        return {
            "required": {
                "destination": ("IMAGE",),
                "source": ("IMAGE",),
                "x": ("INT", {"default": 0, "min": 0, "max": MAX_RESOLUTION, "step": 1}),
                "y": ("INT", {"default": 0, "min": 0, "max": MAX_RESOLUTION, "step": 1}),
                "resize_source": ("BOOLEAN", {"default": False}),
            },
            "optional": {
                "mask": ("MASK",),
            }
        }
    RETURN_TYPES = ("IMAGE",)
    FUNCTION = "composite"

    CATEGORY = "image"

    def composite(self, destination, source, x, y, resize_source, mask = None):
        destination = destination.clone().movedim(-1, 1)
        output = composite(destination, source.movedim(-1, 1), x, y, mask, 1, resize_source).movedim(1, -1)
        return (output,)

class MaskToImage:
    @classmethod
    def INPUT_TYPES(s):
        return {
            "required": {
                "mask": ("MASK",),
            }
        }

    CATEGORY = "mask"

    RETURN_TYPES = ("IMAGE",)
    FUNCTION = "mask_to_image"

    def mask_to_image(self, mask):
        result = mask.reshape((-1, 1, mask.shape[-2], mask.shape[-1])).movedim(1, -1).expand(-1, -1, -1, 3)
        return (result,)

class ImageToMask:
    @classmethod
    def INPUT_TYPES(s):
        return {
            "required": {
                "image": ("IMAGE",),
                "channel": (["red", "green", "blue", "alpha"],),
            }
        }

    CATEGORY = "mask"

    RETURN_TYPES = ("MASK",)
    FUNCTION = "image_to_mask"

    def image_to_mask(self, image, channel):
        channels = ["red", "green", "blue", "alpha"]
        mask = image[:, :, :, channels.index(channel)]
        return (mask,)

class
ImageColorToMask: @classmethod def INPUT_TYPES(s): return { "required": { "image": ("IMAGE",), "color": ("INT", {"default": 0, "min": 0, "max": 0xFFFFFF, "step": 1, "display": "color"}), } } CATEGORY = "mask" RETURN_TYPES = ("MASK",) FUNCTION = "image_to_mask" def image_to_mask(self, image, color): temp = (torch.clamp(image, 0, 1.0) * 255.0).round().to(torch.int) temp = torch.bitwise_left_shift(temp[:,:,:,0], 16) + torch.bitwise_left_shift(temp[:,:,:,1], 8) + temp[:,:,:,2] mask = torch.where(temp == color, 255, 0).float() return (mask,) class SolidMask: @classmethod def INPUT_TYPES(cls): return { "required": { "value": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 1.0, "step": 0.01}), "width": ("INT", {"default": 512, "min": 1, "max": MAX_RESOLUTION, "step": 1}), "height": ("INT", {"default": 512, "min": 1, "max": MAX_RESOLUTION, "step": 1}), } } CATEGORY = "mask" RETURN_TYPES = ("MASK",) FUNCTION = "solid" def solid(self, value, width, height): out = torch.full((1, height, width), value, dtype=torch.float32, device="cpu") return (out,) class InvertMask: @classmethod def INPUT_TYPES(cls): return { "required": { "mask": ("MASK",), } } CATEGORY = "mask" RETURN_TYPES = ("MASK",) FUNCTION = "invert" def invert(self, mask): out = 1.0 - mask return (out,) class CropMask: @classmethod def INPUT_TYPES(cls): return { "required": { "mask": ("MASK",), "x": ("INT", {"default": 0, "min": 0, "max": MAX_RESOLUTION, "step": 1}), "y": ("INT", {"default": 0, "min": 0, "max": MAX_RESOLUTION, "step": 1}), "width": ("INT", {"default": 512, "min": 1, "max": MAX_RESOLUTION, "step": 1}), "height": ("INT", {"default": 512, "min": 1, "max": MAX_RESOLUTION, "step": 1}), } } CATEGORY = "mask" RETURN_TYPES = ("MASK",) FUNCTION = "crop" def crop(self, mask, x, y, width, height): mask = mask.reshape((-1, mask.shape[-2], mask.shape[-1])) out = mask[:, y:y + height, x:x + width] return (out,) class MaskComposite: @classmethod def INPUT_TYPES(cls): return { "required": { "destination": ("MASK",), "source": ("MASK",), "x": ("INT", {"default": 0, "min": 0, "max": MAX_RESOLUTION, "step": 1}), "y": ("INT", {"default": 0, "min": 0, "max": MAX_RESOLUTION, "step": 1}), "operation": (["multiply", "add", "subtract", "and", "or", "xor"],), } } CATEGORY = "mask" RETURN_TYPES = ("MASK",) FUNCTION = "combine" def combine(self, destination, source, x, y, operation): output = destination.reshape((-1, destination.shape[-2], destination.shape[-1])).clone() source = source.reshape((-1, source.shape[-2], source.shape[-1])) left, top = (x, y,) right, bottom = (min(left + source.shape[-1], destination.shape[-1]), min(top + source.shape[-2], destination.shape[-2])) visible_width, visible_height = (right - left, bottom - top,) source_portion = source[:, :visible_height, :visible_width] destination_portion = destination[:, top:bottom, left:right] if operation == "multiply": output[:, top:bottom, left:right] = destination_portion * source_portion elif operation == "add": output[:, top:bottom, left:right] = destination_portion + source_portion elif operation == "subtract": output[:, top:bottom, left:right] = destination_portion - source_portion elif operation == "and": output[:, top:bottom, left:right] = torch.bitwise_and(destination_portion.round().bool(), source_portion.round().bool()).float() elif operation == "or": output[:, top:bottom, left:right] = torch.bitwise_or(destination_portion.round().bool(), source_portion.round().bool()).float() elif operation == "xor": output[:, top:bottom, left:right] = 
torch.bitwise_xor(destination_portion.round().bool(), source_portion.round().bool()).float() output = torch.clamp(output, 0.0, 1.0) return (output,) class FeatherMask: @classmethod def INPUT_TYPES(cls): return { "required": { "mask": ("MASK",), "left": ("INT", {"default": 0, "min": 0, "max": MAX_RESOLUTION, "step": 1}), "top": ("INT", {"default": 0, "min": 0, "max": MAX_RESOLUTION, "step": 1}), "right": ("INT", {"default": 0, "min": 0, "max": MAX_RESOLUTION, "step": 1}), "bottom": ("INT", {"default": 0, "min": 0, "max": MAX_RESOLUTION, "step": 1}), } } CATEGORY = "mask" RETURN_TYPES = ("MASK",) FUNCTION = "feather" def feather(self, mask, left, top, right, bottom): output = mask.reshape((-1, mask.shape[-2], mask.shape[-1])).clone() left = min(left, output.shape[-1]) right = min(right, output.shape[-1]) top = min(top, output.shape[-2]) bottom = min(bottom, output.shape[-2]) for x in range(left): feather_rate = (x + 1.0) / left output[:, :, x] *= feather_rate for x in range(right): feather_rate = (x + 1) / right output[:, :, -x] *= feather_rate for y in range(top): feather_rate = (y + 1) / top output[:, y, :] *= feather_rate for y in range(bottom): feather_rate = (y + 1) / bottom output[:, -y, :] *= feather_rate return (output,) class GrowMask: @classmethod def INPUT_TYPES(cls): return { "required": { "mask": ("MASK",), "expand": ("INT", {"default": 0, "min": -MAX_RESOLUTION, "max": MAX_RESOLUTION, "step": 1}), "tapered_corners": ("BOOLEAN", {"default": True}), }, } CATEGORY = "mask" RETURN_TYPES = ("MASK",) FUNCTION = "expand_mask" def expand_mask(self, mask, expand, tapered_corners): c = 0 if tapered_corners else 1 kernel = np.array([[c, 1, c], [1, 1, 1], [c, 1, c]]) mask = mask.reshape((-1, mask.shape[-2], mask.shape[-1])) out = [] for m in mask: output = m.numpy() for _ in range(abs(expand)): if expand < 0: output = scipy.ndimage.grey_erosion(output, footprint=kernel) else: output = scipy.ndimage.grey_dilation(output, footprint=kernel) output = torch.from_numpy(output) out.append(output) return (torch.stack(out, dim=0),) class ThresholdMask: @classmethod def INPUT_TYPES(s): return { "required": { "mask": ("MASK",), "value": ("FLOAT", {"default": 0.5, "min": 0.0, "max": 1.0, "step": 0.01}), } } CATEGORY = "mask" RETURN_TYPES = ("MASK",) FUNCTION = "image_to_mask" def image_to_mask(self, mask, value): mask = (mask > value).float() return (mask,) NODE_CLASS_MAPPINGS = { "LatentCompositeMasked": LatentCompositeMasked, "ImageCompositeMasked": ImageCompositeMasked, "MaskToImage": MaskToImage, "ImageToMask": ImageToMask, "ImageColorToMask": ImageColorToMask, "SolidMask": SolidMask, "InvertMask": InvertMask, "CropMask": CropMask, "MaskComposite": MaskComposite, "FeatherMask": FeatherMask, "GrowMask": GrowMask, "ThresholdMask": ThresholdMask, } NODE_DISPLAY_NAME_MAPPINGS = { "ImageToMask": "Convert Image to Mask", "MaskToImage": "Convert Mask to Image", }
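# Illustrative sketch (not part of the node files): the boolean branch of MaskComposite on two tiny
# invented masks; thresholding via .round().bool() matches the node above.
import torch

dst = torch.tensor([[0.0, 1.0], [1.0, 0.2]])
src = torch.tensor([[1.0, 1.0], [0.0, 0.9]])
for op in (torch.bitwise_and, torch.bitwise_or, torch.bitwise_xor):
    print(op.__name__, op(dst.round().bool(), src.round().bool()).float())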
import folder_paths
import comfy.sd
import comfy.model_sampling
import comfy.latent_formats
import torch

class LCM(comfy.model_sampling.EPS):
    def calculate_denoised(self, sigma, model_output, model_input):
        timestep = self.timestep(sigma).view(sigma.shape[:1] + (1,) * (model_output.ndim - 1))
        sigma = sigma.view(sigma.shape[:1] + (1,) * (model_output.ndim - 1))
        x0 = model_input - model_output * sigma

        sigma_data = 0.5
        scaled_timestep = timestep * 10.0

        c_skip = sigma_data**2 / (scaled_timestep**2 + sigma_data**2)
        c_out = scaled_timestep / (scaled_timestep**2 + sigma_data**2) ** 0.5

        return c_out * x0 + c_skip * model_input

class X0(comfy.model_sampling.EPS):
    def calculate_denoised(self, sigma, model_output, model_input):
        return model_output

class ModelSamplingDiscreteDistilled(comfy.model_sampling.ModelSamplingDiscrete):
    original_timesteps = 50

    def __init__(self, model_config=None):
        super().__init__(model_config)

        self.skip_steps = self.num_timesteps // self.original_timesteps

        sigmas_valid = torch.zeros((self.original_timesteps), dtype=torch.float32)
        for x in range(self.original_timesteps):
            sigmas_valid[self.original_timesteps - 1 - x] = self.sigmas[self.num_timesteps - 1 - x * self.skip_steps]

        self.set_sigmas(sigmas_valid)

    def timestep(self, sigma):
        log_sigma = sigma.log()
        dists = log_sigma.to(self.log_sigmas.device) - self.log_sigmas[:, None]
        return (dists.abs().argmin(dim=0).view(sigma.shape) * self.skip_steps + (self.skip_steps - 1)).to(sigma.device)

    def sigma(self, timestep):
        t = torch.clamp(((timestep.float().to(self.log_sigmas.device) - (self.skip_steps - 1)) / self.skip_steps).float(), min=0, max=(len(self.sigmas) - 1))
        low_idx = t.floor().long()
        high_idx = t.ceil().long()
        w = t.frac()
        log_sigma = (1 - w) * self.log_sigmas[low_idx] + w * self.log_sigmas[high_idx]
        return log_sigma.exp().to(timestep.device)

def rescale_zero_terminal_snr_sigmas(sigmas):
    alphas_cumprod = 1 / ((sigmas * sigmas) + 1)
    alphas_bar_sqrt = alphas_cumprod.sqrt()

    alphas_bar_sqrt_0 = alphas_bar_sqrt[0].clone()
    alphas_bar_sqrt_T = alphas_bar_sqrt[-1].clone()

    # shift so the last timestep is zero, then scale so the first timestep keeps its old value
    alphas_bar_sqrt -= (alphas_bar_sqrt_T)
    alphas_bar_sqrt *= alphas_bar_sqrt_0 / (alphas_bar_sqrt_0 - alphas_bar_sqrt_T)

    alphas_bar = alphas_bar_sqrt**2
    alphas_bar[-1] = 4.8973451890853435e-08
    return ((1 - alphas_bar) / alphas_bar) ** 0.5

class ModelSamplingDiscrete:
    @classmethod
    def INPUT_TYPES(s):
        return {"required": { "model": ("MODEL",),
                              "sampling": (["eps", "v_prediction", "lcm", "x0"],),
                              "zsnr": ("BOOLEAN", {"default": False}),
                              }}

    RETURN_TYPES = ("MODEL",)
    FUNCTION = "patch"

    CATEGORY = "advanced/model"

    def patch(self, model, sampling, zsnr):
        m = model.clone()

        sampling_base = comfy.model_sampling.ModelSamplingDiscrete
        if sampling == "eps":
            sampling_type = comfy.model_sampling.EPS
        elif sampling == "v_prediction":
            sampling_type = comfy.model_sampling.V_PREDICTION
        elif sampling == "lcm":
            sampling_type = LCM
            sampling_base = ModelSamplingDiscreteDistilled
        elif sampling == "x0":
            sampling_type = X0

        class ModelSamplingAdvanced(sampling_base, sampling_type):
            pass

        model_sampling = ModelSamplingAdvanced(model.model.model_config)
        if zsnr:
            model_sampling.set_sigmas(rescale_zero_terminal_snr_sigmas(model_sampling.sigmas))

        m.add_object_patch("model_sampling", model_sampling)
        return (m, )

class ModelSamplingStableCascade:
    @classmethod
    def INPUT_TYPES(s):
        return {"required": { "model": ("MODEL",),
                              "shift": ("FLOAT", {"default": 2.0, "min": 0.0, "max": 100.0, "step":0.01}),
                              }}

    RETURN_TYPES = ("MODEL",)
    FUNCTION = "patch"

    CATEGORY = "advanced/model"

    def patch(self, model, shift):
        m = model.clone()
sampling_base = comfy.model_sampling.StableCascadeSampling sampling_type = comfy.model_sampling.EPS class ModelSamplingAdvanced(sampling_base, sampling_type): pass model_sampling = ModelSamplingAdvanced(model.model.model_config) model_sampling.set_parameters(shift) m.add_object_patch("model_sampling", model_sampling) return (m, ) class ModelSamplingSD3: @classmethod def INPUT_TYPES(s): return {"required": { "model": ("MODEL",), "shift": ("FLOAT", {"default": 3.0, "min": 0.0, "max": 100.0, "step":0.01}), }} RETURN_TYPES = ("MODEL",) FUNCTION = "patch" CATEGORY = "advanced/model" def patch(self, model, shift): m = model.clone() sampling_base = comfy.model_sampling.ModelSamplingDiscreteFlow sampling_type = comfy.model_sampling.CONST class ModelSamplingAdvanced(sampling_base, sampling_type): pass model_sampling = ModelSamplingAdvanced(model.model.model_config) model_sampling.set_parameters(shift=shift) m.add_object_patch("model_sampling", model_sampling) return (m, ) class ModelSamplingContinuousEDM: @classmethod def INPUT_TYPES(s): return {"required": { "model": ("MODEL",), "sampling": (["v_prediction", "edm_playground_v2.5", "eps"],), "sigma_max": ("FLOAT", {"default": 120.0, "min": 0.0, "max": 1000.0, "step":0.001, "round": False}), "sigma_min": ("FLOAT", {"default": 0.002, "min": 0.0, "max": 1000.0, "step":0.001, "round": False}), }} RETURN_TYPES = ("MODEL",) FUNCTION = "patch" CATEGORY = "advanced/model" def patch(self, model, sampling, sigma_max, sigma_min): m = model.clone() latent_format = None sigma_data = 1.0 if sampling == "eps": sampling_type = comfy.model_sampling.EPS elif sampling == "v_prediction": sampling_type = comfy.model_sampling.V_PREDICTION elif sampling == "edm_playground_v2.5": sampling_type = comfy.model_sampling.EDM sigma_data = 0.5 latent_format = comfy.latent_formats.SDXL_Playground_2_5() class ModelSamplingAdvanced(comfy.model_sampling.ModelSamplingContinuousEDM, sampling_type): pass model_sampling = ModelSamplingAdvanced(model.model.model_config) model_sampling.set_parameters(sigma_min, sigma_max, sigma_data) m.add_object_patch("model_sampling", model_sampling) if latent_format is not None: m.add_object_patch("latent_format", latent_format) return (m, ) class ModelSamplingContinuousV: @classmethod def INPUT_TYPES(s): return {"required": { "model": ("MODEL",), "sampling": (["v_prediction"],), "sigma_max": ("FLOAT", {"default": 500.0, "min": 0.0, "max": 1000.0, "step":0.001, "round": False}), "sigma_min": ("FLOAT", {"default": 0.03, "min": 0.0, "max": 1000.0, "step":0.001, "round": False}), }} RETURN_TYPES = ("MODEL",) FUNCTION = "patch" CATEGORY = "advanced/model" def patch(self, model, sampling, sigma_max, sigma_min): m = model.clone() latent_format = None sigma_data = 1.0 if sampling == "v_prediction": sampling_type = comfy.model_sampling.V_PREDICTION class ModelSamplingAdvanced(comfy.model_sampling.ModelSamplingContinuousV, sampling_type): pass model_sampling = ModelSamplingAdvanced(model.model.model_config) model_sampling.set_parameters(sigma_min, sigma_max, sigma_data) m.add_object_patch("model_sampling", model_sampling) return (m, ) class RescaleCFG: @classmethod def INPUT_TYPES(s): return {"required": { "model": ("MODEL",), "multiplier": ("FLOAT", {"default": 0.7, "min": 0.0, "max": 1.0, "step": 0.01}), }} RETURN_TYPES = ("MODEL",) FUNCTION = "patch" CATEGORY = "advanced/model" def patch(self, model, multiplier): def rescale_cfg(args): cond = args["cond"] uncond = args["uncond"] cond_scale = args["cond_scale"] sigma = args["sigma"] sigma = 
sigma.view(sigma.shape[:1] + (1,) * (cond.ndim - 1)) x_orig = args["input"] x = x_orig / (sigma * sigma + 1.0) cond = ((x - (x_orig - cond)) * (sigma ** 2 + 1.0) ** 0.5) / (sigma) uncond = ((x - (x_orig - uncond)) * (sigma ** 2 + 1.0) ** 0.5) / (sigma) x_cfg = uncond + cond_scale * (cond - uncond) ro_pos = torch.std(cond, dim=(1,2,3), keepdim=True) ro_cfg = torch.std(x_cfg, dim=(1,2,3), keepdim=True) x_rescaled = x_cfg * (ro_pos / ro_cfg) x_final = multiplier * x_rescaled + (1.0 - multiplier) * x_cfg return x_orig - (x - x_final * sigma / (sigma * sigma + 1.0) ** 0.5) m = model.clone() m.set_model_sampler_cfg_function(rescale_cfg) return (m, ) NODE_CLASS_MAPPINGS = { "ModelSamplingDiscrete": ModelSamplingDiscrete, "ModelSamplingContinuousEDM": ModelSamplingContinuousEDM, "ModelSamplingContinuousV": ModelSamplingContinuousV, "ModelSamplingStableCascade": ModelSamplingStableCascade, "ModelSamplingSD3": ModelSamplingSD3, "RescaleCFG": RescaleCFG, }
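# Illustrative sketch (not part of the node files): the effect of rescale_zero_terminal_snr_sigmas
# on a toy, invented sigma schedule (real schedules come from the loaded model's sampling object).
import torch

def zsnr_demo(sigmas):
    alphas_bar_sqrt = (1 / ((sigmas * sigmas) + 1)).sqrt()
    a0, aT = alphas_bar_sqrt[0].clone(), alphas_bar_sqrt[-1].clone()
    alphas_bar_sqrt = (alphas_bar_sqrt - aT) * a0 / (a0 - aT)
    alphas_bar = alphas_bar_sqrt ** 2
    alphas_bar[-1] = 4.8973451890853435e-08  # keep the terminal sigma finite
    return ((1 - alphas_bar) / alphas_bar) ** 0.5

toy = torch.linspace(0.03, 14.6, 10)
print(toy[-1].item(), zsnr_demo(toy)[-1].item())  # terminal sigma grows enormously (near-zero terminal SNR)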
import torch import comfy.utils class PatchModelAddDownscale: upscale_methods = ["bicubic", "nearest-exact", "bilinear", "area", "bislerp"] @classmethod def INPUT_TYPES(s): return {"required": { "model": ("MODEL",), "block_number": ("INT", {"default": 3, "min": 1, "max": 32, "step": 1}), "downscale_factor": ("FLOAT", {"default": 2.0, "min": 0.1, "max": 9.0, "step": 0.001}), "start_percent": ("FLOAT", {"default": 0.0, "min": 0.0, "max": 1.0, "step": 0.001}), "end_percent": ("FLOAT", {"default": 0.35, "min": 0.0, "max": 1.0, "step": 0.001}), "downscale_after_skip": ("BOOLEAN", {"default": True}), "downscale_method": (s.upscale_methods,), "upscale_method": (s.upscale_methods,), }} RETURN_TYPES = ("MODEL",) FUNCTION = "patch" CATEGORY = "_for_testing" def patch(self, model, block_number, downscale_factor, start_percent, end_percent, downscale_after_skip, downscale_method, upscale_method): model_sampling = model.get_model_object("model_sampling") sigma_start = model_sampling.percent_to_sigma(start_percent) sigma_end = model_sampling.percent_to_sigma(end_percent) def input_block_patch(h, transformer_options): if transformer_options["block"][1] == block_number: sigma = transformer_options["sigmas"][0].item() if sigma <= sigma_start and sigma >= sigma_end: h = comfy.utils.common_upscale(h, round(h.shape[-1] * (1.0 / downscale_factor)), round(h.shape[-2] * (1.0 / downscale_factor)), downscale_method, "disabled") return h def output_block_patch(h, hsp, transformer_options): if h.shape[2] != hsp.shape[2]: h = comfy.utils.common_upscale(h, hsp.shape[-1], hsp.shape[-2], upscale_method, "disabled") return h, hsp m = model.clone() if downscale_after_skip: m.set_model_input_block_patch_after_skip(input_block_patch) else: m.set_model_input_block_patch(input_block_patch) m.set_model_output_block_patch(output_block_patch) return (m, ) NODE_CLASS_MAPPINGS = { "PatchModelAddDownscale": PatchModelAddDownscale, } NODE_DISPLAY_NAME_MAPPINGS = { "PatchModelAddDownscale": "PatchModelAddDownscale (Kohya Deep Shrink)", }
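# Illustrative sketch (not part of the node files): the sigma-window gate used by
# PatchModelAddDownscale.input_block_patch. The sigma values below are invented stand-ins for what
# model_sampling.percent_to_sigma(start_percent / end_percent) would return.
sigma_start, sigma_end = 14.6, 4.0
for current_sigma in (15.0, 10.0, 3.0):
    active = current_sigma <= sigma_start and current_sigma >= sigma_end
    print(current_sigma, "downscale hidden states" if active else "leave untouched")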
import comfy.sd import comfy.utils import comfy.model_base import comfy.model_management import comfy.model_sampling import torch import folder_paths import json import os from comfy.cli_args import args class ModelMergeSimple: @classmethod def INPUT_TYPES(s): return {"required": { "model1": ("MODEL",), "model2": ("MODEL",), "ratio": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 1.0, "step": 0.01}), }} RETURN_TYPES = ("MODEL",) FUNCTION = "merge" CATEGORY = "advanced/model_merging" def merge(self, model1, model2, ratio): m = model1.clone() kp = model2.get_key_patches("diffusion_model.") for k in kp: m.add_patches({k: kp[k]}, 1.0 - ratio, ratio) return (m, ) class ModelSubtract: @classmethod def INPUT_TYPES(s): return {"required": { "model1": ("MODEL",), "model2": ("MODEL",), "multiplier": ("FLOAT", {"default": 1.0, "min": -10.0, "max": 10.0, "step": 0.01}), }} RETURN_TYPES = ("MODEL",) FUNCTION = "merge" CATEGORY = "advanced/model_merging" def merge(self, model1, model2, multiplier): m = model1.clone() kp = model2.get_key_patches("diffusion_model.") for k in kp: m.add_patches({k: kp[k]}, - multiplier, multiplier) return (m, ) class ModelAdd: @classmethod def INPUT_TYPES(s): return {"required": { "model1": ("MODEL",), "model2": ("MODEL",), }} RETURN_TYPES = ("MODEL",) FUNCTION = "merge" CATEGORY = "advanced/model_merging" def merge(self, model1, model2): m = model1.clone() kp = model2.get_key_patches("diffusion_model.") for k in kp: m.add_patches({k: kp[k]}, 1.0, 1.0) return (m, ) class CLIPMergeSimple: @classmethod def INPUT_TYPES(s): return {"required": { "clip1": ("CLIP",), "clip2": ("CLIP",), "ratio": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 1.0, "step": 0.01}), }} RETURN_TYPES = ("CLIP",) FUNCTION = "merge" CATEGORY = "advanced/model_merging" def merge(self, clip1, clip2, ratio): m = clip1.clone() kp = clip2.get_key_patches() for k in kp: if k.endswith(".position_ids") or k.endswith(".logit_scale"): continue m.add_patches({k: kp[k]}, 1.0 - ratio, ratio) return (m, ) class CLIPSubtract: @classmethod def INPUT_TYPES(s): return {"required": { "clip1": ("CLIP",), "clip2": ("CLIP",), "multiplier": ("FLOAT", {"default": 1.0, "min": -10.0, "max": 10.0, "step": 0.01}), }} RETURN_TYPES = ("CLIP",) FUNCTION = "merge" CATEGORY = "advanced/model_merging" def merge(self, clip1, clip2, multiplier): m = clip1.clone() kp = clip2.get_key_patches() for k in kp: if k.endswith(".position_ids") or k.endswith(".logit_scale"): continue m.add_patches({k: kp[k]}, - multiplier, multiplier) return (m, ) class CLIPAdd: @classmethod def INPUT_TYPES(s): return {"required": { "clip1": ("CLIP",), "clip2": ("CLIP",), }} RETURN_TYPES = ("CLIP",) FUNCTION = "merge" CATEGORY = "advanced/model_merging" def merge(self, clip1, clip2): m = clip1.clone() kp = clip2.get_key_patches() for k in kp: if k.endswith(".position_ids") or k.endswith(".logit_scale"): continue m.add_patches({k: kp[k]}, 1.0, 1.0) return (m, ) class ModelMergeBlocks: @classmethod def INPUT_TYPES(s): return {"required": { "model1": ("MODEL",), "model2": ("MODEL",), "input": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 1.0, "step": 0.01}), "middle": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 1.0, "step": 0.01}), "out": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 1.0, "step": 0.01}) }} RETURN_TYPES = ("MODEL",) FUNCTION = "merge" CATEGORY = "advanced/model_merging" def merge(self, model1, model2, **kwargs): m = model1.clone() kp = model2.get_key_patches("diffusion_model.") default_ratio = next(iter(kwargs.values())) for k in kp: ratio = 
default_ratio k_unet = k[len("diffusion_model."):] last_arg_size = 0 for arg in kwargs: if k_unet.startswith(arg) and last_arg_size < len(arg): ratio = kwargs[arg] last_arg_size = len(arg) m.add_patches({k: kp[k]}, 1.0 - ratio, ratio) return (m, ) def save_checkpoint(model, clip=None, vae=None, clip_vision=None, filename_prefix=None, output_dir=None, prompt=None, extra_pnginfo=None): full_output_folder, filename, counter, subfolder, filename_prefix = folder_paths.get_save_image_path(filename_prefix, output_dir) prompt_info = "" if prompt is not None: prompt_info = json.dumps(prompt) metadata = {} enable_modelspec = True if isinstance(model.model, comfy.model_base.SDXL): if isinstance(model.model, comfy.model_base.SDXL_instructpix2pix): metadata["modelspec.architecture"] = "stable-diffusion-xl-v1-edit" else: metadata["modelspec.architecture"] = "stable-diffusion-xl-v1-base" elif isinstance(model.model, comfy.model_base.SDXLRefiner): metadata["modelspec.architecture"] = "stable-diffusion-xl-v1-refiner" elif isinstance(model.model, comfy.model_base.SVD_img2vid): metadata["modelspec.architecture"] = "stable-video-diffusion-img2vid-v1" elif isinstance(model.model, comfy.model_base.SD3): metadata["modelspec.architecture"] = "stable-diffusion-v3-medium" else: enable_modelspec = False if enable_modelspec: metadata["modelspec.sai_model_spec"] = "1.0.0" metadata["modelspec.implementation"] = "sgm" metadata["modelspec.title"] = "{} {}".format(filename, counter) extra_keys = {} model_sampling = model.get_model_object("model_sampling") if isinstance(model_sampling, comfy.model_sampling.ModelSamplingContinuousEDM): if isinstance(model_sampling, comfy.model_sampling.V_PREDICTION): extra_keys["edm_vpred.sigma_max"] = torch.tensor(model_sampling.sigma_max).float() extra_keys["edm_vpred.sigma_min"] = torch.tensor(model_sampling.sigma_min).float() if model.model.model_type == comfy.model_base.ModelType.EPS: metadata["modelspec.predict_key"] = "epsilon" elif model.model.model_type == comfy.model_base.ModelType.V_PREDICTION: metadata["modelspec.predict_key"] = "v" if not args.disable_metadata: metadata["prompt"] = prompt_info if extra_pnginfo is not None: for x in extra_pnginfo: metadata[x] = json.dumps(extra_pnginfo[x]) output_checkpoint = f"{filename}_{counter:05}_.safetensors" output_checkpoint = os.path.join(full_output_folder, output_checkpoint) comfy.sd.save_checkpoint(output_checkpoint, model, clip, vae, clip_vision, metadata=metadata, extra_keys=extra_keys) class CheckpointSave: def __init__(self): self.output_dir = folder_paths.get_output_directory() @classmethod def INPUT_TYPES(s): return {"required": { "model": ("MODEL",), "clip": ("CLIP",), "vae": ("VAE",), "filename_prefix": ("STRING", {"default": "checkpoints/ComfyUI"}),}, "hidden": {"prompt": "PROMPT", "extra_pnginfo": "EXTRA_PNGINFO"},} RETURN_TYPES = () FUNCTION = "save" OUTPUT_NODE = True CATEGORY = "advanced/model_merging" def save(self, model, clip, vae, filename_prefix, prompt=None, extra_pnginfo=None): save_checkpoint(model, clip=clip, vae=vae, filename_prefix=filename_prefix, output_dir=self.output_dir, prompt=prompt, extra_pnginfo=extra_pnginfo) return {} class CLIPSave: def __init__(self): self.output_dir = folder_paths.get_output_directory() @classmethod def INPUT_TYPES(s): return {"required": { "clip": ("CLIP",), "filename_prefix": ("STRING", {"default": "clip/ComfyUI"}),}, "hidden": {"prompt": "PROMPT", "extra_pnginfo": "EXTRA_PNGINFO"},} RETURN_TYPES = () FUNCTION = "save" OUTPUT_NODE = True CATEGORY = "advanced/model_merging" def 
save(self, clip, filename_prefix, prompt=None, extra_pnginfo=None): prompt_info = "" if prompt is not None: prompt_info = json.dumps(prompt) metadata = {} if not args.disable_metadata: metadata["prompt"] = prompt_info if extra_pnginfo is not None: for x in extra_pnginfo: metadata[x] = json.dumps(extra_pnginfo[x]) comfy.model_management.load_models_gpu([clip.load_model()], force_patch_weights=True) clip_sd = clip.get_sd() for prefix in ["clip_l.", "clip_g.", ""]: k = list(filter(lambda a: a.startswith(prefix), clip_sd.keys())) current_clip_sd = {} for x in k: current_clip_sd[x] = clip_sd.pop(x) if len(current_clip_sd) == 0: continue p = prefix[:-1] replace_prefix = {} filename_prefix_ = filename_prefix if len(p) > 0: filename_prefix_ = "{}_{}".format(filename_prefix_, p) replace_prefix[prefix] = "" replace_prefix["transformer."] = "" full_output_folder, filename, counter, subfolder, filename_prefix_ = folder_paths.get_save_image_path(filename_prefix_, self.output_dir) output_checkpoint = f"{filename}_{counter:05}_.safetensors" output_checkpoint = os.path.join(full_output_folder, output_checkpoint) current_clip_sd = comfy.utils.state_dict_prefix_replace(current_clip_sd, replace_prefix) comfy.utils.save_torch_file(current_clip_sd, output_checkpoint, metadata=metadata) return {} class VAESave: def __init__(self): self.output_dir = folder_paths.get_output_directory() @classmethod def INPUT_TYPES(s): return {"required": { "vae": ("VAE",), "filename_prefix": ("STRING", {"default": "vae/ComfyUI_vae"}),}, "hidden": {"prompt": "PROMPT", "extra_pnginfo": "EXTRA_PNGINFO"},} RETURN_TYPES = () FUNCTION = "save" OUTPUT_NODE = True CATEGORY = "advanced/model_merging" def save(self, vae, filename_prefix, prompt=None, extra_pnginfo=None): full_output_folder, filename, counter, subfolder, filename_prefix = folder_paths.get_save_image_path(filename_prefix, self.output_dir) prompt_info = "" if prompt is not None: prompt_info = json.dumps(prompt) metadata = {} if not args.disable_metadata: metadata["prompt"] = prompt_info if extra_pnginfo is not None: for x in extra_pnginfo: metadata[x] = json.dumps(extra_pnginfo[x]) output_checkpoint = f"{filename}_{counter:05}_.safetensors" output_checkpoint = os.path.join(full_output_folder, output_checkpoint) comfy.utils.save_torch_file(vae.get_sd(), output_checkpoint, metadata=metadata) return {} NODE_CLASS_MAPPINGS = { "ModelMergeSimple": ModelMergeSimple, "ModelMergeBlocks": ModelMergeBlocks, "ModelMergeSubtract": ModelSubtract, "ModelMergeAdd": ModelAdd, "CheckpointSave": CheckpointSave, "CLIPMergeSimple": CLIPMergeSimple, "CLIPMergeSubtract": CLIPSubtract, "CLIPMergeAdd": CLIPAdd, "CLIPSave": CLIPSave, "VAESave": VAESave, }
import comfy_extras.nodes_model_merging class ModelMergeSD1(comfy_extras.nodes_model_merging.ModelMergeBlocks): CATEGORY = "advanced/model_merging/model_specific" @classmethod def INPUT_TYPES(s): arg_dict = { "model1": ("MODEL",), "model2": ("MODEL",)} argument = ("FLOAT", {"default": 1.0, "min": 0.0, "max": 1.0, "step": 0.01}) arg_dict["time_embed."] = argument arg_dict["label_emb."] = argument for i in range(12): arg_dict["input_blocks.{}.".format(i)] = argument for i in range(3): arg_dict["middle_block.{}.".format(i)] = argument for i in range(12): arg_dict["output_blocks.{}.".format(i)] = argument arg_dict["out."] = argument return {"required": arg_dict} class ModelMergeSDXL(comfy_extras.nodes_model_merging.ModelMergeBlocks): CATEGORY = "advanced/model_merging/model_specific" @classmethod def INPUT_TYPES(s): arg_dict = { "model1": ("MODEL",), "model2": ("MODEL",)} argument = ("FLOAT", {"default": 1.0, "min": 0.0, "max": 1.0, "step": 0.01}) arg_dict["time_embed."] = argument arg_dict["label_emb."] = argument for i in range(9): arg_dict["input_blocks.{}".format(i)] = argument for i in range(3): arg_dict["middle_block.{}".format(i)] = argument for i in range(9): arg_dict["output_blocks.{}".format(i)] = argument arg_dict["out."] = argument return {"required": arg_dict} class ModelMergeSD3_2B(comfy_extras.nodes_model_merging.ModelMergeBlocks): CATEGORY = "advanced/model_merging/model_specific" @classmethod def INPUT_TYPES(s): arg_dict = { "model1": ("MODEL",), "model2": ("MODEL",)} argument = ("FLOAT", {"default": 1.0, "min": 0.0, "max": 1.0, "step": 0.01}) arg_dict["pos_embed."] = argument arg_dict["x_embedder."] = argument arg_dict["context_embedder."] = argument arg_dict["y_embedder."] = argument arg_dict["t_embedder."] = argument for i in range(24): arg_dict["joint_blocks.{}.".format(i)] = argument arg_dict["final_layer."] = argument return {"required": arg_dict} NODE_CLASS_MAPPINGS = { "ModelMergeSD1": ModelMergeSD1, "ModelMergeSD2": ModelMergeSD1, "ModelMergeSDXL": ModelMergeSDXL, "ModelMergeSD3_2B": ModelMergeSD3_2B, }
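# Illustrative sketch (not part of the node files): how ModelMergeBlocks-derived nodes resolve the
# per-key merge ratio by longest matching argument prefix. Keys and ratios below are invented examples.
def pick_ratio(key_unet, kwargs):
    ratio = next(iter(kwargs.values()))  # fall back to the first argument
    last_arg_size = 0
    for arg in kwargs:
        if key_unet.startswith(arg) and last_arg_size < len(arg):
            ratio = kwargs[arg]
            last_arg_size = len(arg)
    return ratio

example = {"input_blocks.0.": 0.2, "input_blocks.": 0.5, "out.": 1.0}
print(pick_ratio("input_blocks.0.1.weight", example))  # 0.2 -- longest prefix wins
print(pick_ratio("middle_block.1.weight", example))    # 0.2 -- no prefix matches, first value used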
import torch import comfy.model_management from kornia.morphology import dilation, erosion, opening, closing, gradient, top_hat, bottom_hat class Morphology: @classmethod def INPUT_TYPES(s): return {"required": {"image": ("IMAGE",), "operation": (["erode", "dilate", "open", "close", "gradient", "bottom_hat", "top_hat"],), "kernel_size": ("INT", {"default": 3, "min": 3, "max": 999, "step": 1}), }} RETURN_TYPES = ("IMAGE",) FUNCTION = "process" CATEGORY = "image/postprocessing" def process(self, image, operation, kernel_size): device = comfy.model_management.get_torch_device() kernel = torch.ones(kernel_size, kernel_size, device=device) image_k = image.to(device).movedim(-1, 1) if operation == "erode": output = erosion(image_k, kernel) elif operation == "dilate": output = dilation(image_k, kernel) elif operation == "open": output = opening(image_k, kernel) elif operation == "close": output = closing(image_k, kernel) elif operation == "gradient": output = gradient(image_k, kernel) elif operation == "top_hat": output = top_hat(image_k, kernel) elif operation == "bottom_hat": output = bottom_hat(image_k, kernel) else: raise ValueError(f"Invalid operation {operation} for morphology. Must be one of 'erode', 'dilate', 'open', 'close', 'gradient', 'tophat', 'bottomhat'") img_out = output.to(comfy.model_management.intermediate_device()).movedim(1, -1) return (img_out,) NODE_CLASS_MAPPINGS = { "Morphology": Morphology, } NODE_DISPLAY_NAME_MAPPINGS = { "Morphology": "ImageMorphology", }
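# Illustrative sketch (not part of the node files): calling kornia's dilation directly, the same way
# Morphology.process does, on a random single-channel image of invented size (CPU, no ComfyUI needed).
import torch
from kornia.morphology import dilation

img = torch.rand(1, 1, 16, 16)      # (B, C, H, W), values in [0, 1]
kernel = torch.ones(3, 3)           # structuring element, as built in the node
print(dilation(img, kernel).shape)  # torch.Size([1, 1, 16, 16])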
import comfy.model_patcher import comfy.samplers class PerturbedAttentionGuidance: @classmethod def INPUT_TYPES(s): return { "required": { "model": ("MODEL",), "scale": ("FLOAT", {"default": 3.0, "min": 0.0, "max": 100.0, "step": 0.1, "round": 0.01}), } } RETURN_TYPES = ("MODEL",) FUNCTION = "patch" CATEGORY = "_for_testing" def patch(self, model, scale): unet_block = "middle" unet_block_id = 0 m = model.clone() def perturbed_attention(q, k, v, extra_options, mask=None): return v def post_cfg_function(args): model = args["model"] cond_pred = args["cond_denoised"] cond = args["cond"] cfg_result = args["denoised"] sigma = args["sigma"] model_options = args["model_options"].copy() x = args["input"] if scale == 0: return cfg_result model_options = comfy.model_patcher.set_model_options_patch_replace(model_options, perturbed_attention, "attn1", unet_block, unet_block_id) (pag,) = comfy.samplers.calc_cond_batch(model, [cond], x, sigma, model_options) return cfg_result + (cond_pred - pag) * scale m.set_model_sampler_post_cfg_function(post_cfg_function) return (m,) NODE_CLASS_MAPPINGS = { "PerturbedAttentionGuidance": PerturbedAttentionGuidance, }
import torch import comfy.model_management import comfy.sampler_helpers import comfy.samplers import comfy.utils import node_helpers def perp_neg(x, noise_pred_pos, noise_pred_neg, noise_pred_nocond, neg_scale, cond_scale): pos = noise_pred_pos - noise_pred_nocond neg = noise_pred_neg - noise_pred_nocond perp = neg - ((torch.mul(neg, pos).sum())/(torch.norm(pos)**2)) * pos perp_neg = perp * neg_scale cfg_result = noise_pred_nocond + cond_scale*(pos - perp_neg) return cfg_result class PerpNeg: @classmethod def INPUT_TYPES(s): return {"required": {"model": ("MODEL", ), "empty_conditioning": ("CONDITIONING", ), "neg_scale": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 100.0, "step": 0.01}), }} RETURN_TYPES = ("MODEL",) FUNCTION = "patch" CATEGORY = "_for_testing" def patch(self, model, empty_conditioning, neg_scale): m = model.clone() nocond = comfy.sampler_helpers.convert_cond(empty_conditioning) def cfg_function(args): model = args["model"] noise_pred_pos = args["cond_denoised"] noise_pred_neg = args["uncond_denoised"] cond_scale = args["cond_scale"] x = args["input"] sigma = args["sigma"] model_options = args["model_options"] nocond_processed = comfy.samplers.encode_model_conds(model.extra_conds, nocond, x, x.device, "negative") (noise_pred_nocond,) = comfy.samplers.calc_cond_batch(model, [nocond_processed], x, sigma, model_options) cfg_result = x - perp_neg(x, noise_pred_pos, noise_pred_neg, noise_pred_nocond, neg_scale, cond_scale) return cfg_result m.set_model_sampler_cfg_function(cfg_function) return (m, ) class Guider_PerpNeg(comfy.samplers.CFGGuider): def set_conds(self, positive, negative, empty_negative_prompt): empty_negative_prompt = node_helpers.conditioning_set_values(empty_negative_prompt, {"prompt_type": "negative"}) self.inner_set_conds({"positive": positive, "empty_negative_prompt": empty_negative_prompt, "negative": negative}) def set_cfg(self, cfg, neg_scale): self.cfg = cfg self.neg_scale = neg_scale def predict_noise(self, x, timestep, model_options={}, seed=None): positive_cond = self.conds.get("positive", None) negative_cond = self.conds.get("negative", None) empty_cond = self.conds.get("empty_negative_prompt", None) (noise_pred_pos, noise_pred_neg, noise_pred_empty) = \ comfy.samplers.calc_cond_batch(self.inner_model, [positive_cond, negative_cond, empty_cond], x, timestep, model_options) cfg_result = perp_neg(x, noise_pred_pos, noise_pred_neg, noise_pred_empty, self.neg_scale, self.cfg) for fn in model_options.get("sampler_post_cfg_function", []): args = { "denoised": cfg_result, "cond": positive_cond, "uncond": negative_cond, "model": self.inner_model, "uncond_denoised": noise_pred_neg, "cond_denoised": noise_pred_pos, "sigma": timestep, "model_options": model_options, "input": x, "empty_cond": empty_cond, "empty_cond_denoised": noise_pred_empty,} cfg_result = fn(args) return cfg_result class PerpNegGuider: @classmethod def INPUT_TYPES(s): return {"required": {"model": ("MODEL",), "positive": ("CONDITIONING", ), "negative": ("CONDITIONING", ), "empty_conditioning": ("CONDITIONING", ), "cfg": ("FLOAT", {"default": 8.0, "min": 0.0, "max": 100.0, "step":0.1, "round": 0.01}), "neg_scale": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 100.0, "step": 0.01}), } } RETURN_TYPES = ("GUIDER",) FUNCTION = "get_guider" CATEGORY = "_for_testing" def get_guider(self, model, positive, negative, empty_conditioning, cfg, neg_scale): guider = Guider_PerpNeg(model) guider.set_conds(positive, negative, empty_conditioning) guider.set_cfg(cfg, neg_scale) return (guider,) 
NODE_CLASS_MAPPINGS = { "PerpNeg": PerpNeg, "PerpNegGuider": PerpNegGuider, } NODE_DISPLAY_NAME_MAPPINGS = { "PerpNeg": "Perp-Neg (DEPRECATED by PerpNegGuider)", }
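# Illustrative sketch (not part of the node files): the projection used by perp_neg above, on random
# tensors of an invented shape; the component of `neg` parallel to `pos` is removed.
import torch

pos = torch.randn(1, 4, 8, 8)
neg = torch.randn(1, 4, 8, 8)
perp = neg - ((torch.mul(neg, pos).sum()) / (torch.norm(pos) ** 2)) * pos
print(torch.mul(perp, pos).sum().item())  # ~0: `perp` is orthogonal to `pos`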
import torch import torch.nn as nn import folder_paths import comfy.clip_model import comfy.clip_vision import comfy.ops VISION_CONFIG_DICT = { "hidden_size": 1024, "image_size": 224, "intermediate_size": 4096, "num_attention_heads": 16, "num_channels": 3, "num_hidden_layers": 24, "patch_size": 14, "projection_dim": 768, "hidden_act": "quick_gelu", } class MLP(nn.Module): def __init__(self, in_dim, out_dim, hidden_dim, use_residual=True, operations=comfy.ops): super().__init__() if use_residual: assert in_dim == out_dim self.layernorm = operations.LayerNorm(in_dim) self.fc1 = operations.Linear(in_dim, hidden_dim) self.fc2 = operations.Linear(hidden_dim, out_dim) self.use_residual = use_residual self.act_fn = nn.GELU() def forward(self, x): residual = x x = self.layernorm(x) x = self.fc1(x) x = self.act_fn(x) x = self.fc2(x) if self.use_residual: x = x + residual return x class FuseModule(nn.Module): def __init__(self, embed_dim, operations): super().__init__() self.mlp1 = MLP(embed_dim * 2, embed_dim, embed_dim, use_residual=False, operations=operations) self.mlp2 = MLP(embed_dim, embed_dim, embed_dim, use_residual=True, operations=operations) self.layer_norm = operations.LayerNorm(embed_dim) def fuse_fn(self, prompt_embeds, id_embeds): stacked_id_embeds = torch.cat([prompt_embeds, id_embeds], dim=-1) stacked_id_embeds = self.mlp1(stacked_id_embeds) + prompt_embeds stacked_id_embeds = self.mlp2(stacked_id_embeds) stacked_id_embeds = self.layer_norm(stacked_id_embeds) return stacked_id_embeds def forward( self, prompt_embeds, id_embeds, class_tokens_mask, ) -> torch.Tensor: id_embeds = id_embeds.to(prompt_embeds.dtype) num_inputs = class_tokens_mask.sum().unsqueeze(0) batch_size, max_num_inputs = id_embeds.shape[:2] seq_length = prompt_embeds.shape[1] flat_id_embeds = id_embeds.view( -1, id_embeds.shape[-2], id_embeds.shape[-1] ) valid_id_mask = ( torch.arange(max_num_inputs, device=flat_id_embeds.device)[None, :] < num_inputs[:, None] ) valid_id_embeds = flat_id_embeds[valid_id_mask.flatten()] prompt_embeds = prompt_embeds.view(-1, prompt_embeds.shape[-1]) class_tokens_mask = class_tokens_mask.view(-1) valid_id_embeds = valid_id_embeds.view(-1, valid_id_embeds.shape[-1]) image_token_embeds = prompt_embeds[class_tokens_mask] stacked_id_embeds = self.fuse_fn(image_token_embeds, valid_id_embeds) assert class_tokens_mask.sum() == stacked_id_embeds.shape[0], f"{class_tokens_mask.sum()} != {stacked_id_embeds.shape[0]}" prompt_embeds.masked_scatter_(class_tokens_mask[:, None], stacked_id_embeds.to(prompt_embeds.dtype)) updated_prompt_embeds = prompt_embeds.view(batch_size, seq_length, -1) return updated_prompt_embeds class PhotoMakerIDEncoder(comfy.clip_model.CLIPVisionModelProjection): def __init__(self): self.load_device = comfy.model_management.text_encoder_device() offload_device = comfy.model_management.text_encoder_offload_device() dtype = comfy.model_management.text_encoder_dtype(self.load_device) super().__init__(VISION_CONFIG_DICT, dtype, offload_device, comfy.ops.manual_cast) self.visual_projection_2 = comfy.ops.manual_cast.Linear(1024, 1280, bias=False) self.fuse_module = FuseModule(2048, comfy.ops.manual_cast) def forward(self, id_pixel_values, prompt_embeds, class_tokens_mask): b, num_inputs, c, h, w = id_pixel_values.shape id_pixel_values = id_pixel_values.view(b * num_inputs, c, h, w) shared_id_embeds = self.vision_model(id_pixel_values)[2] id_embeds = self.visual_projection(shared_id_embeds) id_embeds_2 = self.visual_projection_2(shared_id_embeds) id_embeds = id_embeds.view(b, 
num_inputs, 1, -1) id_embeds_2 = id_embeds_2.view(b, num_inputs, 1, -1) id_embeds = torch.cat((id_embeds, id_embeds_2), dim=-1) updated_prompt_embeds = self.fuse_module(prompt_embeds, id_embeds, class_tokens_mask) return updated_prompt_embeds class PhotoMakerLoader: @classmethod def INPUT_TYPES(s): return {"required": { "photomaker_model_name": (folder_paths.get_filename_list("photomaker"), )}} RETURN_TYPES = ("PHOTOMAKER",) FUNCTION = "load_photomaker_model" CATEGORY = "_for_testing/photomaker" def load_photomaker_model(self, photomaker_model_name): photomaker_model_path = folder_paths.get_full_path("photomaker", photomaker_model_name) photomaker_model = PhotoMakerIDEncoder() data = comfy.utils.load_torch_file(photomaker_model_path, safe_load=True) if "id_encoder" in data: data = data["id_encoder"] photomaker_model.load_state_dict(data) return (photomaker_model,) class PhotoMakerEncode: @classmethod def INPUT_TYPES(s): return {"required": { "photomaker": ("PHOTOMAKER",), "image": ("IMAGE",), "clip": ("CLIP", ), "text": ("STRING", {"multiline": True, "dynamicPrompts": True, "default": "photograph of photomaker"}), }} RETURN_TYPES = ("CONDITIONING",) FUNCTION = "apply_photomaker" CATEGORY = "_for_testing/photomaker" def apply_photomaker(self, photomaker, image, clip, text): special_token = "photomaker" pixel_values = comfy.clip_vision.clip_preprocess(image.to(photomaker.load_device)).float() try: index = text.split(" ").index(special_token) + 1 except ValueError: index = -1 tokens = clip.tokenize(text, return_word_ids=True) out_tokens = {} for k in tokens: out_tokens[k] = [] for t in tokens[k]: f = list(filter(lambda x: x[2] != index, t)) while len(f) < len(t): f.append(t[-1]) out_tokens[k].append(f) cond, pooled = clip.encode_from_tokens(out_tokens, return_pooled=True) if index > 0: token_index = index - 1 num_id_images = 1 class_tokens_mask = [True if token_index <= i < token_index+num_id_images else False for i in range(77)] out = photomaker(id_pixel_values=pixel_values.unsqueeze(0), prompt_embeds=cond.to(photomaker.load_device), class_tokens_mask=torch.tensor(class_tokens_mask, dtype=torch.bool, device=photomaker.load_device).unsqueeze(0)) else: out = cond return ([[out, {"pooled_output": pooled}]], ) NODE_CLASS_MAPPINGS = { "PhotoMakerLoader": PhotoMakerLoader, "PhotoMakerEncode": PhotoMakerEncode, }
import numpy as np
import torch
import torch.nn.functional as F
from PIL import Image
import math

import comfy.utils
import comfy.model_management

class Blend:
    def __init__(self):
        pass

    @classmethod
    def INPUT_TYPES(s):
        return {
            "required": {
                "image1": ("IMAGE",),
                "image2": ("IMAGE",),
                "blend_factor": ("FLOAT", {"default": 0.5, "min": 0.0, "max": 1.0, "step": 0.01}),
                "blend_mode": (["normal", "multiply", "screen", "overlay", "soft_light", "difference"],),
            },
        }

    RETURN_TYPES = ("IMAGE",)
    FUNCTION = "blend_images"

    CATEGORY = "image/postprocessing"

    def blend_images(self, image1: torch.Tensor, image2: torch.Tensor, blend_factor: float, blend_mode: str):
        image2 = image2.to(image1.device)
        if image1.shape != image2.shape:
            image2 = image2.permute(0, 3, 1, 2)
            image2 = comfy.utils.common_upscale(image2, image1.shape[2], image1.shape[1], upscale_method='bicubic', crop='center')
            image2 = image2.permute(0, 2, 3, 1)

        blended_image = self.blend_mode(image1, image2, blend_mode)
        blended_image = image1 * (1 - blend_factor) + blended_image * blend_factor
        blended_image = torch.clamp(blended_image, 0, 1)
        return (blended_image,)

    def blend_mode(self, img1, img2, mode):
        if mode == "normal":
            return img2
        elif mode == "multiply":
            return img1 * img2
        elif mode == "screen":
            return 1 - (1 - img1) * (1 - img2)
        elif mode == "overlay":
            return torch.where(img1 <= 0.5, 2 * img1 * img2, 1 - 2 * (1 - img1) * (1 - img2))
        elif mode == "soft_light":
            return torch.where(img2 <= 0.5, img1 - (1 - 2 * img2) * img1 * (1 - img1), img1 + (2 * img2 - 1) * (self.g(img1) - img1))
        elif mode == "difference":
            return img1 - img2
        else:
            raise ValueError(f"Unsupported blend mode: {mode}")

    def g(self, x):
        return torch.where(x <= 0.25, ((16 * x - 12) * x + 4) * x, torch.sqrt(x))

def gaussian_kernel(kernel_size: int, sigma: float, device=None):
    x, y = torch.meshgrid(torch.linspace(-1, 1, kernel_size, device=device), torch.linspace(-1, 1, kernel_size, device=device), indexing="ij")
    d = torch.sqrt(x * x + y * y)
    g = torch.exp(-(d * d) / (2.0 * sigma * sigma))
    return g / g.sum()

class Blur:
    def __init__(self):
        pass

    @classmethod
    def INPUT_TYPES(s):
        return {
            "required": {
                "image": ("IMAGE",),
                "blur_radius": ("INT", {"default": 1, "min": 1, "max": 31, "step": 1}),
                "sigma": ("FLOAT", {"default": 1.0, "min": 0.1, "max": 10.0, "step": 0.1}),
            },
        }

    RETURN_TYPES = ("IMAGE",)
    FUNCTION = "blur"

    CATEGORY = "image/postprocessing"

    def blur(self, image: torch.Tensor, blur_radius: int, sigma: float):
        if blur_radius == 0:
            return (image,)

        image = image.to(comfy.model_management.get_torch_device())
        batch_size, height, width, channels = image.shape

        kernel_size = blur_radius * 2 + 1
        kernel = gaussian_kernel(kernel_size, sigma, device=image.device).repeat(channels, 1, 1).unsqueeze(1)

        image = image.permute(0, 3, 1, 2)
        padded_image = F.pad(image, (blur_radius,blur_radius,blur_radius,blur_radius), 'reflect')
        blurred = F.conv2d(padded_image, kernel, padding=kernel_size // 2, groups=channels)[:,:,blur_radius:-blur_radius, blur_radius:-blur_radius]
        blurred = blurred.permute(0, 2, 3, 1)

        return (blurred.to(comfy.model_management.intermediate_device()),)

class Quantize:
    def __init__(self):
        pass

    @classmethod
    def INPUT_TYPES(s):
        return {
            "required": {
                "image": ("IMAGE",),
                "colors": ("INT", {"default": 256, "min": 1, "max": 256, "step": 1}),
                "dither": (["none", "floyd-steinberg", "bayer-2", "bayer-4", "bayer-8", "bayer-16"],),
            },
        }

    RETURN_TYPES = ("IMAGE",)
    FUNCTION = "quantize"

    CATEGORY = "image/postprocessing"

    def bayer(im, pal_im, order):
        def normalized_bayer_matrix(n):
            if n == 0:
                return np.zeros((1,1), "float32")
            else:
                q = 4 ** n
                m = q * normalized_bayer_matrix(n - 1)
                return np.bmat(((m-1.5, m+0.5), (m+1.5, m-0.5))) / q

        num_colors = len(pal_im.getpalette()) // 3
        spread = 2 * 256 / num_colors
        bayer_n = int(math.log2(order))
        bayer_matrix = torch.from_numpy(spread * normalized_bayer_matrix(bayer_n) + 0.5)

        result = torch.from_numpy(np.array(im).astype(np.float32))
        tw = math.ceil(result.shape[0] / bayer_matrix.shape[0])
        th = math.ceil(result.shape[1] / bayer_matrix.shape[1])
        tiled_matrix = bayer_matrix.tile(tw, th).unsqueeze(-1)
        result.add_(tiled_matrix[:result.shape[0],:result.shape[1]]).clamp_(0, 255)
        result = result.to(dtype=torch.uint8)

        im = Image.fromarray(result.cpu().numpy())
        im = im.quantize(palette=pal_im, dither=Image.Dither.NONE)
        return im

    def quantize(self, image: torch.Tensor, colors: int, dither: str):
        batch_size, height, width, _ = image.shape
        result = torch.zeros_like(image)

        for b in range(batch_size):
            im = Image.fromarray((image[b] * 255).to(torch.uint8).numpy(), mode='RGB')

            pal_im = im.quantize(colors=colors)

            if dither == "none":
                quantized_image = im.quantize(palette=pal_im, dither=Image.Dither.NONE)
            elif dither == "floyd-steinberg":
                quantized_image = im.quantize(palette=pal_im, dither=Image.Dither.FLOYDSTEINBERG)
            elif dither.startswith("bayer"):
                order = int(dither.split('-')[-1])
                quantized_image = Quantize.bayer(im, pal_im, order)

            quantized_array = torch.tensor(np.array(quantized_image.convert("RGB"))).float() / 255
            result[b] = quantized_array

        return (result,)

class Sharpen:
    def __init__(self):
        pass

    @classmethod
    def INPUT_TYPES(s):
        return {
            "required": {
                "image": ("IMAGE",),
                "sharpen_radius": ("INT", {"default": 1, "min": 1, "max": 31, "step": 1}),
                "sigma": ("FLOAT", {"default": 1.0, "min": 0.1, "max": 10.0, "step": 0.01}),
                "alpha": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 5.0, "step": 0.01}),
            },
        }

    RETURN_TYPES = ("IMAGE",)
    FUNCTION = "sharpen"

    CATEGORY = "image/postprocessing"

    def sharpen(self, image: torch.Tensor, sharpen_radius: int, sigma:float, alpha: float):
        if sharpen_radius == 0:
            return (image,)

        batch_size, height, width, channels = image.shape
        image = image.to(comfy.model_management.get_torch_device())

        kernel_size = sharpen_radius * 2 + 1
        kernel = gaussian_kernel(kernel_size, sigma, device=image.device) * -(alpha*10)
        center = kernel_size // 2
        kernel[center, center] = kernel[center, center] - kernel.sum() + 1.0
        kernel = kernel.repeat(channels, 1, 1).unsqueeze(1)

        tensor_image = image.permute(0, 3, 1, 2)
        tensor_image = F.pad(tensor_image, (sharpen_radius,sharpen_radius,sharpen_radius,sharpen_radius), 'reflect')
        sharpened = F.conv2d(tensor_image, kernel, padding=center, groups=channels)[:,:,sharpen_radius:-sharpen_radius, sharpen_radius:-sharpen_radius]
        sharpened = sharpened.permute(0, 2, 3, 1)

        result = torch.clamp(sharpened, 0, 1)

        return (result.to(comfy.model_management.intermediate_device()),)

class ImageScaleToTotalPixels:
    upscale_methods = ["nearest-exact", "bilinear", "area", "bicubic", "lanczos"]
    crop_methods = ["disabled", "center"]

    @classmethod
    def INPUT_TYPES(s):
        return {"required": { "image": ("IMAGE",), "upscale_method": (s.upscale_methods,),
                              "megapixels": ("FLOAT", {"default": 1.0, "min": 0.01, "max": 16.0, "step": 0.01}),
                              }}
    RETURN_TYPES = ("IMAGE",)
    FUNCTION = "upscale"

    CATEGORY = "image/upscaling"

    def upscale(self, image, upscale_method, megapixels):
        samples = image.movedim(-1,1)
        total = int(megapixels * 1024 * 1024)

        scale_by = math.sqrt(total / (samples.shape[3] * samples.shape[2]))
        width = round(samples.shape[3] * scale_by)
        height = round(samples.shape[2] * scale_by)

        s = comfy.utils.common_upscale(samples, width, height, upscale_method, "disabled")
        s = s.movedim(1,-1)
        return (s,)

NODE_CLASS_MAPPINGS = {
    "ImageBlend": Blend,
    "ImageBlur": Blur,
    "ImageQuantize": Quantize,
    "ImageSharpen": Sharpen,
    "ImageScaleToTotalPixels": ImageScaleToTotalPixels,
}
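# Illustrative sketch (not part of the node files): the gaussian_kernel above used as a depthwise
# convolution, mirroring Blur.blur but on CPU with an invented 1x32x32x3 random image.
import torch
import torch.nn.functional as F

def tiny_blur(image_bhwc, radius=2, sigma=1.0):
    channels = image_bhwc.shape[-1]
    ksize = radius * 2 + 1
    x, y = torch.meshgrid(torch.linspace(-1, 1, ksize), torch.linspace(-1, 1, ksize), indexing="ij")
    g = torch.exp(-(x * x + y * y) / (2.0 * sigma * sigma))
    kernel = (g / g.sum()).repeat(channels, 1, 1).unsqueeze(1)  # (C, 1, k, k) depthwise kernel
    img = F.pad(image_bhwc.permute(0, 3, 1, 2), (radius, radius, radius, radius), mode="reflect")
    return F.conv2d(img, kernel, groups=channels).permute(0, 2, 3, 1)

print(tiny_blur(torch.rand(1, 32, 32, 3)).shape)  # torch.Size([1, 32, 32, 3])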
import torch class LatentRebatch: @classmethod def INPUT_TYPES(s): return {"required": { "latents": ("LATENT",), "batch_size": ("INT", {"default": 1, "min": 1, "max": 4096}), }} RETURN_TYPES = ("LATENT",) INPUT_IS_LIST = True OUTPUT_IS_LIST = (True, ) FUNCTION = "rebatch" CATEGORY = "latent/batch" @staticmethod def get_batch(latents, list_ind, offset): '''prepare a batch out of the list of latents''' samples = latents[list_ind]['samples'] shape = samples.shape mask = latents[list_ind]['noise_mask'] if 'noise_mask' in latents[list_ind] else torch.ones((shape[0], 1, shape[2]*8, shape[3]*8), device='cpu') if mask.shape[-1] != shape[-1] * 8 or mask.shape[-2] != shape[-2]: torch.nn.functional.interpolate(mask.reshape((-1, 1, mask.shape[-2], mask.shape[-1])), size=(shape[-2]*8, shape[-1]*8), mode="bilinear") if mask.shape[0] < samples.shape[0]: mask = mask.repeat((shape[0] - 1) if 'batch_index' in latents[list_ind]: batch_inds = latents[list_ind]['batch_index'] else: batch_inds = [x+offset for x in range(shape[0])] return samples, mask, batch_inds @staticmethod def get_slices(indexable, num, batch_size): '''divides an indexable object into num slices of length batch_size, and a remainder''' slices = [] for i in range(num): slices.append(indexable[i*batch_size:(i+1)*batch_size]) if num * batch_size < len(indexable): return slices, indexable[num * batch_size:] else: return slices, None @staticmethod def slice_batch(batch, num, batch_size): result = [LatentRebatch.get_slices(x, num, batch_size) for x in batch] return list(zip(*result)) @staticmethod def cat_batch(batch1, batch2): if batch1[0] is None: return batch2 result = [torch.cat((b1, b2)) if torch.is_tensor(b1) else b1 + b2 for b1, b2 in zip(batch1, batch2)] return result def rebatch(self, latents, batch_size): batch_size = batch_size[0] output_list = [] current_batch = (None, None, None) processed = 0 for i in range(len(latents)): next_batch = self.get_batch(latents, i, processed) processed += len(next_batch[2]) if current_batch[0] is None: current_batch = next_batch elif next_batch[0].shape[-1] != current_batch[0].shape[-1] or next_batch[0].shape[-2] != current_batch[0].shape[-2]: sliced, _ = self.slice_batch(current_batch, 1, batch_size) output_list.append({'samples': sliced[0][0], 'noise_mask': sliced[1][0], 'batch_index': sliced[2][0]}) current_batch = next_batch else: current_batch = self.cat_batch(current_batch, next_batch) if current_batch[0].shape[0] > batch_size: num = current_batch[0].shape[0] sliced, remainder = self.slice_batch(current_batch, num, batch_size) for i in range(num): output_list.append({'samples': sliced[0][i], 'noise_mask': sliced[1][i], 'batch_index': sliced[2][i]}) current_batch = remainder if current_batch[0] is not None: sliced, _ = self.slice_batch(current_batch, 1, batch_size) output_list.append({'samples': sliced[0][0], 'noise_mask': sliced[1][0], 'batch_index': sliced[2][0]}) for s in output_list: if s['noise_mask'].mean() == 1.0: del s['noise_mask'] return (output_list,) class ImageRebatch: @classmethod def INPUT_TYPES(s): return {"required": { "images": ("IMAGE",), "batch_size": ("INT", {"default": 1, "min": 1, "max": 4096}), }} RETURN_TYPES = ("IMAGE",) INPUT_IS_LIST = True OUTPUT_IS_LIST = (True, ) FUNCTION = "rebatch" CATEGORY = "image/batch" def rebatch(self, images, batch_size): batch_size = batch_size[0] output_list = [] all_images = [] for img in images: for i in range(img.shape[0]): all_images.append(img[i:i+1]) for i in range(0, len(all_images), batch_size): 
output_list.append(torch.cat(all_images[i:i+batch_size], dim=0)) return (output_list,) NODE_CLASS_MAPPINGS = { "RebatchLatents": LatentRebatch, "RebatchImages": ImageRebatch, } NODE_DISPLAY_NAME_MAPPINGS = { "RebatchLatents": "Rebatch Latents", "RebatchImages": "Rebatch Images", }
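A minimal sketch of how the slicing helpers above behave on plain tensors; the sizes are made up and the snippet assumes it runs where `LatentRebatch` is importable (e.g. in the same module):

```
import torch

# 10 latents re-sliced into batches of 3: three full slices plus a remainder of 1.
samples = torch.zeros(10, 4, 64, 64)
slices, remainder = LatentRebatch.get_slices(samples, num=3, batch_size=3)
print([s.shape[0] for s in slices])  # [3, 3, 3]
print(remainder.shape[0])            # 1

# cat_batch concatenates tensors and extends plain lists element-wise.
merged = LatentRebatch.cat_batch((samples, [0, 1]), (samples, [2, 3]))
print(merged[0].shape[0], merged[1])  # 20 [0, 1, 2, 3]
```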
import torch from torch import einsum import torch.nn.functional as F import math from einops import rearrange, repeat from comfy.ldm.modules.attention import optimized_attention import comfy.samplers def attention_basic_with_sim(q, k, v, heads, mask=None, attn_precision=None): b, _, dim_head = q.shape dim_head scale = dim_head ** -0.5 h = heads q, k, v = map( lambda t: t.unsqueeze(3) .reshape(b, -1, heads, dim_head) .permute(0, 2, 1, 3) .reshape(b * heads, -1, dim_head) .contiguous(), (q, k, v), ) if attn_precision == torch.float32: sim = einsum('b i d, b j d -> b i j', q.float(), k.float()) * scale else: sim = einsum('b i d, b j d -> b i j', q, k) * scale del q, k if mask is not None: mask = rearrange(mask, 'b ... -> b (...)') max_neg_value = -torch.finfo(sim.dtype).max mask = repeat(mask, 'b j -> (b h) () j', h=h) sim.masked_fill_(~mask, max_neg_value) sim = sim.softmax(dim=-1) out = einsum('b i j, b j d -> b i d', sim.to(v.dtype), v) out = ( out.unsqueeze(0) .reshape(b, heads, -1, dim_head) .permute(0, 2, 1, 3) .reshape(b, -1, heads * dim_head) ) return (out, sim) def create_blur_map(x0, attn, sigma=3.0, threshold=1.0): _, hw1, hw2 = attn.shape b, _, lh, lw = x0.shape attn = attn.reshape(b, -1, hw1, hw2) mask = attn.mean(1, keepdim=False).sum(1, keepdim=False) > threshold ratio = 2**(math.ceil(math.sqrt(lh * lw / hw1)) - 1).bit_length() mid_shape = [math.ceil(lh / ratio), math.ceil(lw / ratio)] mask = ( mask.reshape(b, *mid_shape) .unsqueeze(1) .type(attn.dtype) ) mask = F.interpolate(mask, (lh, lw)) blurred = gaussian_blur_2d(x0, kernel_size=9, sigma=sigma) blurred = blurred * mask + x0 * (1 - mask) return blurred def gaussian_blur_2d(img, kernel_size, sigma): ksize_half = (kernel_size - 1) * 0.5 x = torch.linspace(-ksize_half, ksize_half, steps=kernel_size) pdf = torch.exp(-0.5 * (x / sigma).pow(2)) x_kernel = pdf / pdf.sum() x_kernel = x_kernel.to(device=img.device, dtype=img.dtype) kernel2d = torch.mm(x_kernel[:, None], x_kernel[None, :]) kernel2d = kernel2d.expand(img.shape[-3], 1, kernel2d.shape[0], kernel2d.shape[1]) padding = [kernel_size img = F.pad(img, padding, mode="reflect") img = F.conv2d(img, kernel2d, groups=img.shape[-3]) return img class SelfAttentionGuidance: @classmethod def INPUT_TYPES(s): return {"required": { "model": ("MODEL",), "scale": ("FLOAT", {"default": 0.5, "min": -2.0, "max": 5.0, "step": 0.1}), "blur_sigma": ("FLOAT", {"default": 2.0, "min": 0.0, "max": 10.0, "step": 0.1}), }} RETURN_TYPES = ("MODEL",) FUNCTION = "patch" CATEGORY = "_for_testing" def patch(self, model, scale, blur_sigma): m = model.clone() attn_scores = None def attn_and_record(q, k, v, extra_options): nonlocal attn_scores heads = extra_options["n_heads"] cond_or_uncond = extra_options["cond_or_uncond"] b = q.shape[0] if 1 in cond_or_uncond: uncond_index = cond_or_uncond.index(1) (out, sim) = attention_basic_with_sim(q, k, v, heads=heads, attn_precision=extra_options["attn_precision"]) n_slices = heads * b attn_scores = sim[n_slices * uncond_index:n_slices * (uncond_index+1)] return out else: return optimized_attention(q, k, v, heads=heads, attn_precision=extra_options["attn_precision"]) def post_cfg_function(args): nonlocal attn_scores uncond_attn = attn_scores sag_scale = scale sag_sigma = blur_sigma sag_threshold = 1.0 model = args["model"] uncond_pred = args["uncond_denoised"] uncond = args["uncond"] cfg_result = args["denoised"] sigma = args["sigma"] model_options = args["model_options"] x = args["input"] if min(cfg_result.shape[2:]) <= 4: return cfg_result degraded = 
create_blur_map(uncond_pred, uncond_attn, sag_sigma, sag_threshold) degraded_noised = degraded + x - uncond_pred (sag,) = comfy.samplers.calc_cond_batch(model, [uncond], degraded_noised, sigma, model_options) return cfg_result + (degraded - sag) * sag_scale m.set_model_sampler_post_cfg_function(post_cfg_function, disable_cfg1_optimization=True) m.set_model_attn1_replace(attn_and_record, "middle", 0, 0) return (m, ) NODE_CLASS_MAPPINGS = { "SelfAttentionGuidance": SelfAttentionGuidance, } NODE_DISPLAY_NAME_MAPPINGS = { "SelfAttentionGuidance": "Self-Attention Guidance", }
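A shape-only sketch for `attention_basic_with_sim` used by the node above. The sizes are made up, and it assumes the helper performs the usual multi-head split (per-head dim = channels / heads) on inputs shaped `[batch, tokens, heads * dim_head]`:

```
import torch

b, n, heads, dim_head = 2, 64, 8, 40
q = torch.randn(b, n, heads * dim_head)
k = torch.randn(b, n, heads * dim_head)
v = torch.randn(b, n, heads * dim_head)

out, sim = attention_basic_with_sim(q, k, v, heads=heads)
print(out.shape)  # torch.Size([2, 64, 320])  - same layout as the input tokens
print(sim.shape)  # torch.Size([16, 64, 64])  - one NxN attention map per (batch, head)
```

The SAG node keeps only the slice of `sim` belonging to the unconditional branch and later thresholds it in `create_blur_map` to decide where to blur.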
import folder_paths import comfy.sd import comfy.model_management import nodes import torch class TripleCLIPLoader: @classmethod def INPUT_TYPES(s): return {"required": { "clip_name1": (folder_paths.get_filename_list("clip"), ), "clip_name2": (folder_paths.get_filename_list("clip"), ), "clip_name3": (folder_paths.get_filename_list("clip"), ) }} RETURN_TYPES = ("CLIP",) FUNCTION = "load_clip" CATEGORY = "advanced/loaders" def load_clip(self, clip_name1, clip_name2, clip_name3): clip_path1 = folder_paths.get_full_path("clip", clip_name1) clip_path2 = folder_paths.get_full_path("clip", clip_name2) clip_path3 = folder_paths.get_full_path("clip", clip_name3) clip = comfy.sd.load_clip(ckpt_paths=[clip_path1, clip_path2, clip_path3], embedding_directory=folder_paths.get_folder_paths("embeddings")) return (clip,) class EmptySD3LatentImage: def __init__(self): self.device = comfy.model_management.intermediate_device() @classmethod def INPUT_TYPES(s): return {"required": { "width": ("INT", {"default": 1024, "min": 16, "max": nodes.MAX_RESOLUTION, "step": 8}), "height": ("INT", {"default": 1024, "min": 16, "max": nodes.MAX_RESOLUTION, "step": 8}), "batch_size": ("INT", {"default": 1, "min": 1, "max": 4096})}} RETURN_TYPES = ("LATENT",) FUNCTION = "generate" CATEGORY = "latent/sd3" def generate(self, width, height, batch_size=1): latent = torch.ones([batch_size, 16, height return ({"samples":latent}, ) class CLIPTextEncodeSD3: @classmethod def INPUT_TYPES(s): return {"required": { "clip": ("CLIP", ), "clip_l": ("STRING", {"multiline": True, "dynamicPrompts": True}), "clip_g": ("STRING", {"multiline": True, "dynamicPrompts": True}), "t5xxl": ("STRING", {"multiline": True, "dynamicPrompts": True}), "empty_padding": (["none", "empty_prompt"], ) }} RETURN_TYPES = ("CONDITIONING",) FUNCTION = "encode" CATEGORY = "advanced/conditioning" def encode(self, clip, clip_l, clip_g, t5xxl, empty_padding): no_padding = empty_padding == "none" tokens = clip.tokenize(clip_g) if len(clip_g) == 0 and no_padding: tokens["g"] = [] if len(clip_l) == 0 and no_padding: tokens["l"] = [] else: tokens["l"] = clip.tokenize(clip_l)["l"] if len(t5xxl) == 0 and no_padding: tokens["t5xxl"] = [] else: tokens["t5xxl"] = clip.tokenize(t5xxl)["t5xxl"] if len(tokens["l"]) != len(tokens["g"]): empty = clip.tokenize("") while len(tokens["l"]) < len(tokens["g"]): tokens["l"] += empty["l"] while len(tokens["l"]) > len(tokens["g"]): tokens["g"] += empty["g"] cond, pooled = clip.encode_from_tokens(tokens, return_pooled=True) return ([[cond, {"pooled_output": pooled}]], ) NODE_CLASS_MAPPINGS = { "TripleCLIPLoader": TripleCLIPLoader, "EmptySD3LatentImage": EmptySD3LatentImage, "CLIPTextEncodeSD3": CLIPTextEncodeSD3, }
import torch
import comfy.utils

class SD_4XUpscale_Conditioning:
    @classmethod
    def INPUT_TYPES(s):
        return {"required": {"images": ("IMAGE",),
                             "positive": ("CONDITIONING",),
                             "negative": ("CONDITIONING",),
                             "scale_ratio": ("FLOAT", {"default": 4.0, "min": 0.0, "max": 10.0, "step": 0.01}),
                             "noise_augmentation": ("FLOAT", {"default": 0.0, "min": 0.0, "max": 1.0, "step": 0.001}),
                             }}
    RETURN_TYPES = ("CONDITIONING", "CONDITIONING", "LATENT")
    RETURN_NAMES = ("positive", "negative", "latent")

    FUNCTION = "encode"

    CATEGORY = "conditioning/upscale_diffusion"

    def encode(self, images, positive, negative, scale_ratio, noise_augmentation):
        width = max(1, round(images.shape[-2] * scale_ratio))
        height = max(1, round(images.shape[-3] * scale_ratio))

        # the conditioning image is kept at a quarter of the target size, rescaled to [-1, 1]
        pixels = comfy.utils.common_upscale((images.movedim(-1,1) * 2.0) - 1.0, width // 4, height // 4, "bilinear", "center")

        out_cp = []
        out_cn = []

        for t in positive:
            n = [t[0], t[1].copy()]
            n[1]['concat_image'] = pixels
            n[1]['noise_augmentation'] = noise_augmentation
            out_cp.append(n)

        for t in negative:
            n = [t[0], t[1].copy()]
            n[1]['concat_image'] = pixels
            n[1]['noise_augmentation'] = noise_augmentation
            out_cn.append(n)

        latent = torch.zeros([images.shape[0], 4, height // 4, width // 4])
        return (out_cp, out_cn, {"samples":latent})

NODE_CLASS_MAPPINGS = {
    "SD_4XUpscale_Conditioning": SD_4XUpscale_Conditioning,
}
import torch import nodes import comfy.utils def camera_embeddings(elevation, azimuth): elevation = torch.as_tensor([elevation]) azimuth = torch.as_tensor([azimuth]) embeddings = torch.stack( [ torch.deg2rad( (90 - elevation) - (90) ), torch.sin(torch.deg2rad(azimuth)), torch.cos(torch.deg2rad(azimuth)), torch.deg2rad( 90 - torch.full_like(elevation, 0) ), ], dim=-1).unsqueeze(1) return embeddings class StableZero123_Conditioning: @classmethod def INPUT_TYPES(s): return {"required": { "clip_vision": ("CLIP_VISION",), "init_image": ("IMAGE",), "vae": ("VAE",), "width": ("INT", {"default": 256, "min": 16, "max": nodes.MAX_RESOLUTION, "step": 8}), "height": ("INT", {"default": 256, "min": 16, "max": nodes.MAX_RESOLUTION, "step": 8}), "batch_size": ("INT", {"default": 1, "min": 1, "max": 4096}), "elevation": ("FLOAT", {"default": 0.0, "min": -180.0, "max": 180.0, "step": 0.1, "round": False}), "azimuth": ("FLOAT", {"default": 0.0, "min": -180.0, "max": 180.0, "step": 0.1, "round": False}), }} RETURN_TYPES = ("CONDITIONING", "CONDITIONING", "LATENT") RETURN_NAMES = ("positive", "negative", "latent") FUNCTION = "encode" CATEGORY = "conditioning/3d_models" def encode(self, clip_vision, init_image, vae, width, height, batch_size, elevation, azimuth): output = clip_vision.encode_image(init_image) pooled = output.image_embeds.unsqueeze(0) pixels = comfy.utils.common_upscale(init_image.movedim(-1,1), width, height, "bilinear", "center").movedim(1,-1) encode_pixels = pixels[:,:,:,:3] t = vae.encode(encode_pixels) cam_embeds = camera_embeddings(elevation, azimuth) cond = torch.cat([pooled, cam_embeds.to(pooled.device).repeat((pooled.shape[0], 1, 1))], dim=-1) positive = [[cond, {"concat_latent_image": t}]] negative = [[torch.zeros_like(pooled), {"concat_latent_image": torch.zeros_like(t)}]] latent = torch.zeros([batch_size, 4, height return (positive, negative, {"samples":latent}) class StableZero123_Conditioning_Batched: @classmethod def INPUT_TYPES(s): return {"required": { "clip_vision": ("CLIP_VISION",), "init_image": ("IMAGE",), "vae": ("VAE",), "width": ("INT", {"default": 256, "min": 16, "max": nodes.MAX_RESOLUTION, "step": 8}), "height": ("INT", {"default": 256, "min": 16, "max": nodes.MAX_RESOLUTION, "step": 8}), "batch_size": ("INT", {"default": 1, "min": 1, "max": 4096}), "elevation": ("FLOAT", {"default": 0.0, "min": -180.0, "max": 180.0, "step": 0.1, "round": False}), "azimuth": ("FLOAT", {"default": 0.0, "min": -180.0, "max": 180.0, "step": 0.1, "round": False}), "elevation_batch_increment": ("FLOAT", {"default": 0.0, "min": -180.0, "max": 180.0, "step": 0.1, "round": False}), "azimuth_batch_increment": ("FLOAT", {"default": 0.0, "min": -180.0, "max": 180.0, "step": 0.1, "round": False}), }} RETURN_TYPES = ("CONDITIONING", "CONDITIONING", "LATENT") RETURN_NAMES = ("positive", "negative", "latent") FUNCTION = "encode" CATEGORY = "conditioning/3d_models" def encode(self, clip_vision, init_image, vae, width, height, batch_size, elevation, azimuth, elevation_batch_increment, azimuth_batch_increment): output = clip_vision.encode_image(init_image) pooled = output.image_embeds.unsqueeze(0) pixels = comfy.utils.common_upscale(init_image.movedim(-1,1), width, height, "bilinear", "center").movedim(1,-1) encode_pixels = pixels[:,:,:,:3] t = vae.encode(encode_pixels) cam_embeds = [] for i in range(batch_size): cam_embeds.append(camera_embeddings(elevation, azimuth)) elevation += elevation_batch_increment azimuth += azimuth_batch_increment cam_embeds = torch.cat(cam_embeds, dim=0) cond = 
torch.cat([comfy.utils.repeat_to_batch_size(pooled, batch_size), cam_embeds], dim=-1) positive = [[cond, {"concat_latent_image": t}]] negative = [[torch.zeros_like(pooled), {"concat_latent_image": torch.zeros_like(t)}]] latent = torch.zeros([batch_size, 4, height return (positive, negative, {"samples":latent, "batch_index": [0] * batch_size}) class SV3D_Conditioning: @classmethod def INPUT_TYPES(s): return {"required": { "clip_vision": ("CLIP_VISION",), "init_image": ("IMAGE",), "vae": ("VAE",), "width": ("INT", {"default": 576, "min": 16, "max": nodes.MAX_RESOLUTION, "step": 8}), "height": ("INT", {"default": 576, "min": 16, "max": nodes.MAX_RESOLUTION, "step": 8}), "video_frames": ("INT", {"default": 21, "min": 1, "max": 4096}), "elevation": ("FLOAT", {"default": 0.0, "min": -90.0, "max": 90.0, "step": 0.1, "round": False}), }} RETURN_TYPES = ("CONDITIONING", "CONDITIONING", "LATENT") RETURN_NAMES = ("positive", "negative", "latent") FUNCTION = "encode" CATEGORY = "conditioning/3d_models" def encode(self, clip_vision, init_image, vae, width, height, video_frames, elevation): output = clip_vision.encode_image(init_image) pooled = output.image_embeds.unsqueeze(0) pixels = comfy.utils.common_upscale(init_image.movedim(-1,1), width, height, "bilinear", "center").movedim(1,-1) encode_pixels = pixels[:,:,:,:3] t = vae.encode(encode_pixels) azimuth = 0 azimuth_increment = 360 / (max(video_frames, 2) - 1) elevations = [] azimuths = [] for i in range(video_frames): elevations.append(elevation) azimuths.append(azimuth) azimuth += azimuth_increment positive = [[pooled, {"concat_latent_image": t, "elevation": elevations, "azimuth": azimuths}]] negative = [[torch.zeros_like(pooled), {"concat_latent_image": torch.zeros_like(t), "elevation": elevations, "azimuth": azimuths}]] latent = torch.zeros([video_frames, 4, height return (positive, negative, {"samples":latent}) NODE_CLASS_MAPPINGS = { "StableZero123_Conditioning": StableZero123_Conditioning, "StableZero123_Conditioning_Batched": StableZero123_Conditioning_Batched, "SV3D_Conditioning": SV3D_Conditioning, }
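A small worked example of `camera_embeddings` from the file above; the numbers follow directly from the formula (negated elevation in radians, sin/cos of the azimuth, and a constant pi/2 term):

```
import torch

emb = camera_embeddings(elevation=10.0, azimuth=90.0)
print(emb.shape)  # torch.Size([1, 1, 4])

# The four entries, in order:
#   deg2rad((90 - 10) - 90) = deg2rad(-10) ~= -0.1745   (negated elevation, radians)
#   sin(deg2rad(90))        = 1.0
#   cos(deg2rad(90))        ~= 0.0
#   deg2rad(90 - 0)         = pi / 2 ~= 1.5708          (constant term)
print(emb.flatten().tolist())
```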
""" This file is part of ComfyUI. Copyright (C) 2024 Stability AI This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program. If not, see <https: """ import torch import nodes import comfy.utils class StableCascade_EmptyLatentImage: def __init__(self, device="cpu"): self.device = device @classmethod def INPUT_TYPES(s): return {"required": { "width": ("INT", {"default": 1024, "min": 256, "max": nodes.MAX_RESOLUTION, "step": 8}), "height": ("INT", {"default": 1024, "min": 256, "max": nodes.MAX_RESOLUTION, "step": 8}), "compression": ("INT", {"default": 42, "min": 4, "max": 128, "step": 1}), "batch_size": ("INT", {"default": 1, "min": 1, "max": 4096}) }} RETURN_TYPES = ("LATENT", "LATENT") RETURN_NAMES = ("stage_c", "stage_b") FUNCTION = "generate" CATEGORY = "latent/stable_cascade" def generate(self, width, height, compression, batch_size=1): c_latent = torch.zeros([batch_size, 16, height b_latent = torch.zeros([batch_size, 4, height return ({ "samples": c_latent, }, { "samples": b_latent, }) class StableCascade_StageC_VAEEncode: def __init__(self, device="cpu"): self.device = device @classmethod def INPUT_TYPES(s): return {"required": { "image": ("IMAGE",), "vae": ("VAE", ), "compression": ("INT", {"default": 42, "min": 4, "max": 128, "step": 1}), }} RETURN_TYPES = ("LATENT", "LATENT") RETURN_NAMES = ("stage_c", "stage_b") FUNCTION = "generate" CATEGORY = "latent/stable_cascade" def generate(self, image, vae, compression): width = image.shape[-2] height = image.shape[-3] out_width = (width out_height = (height s = comfy.utils.common_upscale(image.movedim(-1,1), out_width, out_height, "bicubic", "center").movedim(1,-1) c_latent = vae.encode(s[:,:,:,:3]) b_latent = torch.zeros([c_latent.shape[0], 4, (height return ({ "samples": c_latent, }, { "samples": b_latent, }) class StableCascade_StageB_Conditioning: @classmethod def INPUT_TYPES(s): return {"required": { "conditioning": ("CONDITIONING",), "stage_c": ("LATENT",), }} RETURN_TYPES = ("CONDITIONING",) FUNCTION = "set_prior" CATEGORY = "conditioning/stable_cascade" def set_prior(self, conditioning, stage_c): c = [] for t in conditioning: d = t[1].copy() d['stable_cascade_prior'] = stage_c['samples'] n = [t[0], d] c.append(n) return (c, ) class StableCascade_SuperResolutionControlnet: def __init__(self, device="cpu"): self.device = device @classmethod def INPUT_TYPES(s): return {"required": { "image": ("IMAGE",), "vae": ("VAE", ), }} RETURN_TYPES = ("IMAGE", "LATENT", "LATENT") RETURN_NAMES = ("controlnet_input", "stage_c", "stage_b") FUNCTION = "generate" CATEGORY = "_for_testing/stable_cascade" def generate(self, image, vae): width = image.shape[-2] height = image.shape[-3] batch_size = image.shape[0] controlnet_input = vae.encode(image[:,:,:,:3]).movedim(1, -1) c_latent = torch.zeros([batch_size, 16, height b_latent = torch.zeros([batch_size, 4, height return (controlnet_input, { "samples": c_latent, }, { "samples": b_latent, }) NODE_CLASS_MAPPINGS = { "StableCascade_EmptyLatentImage": StableCascade_EmptyLatentImage, 
"StableCascade_StageB_Conditioning": StableCascade_StageB_Conditioning, "StableCascade_StageC_VAEEncode": StableCascade_StageC_VAEEncode, "StableCascade_SuperResolutionControlnet": StableCascade_SuperResolutionControlnet, }
import torch from typing import Tuple, Callable import math def do_nothing(x: torch.Tensor, mode:str=None): return x def mps_gather_workaround(input, dim, index): if input.shape[-1] == 1: return torch.gather( input.unsqueeze(-1), dim - 1 if dim < 0 else dim, index.unsqueeze(-1) ).squeeze(-1) else: return torch.gather(input, dim, index) def bipartite_soft_matching_random2d(metric: torch.Tensor, w: int, h: int, sx: int, sy: int, r: int, no_rand: bool = False) -> Tuple[Callable, Callable]: """ Partitions the tokens into src and dst and merges r tokens from src to dst. Dst tokens are partitioned by choosing one randomy in each (sx, sy) region. Args: - metric [B, N, C]: metric to use for similarity - w: image width in tokens - h: image height in tokens - sx: stride in the x dimension for dst, must divide w - sy: stride in the y dimension for dst, must divide h - r: number of tokens to remove (by merging) - no_rand: if true, disable randomness (use top left corner only) """ B, N, _ = metric.shape if r <= 0 or w == 1 or h == 1: return do_nothing, do_nothing gather = mps_gather_workaround if metric.device.type == "mps" else torch.gather with torch.no_grad(): hsy, wsx = h if no_rand: rand_idx = torch.zeros(hsy, wsx, 1, device=metric.device, dtype=torch.int64) else: rand_idx = torch.randint(sy*sx, size=(hsy, wsx, 1), device=metric.device) idx_buffer_view = torch.zeros(hsy, wsx, sy*sx, device=metric.device, dtype=torch.int64) idx_buffer_view.scatter_(dim=2, index=rand_idx, src=-torch.ones_like(rand_idx, dtype=rand_idx.dtype)) idx_buffer_view = idx_buffer_view.view(hsy, wsx, sy, sx).transpose(1, 2).reshape(hsy * sy, wsx * sx) if (hsy * sy) < h or (wsx * sx) < w: idx_buffer = torch.zeros(h, w, device=metric.device, dtype=torch.int64) idx_buffer[:(hsy * sy), :(wsx * sx)] = idx_buffer_view else: idx_buffer = idx_buffer_view rand_idx = idx_buffer.reshape(1, -1, 1).argsort(dim=1) del idx_buffer, idx_buffer_view num_dst = hsy * wsx a_idx = rand_idx[:, num_dst:, :] b_idx = rand_idx[:, :num_dst, :] def split(x): C = x.shape[-1] src = gather(x, dim=1, index=a_idx.expand(B, N - num_dst, C)) dst = gather(x, dim=1, index=b_idx.expand(B, num_dst, C)) return src, dst metric = metric / metric.norm(dim=-1, keepdim=True) a, b = split(metric) scores = a @ b.transpose(-1, -2) r = min(a.shape[1], r) node_max, node_idx = scores.max(dim=-1) edge_idx = node_max.argsort(dim=-1, descending=True)[..., None] unm_idx = edge_idx[..., r:, :] src_idx = edge_idx[..., :r, :] dst_idx = gather(node_idx[..., None], dim=-2, index=src_idx) def merge(x: torch.Tensor, mode="mean") -> torch.Tensor: src, dst = split(x) n, t1, c = src.shape unm = gather(src, dim=-2, index=unm_idx.expand(n, t1 - r, c)) src = gather(src, dim=-2, index=src_idx.expand(n, r, c)) dst = dst.scatter_reduce(-2, dst_idx.expand(n, r, c), src, reduce=mode) return torch.cat([unm, dst], dim=1) def unmerge(x: torch.Tensor) -> torch.Tensor: unm_len = unm_idx.shape[1] unm, dst = x[..., :unm_len, :], x[..., unm_len:, :] _, _, c = unm.shape src = gather(dst, dim=-2, index=dst_idx.expand(B, r, c)) out = torch.zeros(B, N, c, device=x.device, dtype=x.dtype) out.scatter_(dim=-2, index=b_idx.expand(B, num_dst, c), src=dst) out.scatter_(dim=-2, index=gather(a_idx.expand(B, a_idx.shape[1], 1), dim=1, index=unm_idx).expand(B, unm_len, c), src=unm) out.scatter_(dim=-2, index=gather(a_idx.expand(B, a_idx.shape[1], 1), dim=1, index=src_idx).expand(B, r, c), src=src) return out return merge, unmerge def get_functions(x, ratio, original_shape): b, c, original_h, original_w = original_shape 
    original_tokens = original_h * original_w
    # how far this attention layer's token grid is downsampled relative to the full latent
    downsample = int(math.ceil(math.sqrt(original_tokens // x.shape[1])))
    stride_x = 2
    stride_y = 2
    max_downsample = 1

    if downsample <= max_downsample:
        w = int(math.ceil(original_w / downsample))
        h = int(math.ceil(original_h / downsample))
        r = int(x.shape[1] * ratio)
        no_rand = False
        m, u = bipartite_soft_matching_random2d(x, w, h, stride_x, stride_y, r, no_rand)
        return m, u

    nothing = lambda y: y
    return nothing, nothing


class TomePatchModel:
    @classmethod
    def INPUT_TYPES(s):
        return {"required": {"model": ("MODEL",),
                             "ratio": ("FLOAT", {"default": 0.3, "min": 0.0, "max": 1.0, "step": 0.01}),
                             }}
    RETURN_TYPES = ("MODEL",)
    FUNCTION = "patch"

    CATEGORY = "_for_testing"

    def patch(self, model, ratio):
        self.u = None

        def tomesd_m(q, k, v, extra_options):
            # merge tokens before self-attention and remember how to undo it
            m, self.u = get_functions(q, ratio, extra_options["original_shape"])
            return m(q), k, v

        def tomesd_u(n, extra_options):
            # unmerge on the attention output so downstream shapes are unchanged
            return self.u(n)

        m = model.clone()
        m.set_model_attn1_patch(tomesd_m)
        m.set_model_attn1_output_patch(tomesd_u)
        return (m, )

NODE_CLASS_MAPPINGS = {
    "TomePatchModel": TomePatchModel,
}
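A shape-only sketch of the merge/unmerge pair returned by `bipartite_soft_matching_random2d`; the 64x64 token grid and 320 channels are made-up sizes, and it assumes the dst partition is one token per (sx, sy) cell, i.e. an (h // sy) x (w // sx) grid:

```
import torch

x = torch.randn(1, 64 * 64, 320)     # [B, N, C] tokens for a 64x64 latent
r = int(x.shape[1] * 0.5)            # ask to merge half of the tokens
m, u = bipartite_soft_matching_random2d(x, w=64, h=64, sx=2, sy=2, r=r, no_rand=True)

merged = m(x)
restored = u(merged)
print(merged.shape)    # torch.Size([1, 2048, 320]) - attention now runs on half the tokens
print(restored.shape)  # torch.Size([1, 4096, 320]) - unmerge restores the original length
```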
import os
import logging
from spandrel import ModelLoader, ImageModelDescriptor
from comfy import model_management
import torch
import comfy.utils
import folder_paths

try:
    from spandrel_extra_arches import EXTRA_REGISTRY
    from spandrel import MAIN_REGISTRY
    MAIN_REGISTRY.add(*EXTRA_REGISTRY)
    logging.info("Successfully imported spandrel_extra_arches: support for non commercial upscale models.")
except:
    pass

class UpscaleModelLoader:
    @classmethod
    def INPUT_TYPES(s):
        return {"required": {"model_name": (folder_paths.get_filename_list("upscale_models"), ),
                             }}
    RETURN_TYPES = ("UPSCALE_MODEL",)
    FUNCTION = "load_model"

    CATEGORY = "loaders"

    def load_model(self, model_name):
        model_path = folder_paths.get_full_path("upscale_models", model_name)
        sd = comfy.utils.load_torch_file(model_path, safe_load=True)
        if "module.layers.0.residual_group.blocks.0.norm1.weight" in sd:
            sd = comfy.utils.state_dict_prefix_replace(sd, {"module.":""})
        out = ModelLoader().load_from_state_dict(sd).eval()

        if not isinstance(out, ImageModelDescriptor):
            raise Exception("Upscale model must be a single-image model.")

        return (out, )


class ImageUpscaleWithModel:
    @classmethod
    def INPUT_TYPES(s):
        return {"required": {"upscale_model": ("UPSCALE_MODEL",),
                             "image": ("IMAGE",),
                             }}
    RETURN_TYPES = ("IMAGE",)
    FUNCTION = "upscale"

    CATEGORY = "image/upscaling"

    def upscale(self, upscale_model, image):
        device = model_management.get_torch_device()

        # rough estimate of the memory needed for tiled upscaling
        memory_required = model_management.module_size(upscale_model.model)
        memory_required += (512 * 512 * 3) * image.element_size() * max(upscale_model.scale, 1.0) * 384.0
        memory_required += image.nelement() * image.element_size()
        model_management.free_memory(memory_required, device)

        upscale_model.to(device)
        in_img = image.movedim(-1,-3).to(device)

        tile = 512
        overlap = 32

        oom = True
        while oom:
            try:
                steps = in_img.shape[0] * comfy.utils.get_tiled_scale_steps(in_img.shape[3], in_img.shape[2], tile_x=tile, tile_y=tile, overlap=overlap)
                pbar = comfy.utils.ProgressBar(steps)
                s = comfy.utils.tiled_scale(in_img, lambda a: upscale_model(a), tile_x=tile, tile_y=tile, overlap=overlap, upscale_amount=upscale_model.scale, pbar=pbar)
                oom = False
            except model_management.OOM_EXCEPTION as e:
                # halve the tile size and retry; give up once tiles get too small
                tile //= 2
                if tile < 128:
                    raise e

        upscale_model.to("cpu")
        s = torch.clamp(s.movedim(-3,-1), min=0, max=1.0)
        return (s,)

NODE_CLASS_MAPPINGS = {
    "UpscaleModelLoader": UpscaleModelLoader,
    "ImageUpscaleWithModel": ImageUpscaleWithModel
}
import nodes import torch import comfy.utils import comfy.sd import folder_paths import comfy_extras.nodes_model_merging class ImageOnlyCheckpointLoader: @classmethod def INPUT_TYPES(s): return {"required": { "ckpt_name": (folder_paths.get_filename_list("checkpoints"), ), }} RETURN_TYPES = ("MODEL", "CLIP_VISION", "VAE") FUNCTION = "load_checkpoint" CATEGORY = "loaders/video_models" def load_checkpoint(self, ckpt_name, output_vae=True, output_clip=True): ckpt_path = folder_paths.get_full_path("checkpoints", ckpt_name) out = comfy.sd.load_checkpoint_guess_config(ckpt_path, output_vae=True, output_clip=False, output_clipvision=True, embedding_directory=folder_paths.get_folder_paths("embeddings")) return (out[0], out[3], out[2]) class SVD_img2vid_Conditioning: @classmethod def INPUT_TYPES(s): return {"required": { "clip_vision": ("CLIP_VISION",), "init_image": ("IMAGE",), "vae": ("VAE",), "width": ("INT", {"default": 1024, "min": 16, "max": nodes.MAX_RESOLUTION, "step": 8}), "height": ("INT", {"default": 576, "min": 16, "max": nodes.MAX_RESOLUTION, "step": 8}), "video_frames": ("INT", {"default": 14, "min": 1, "max": 4096}), "motion_bucket_id": ("INT", {"default": 127, "min": 1, "max": 1023}), "fps": ("INT", {"default": 6, "min": 1, "max": 1024}), "augmentation_level": ("FLOAT", {"default": 0.0, "min": 0.0, "max": 10.0, "step": 0.01}) }} RETURN_TYPES = ("CONDITIONING", "CONDITIONING", "LATENT") RETURN_NAMES = ("positive", "negative", "latent") FUNCTION = "encode" CATEGORY = "conditioning/video_models" def encode(self, clip_vision, init_image, vae, width, height, video_frames, motion_bucket_id, fps, augmentation_level): output = clip_vision.encode_image(init_image) pooled = output.image_embeds.unsqueeze(0) pixels = comfy.utils.common_upscale(init_image.movedim(-1,1), width, height, "bilinear", "center").movedim(1,-1) encode_pixels = pixels[:,:,:,:3] if augmentation_level > 0: encode_pixels += torch.randn_like(pixels) * augmentation_level t = vae.encode(encode_pixels) positive = [[pooled, {"motion_bucket_id": motion_bucket_id, "fps": fps, "augmentation_level": augmentation_level, "concat_latent_image": t}]] negative = [[torch.zeros_like(pooled), {"motion_bucket_id": motion_bucket_id, "fps": fps, "augmentation_level": augmentation_level, "concat_latent_image": torch.zeros_like(t)}]] latent = torch.zeros([video_frames, 4, height return (positive, negative, {"samples":latent}) class VideoLinearCFGGuidance: @classmethod def INPUT_TYPES(s): return {"required": { "model": ("MODEL",), "min_cfg": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 100.0, "step":0.5, "round": 0.01}), }} RETURN_TYPES = ("MODEL",) FUNCTION = "patch" CATEGORY = "sampling/video_models" def patch(self, model, min_cfg): def linear_cfg(args): cond = args["cond"] uncond = args["uncond"] cond_scale = args["cond_scale"] scale = torch.linspace(min_cfg, cond_scale, cond.shape[0], device=cond.device).reshape((cond.shape[0], 1, 1, 1)) return uncond + scale * (cond - uncond) m = model.clone() m.set_model_sampler_cfg_function(linear_cfg) return (m, ) class VideoTriangleCFGGuidance: @classmethod def INPUT_TYPES(s): return {"required": { "model": ("MODEL",), "min_cfg": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 100.0, "step":0.5, "round": 0.01}), }} RETURN_TYPES = ("MODEL",) FUNCTION = "patch" CATEGORY = "sampling/video_models" def patch(self, model, min_cfg): def linear_cfg(args): cond = args["cond"] uncond = args["uncond"] cond_scale = args["cond_scale"] period = 1.0 values = torch.linspace(0, 1, cond.shape[0], device=cond.device) values 
= 2 * (values / period - torch.floor(values / period + 0.5)).abs() scale = (values * (cond_scale - min_cfg) + min_cfg).reshape((cond.shape[0], 1, 1, 1)) return uncond + scale * (cond - uncond) m = model.clone() m.set_model_sampler_cfg_function(linear_cfg) return (m, ) class ImageOnlyCheckpointSave(comfy_extras.nodes_model_merging.CheckpointSave): CATEGORY = "_for_testing" @classmethod def INPUT_TYPES(s): return {"required": { "model": ("MODEL",), "clip_vision": ("CLIP_VISION",), "vae": ("VAE",), "filename_prefix": ("STRING", {"default": "checkpoints/ComfyUI"}),}, "hidden": {"prompt": "PROMPT", "extra_pnginfo": "EXTRA_PNGINFO"},} def save(self, model, clip_vision, vae, filename_prefix, prompt=None, extra_pnginfo=None): comfy_extras.nodes_model_merging.save_checkpoint(model, clip_vision=clip_vision, vae=vae, filename_prefix=filename_prefix, output_dir=self.output_dir, prompt=prompt, extra_pnginfo=extra_pnginfo) return {} NODE_CLASS_MAPPINGS = { "ImageOnlyCheckpointLoader": ImageOnlyCheckpointLoader, "SVD_img2vid_Conditioning": SVD_img2vid_Conditioning, "VideoLinearCFGGuidance": VideoLinearCFGGuidance, "VideoTriangleCFGGuidance": VideoTriangleCFGGuidance, "ImageOnlyCheckpointSave": ImageOnlyCheckpointSave, } NODE_DISPLAY_NAME_MAPPINGS = { "ImageOnlyCheckpointLoader": "Image Only Checkpoint Loader (img2vid model)", }
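A standalone numeric sketch of the triangle schedule above, using made-up values (8 frames, min_cfg=1.0, cond_scale=3.0); the per-frame CFG scale rises from min_cfg to the sampler's cond_scale mid-batch and falls back down:

```
import torch

min_cfg, cond_scale, frames = 1.0, 3.0, 8
period = 1.0
values = torch.linspace(0, 1, frames)
values = 2 * (values / period - torch.floor(values / period + 0.5)).abs()
scale = values * (cond_scale - min_cfg) + min_cfg
print([round(v, 2) for v in scale.tolist()])
# [1.0, 1.57, 2.14, 2.71, 2.71, 2.14, 1.57, 1.0]
```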
import nodes import folder_paths MAX_RESOLUTION = nodes.MAX_RESOLUTION class WebcamCapture(nodes.LoadImage): @classmethod def INPUT_TYPES(s): return { "required": { "image": ("WEBCAM", {}), "width": ("INT", {"default": 0, "min": 0, "max": MAX_RESOLUTION, "step": 1}), "height": ("INT", {"default": 0, "min": 0, "max": MAX_RESOLUTION, "step": 1}), "capture_on_queue": ("BOOLEAN", {"default": True}), } } RETURN_TYPES = ("IMAGE",) FUNCTION = "load_capture" CATEGORY = "image" def load_capture(s, image, **kwargs): return super().load_image(folder_paths.get_annotated_filepath(image)) NODE_CLASS_MAPPINGS = { "WebcamCapture": WebcamCapture, } NODE_DISPLAY_NAME_MAPPINGS = { "WebcamCapture": "Webcam Capture", }
from spandrel import ModelLoader def load_state_dict(state_dict): print("WARNING: comfy_extras.chainner_models is deprecated and has been replaced by the spandrel library.") return ModelLoader().load_from_state_dict(state_dict).eval()
from PIL import Image, ImageOps from io import BytesIO import numpy as np import struct import comfy.utils import time class SaveImageWebsocket: @classmethod def INPUT_TYPES(s): return {"required": {"images": ("IMAGE", ),} } RETURN_TYPES = () FUNCTION = "save_images" OUTPUT_NODE = True CATEGORY = "api/image" def save_images(self, images): pbar = comfy.utils.ProgressBar(images.shape[0]) step = 0 for image in images: i = 255. * image.cpu().numpy() img = Image.fromarray(np.clip(i, 0, 255).astype(np.uint8)) pbar.update_absolute(step, images.shape[0], ("PNG", img, None)) step += 1 return {} def IS_CHANGED(s, images): return time.time() NODE_CLASS_MAPPINGS = { "SaveImageWebsocket": SaveImageWebsocket, }
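For context on the receiving side: this node pushes each image to the client as a binary websocket message rather than writing a file. A minimal client-side decode sketch is below; it assumes the first 8 bytes of the binary frame are protocol header (the websocket API example later in this document likewise strips them with `out[8:]`) and that the payload is the PNG produced above.

```
import io
from PIL import Image

def decode_preview_frame(binary_message: bytes) -> Image.Image:
    # Assumed layout: 8 bytes of header, then the encoded PNG bytes.
    return Image.open(io.BytesIO(binary_message[8:]))
```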
model: base_learning_rate: 1.0e-04 target: ldm.models.diffusion.ddpm.LatentDiffusion params: linear_start: 0.00085 linear_end: 0.0120 num_timesteps_cond: 1 log_every_t: 200 timesteps: 1000 first_stage_key: "jpg" cond_stage_key: "txt" image_size: 64 channels: 4 cond_stage_trainable: false conditioning_key: crossattn monitor: val/loss_simple_ema scale_factor: 0.18215 use_ema: False scheduler_config: target: ldm.lr_scheduler.LambdaLinearScheduler params: warm_up_steps: [ 10000 ] cycle_lengths: [ 10000000000000 ] f_start: [ 1.e-6 ] f_max: [ 1. ] f_min: [ 1. ] unet_config: target: ldm.modules.diffusionmodules.openaimodel.UNetModel params: image_size: 32 in_channels: 4 out_channels: 4 model_channels: 320 attention_resolutions: [ 4, 2, 1 ] num_res_blocks: 2 channel_mult: [ 1, 2, 4, 4 ] num_heads: 8 use_spatial_transformer: True transformer_depth: 1 context_dim: 768 use_checkpoint: True legacy: False first_stage_config: target: ldm.models.autoencoder.AutoencoderKL params: embed_dim: 4 monitor: val/rec_loss ddconfig: double_z: true z_channels: 4 resolution: 256 in_channels: 3 out_ch: 3 ch: 128 ch_mult: - 1 - 2 - 4 - 4 num_res_blocks: 2 attn_resolutions: [] dropout: 0.0 lossconfig: target: torch.nn.Identity cond_stage_config: target: ldm.modules.encoders.modules.FrozenCLIPEmbedder params: layer: "hidden" layer_idx: -2
model: base_learning_rate: 1.0e-04 target: ldm.models.diffusion.ddpm.LatentDiffusion params: linear_start: 0.00085 linear_end: 0.0120 num_timesteps_cond: 1 log_every_t: 200 timesteps: 1000 first_stage_key: "jpg" cond_stage_key: "txt" image_size: 64 channels: 4 cond_stage_trainable: false conditioning_key: crossattn monitor: val/loss_simple_ema scale_factor: 0.18215 use_ema: False scheduler_config: target: ldm.lr_scheduler.LambdaLinearScheduler params: warm_up_steps: [ 10000 ] cycle_lengths: [ 10000000000000 ] f_start: [ 1.e-6 ] f_max: [ 1. ] f_min: [ 1. ] unet_config: target: ldm.modules.diffusionmodules.openaimodel.UNetModel params: image_size: 32 in_channels: 4 out_channels: 4 model_channels: 320 attention_resolutions: [ 4, 2, 1 ] num_res_blocks: 2 channel_mult: [ 1, 2, 4, 4 ] num_heads: 8 use_spatial_transformer: True transformer_depth: 1 context_dim: 768 use_checkpoint: True legacy: False first_stage_config: target: ldm.models.autoencoder.AutoencoderKL params: embed_dim: 4 monitor: val/rec_loss ddconfig: double_z: true z_channels: 4 resolution: 256 in_channels: 3 out_ch: 3 ch: 128 ch_mult: - 1 - 2 - 4 - 4 num_res_blocks: 2 attn_resolutions: [] dropout: 0.0 lossconfig: target: torch.nn.Identity cond_stage_config: target: ldm.modules.encoders.modules.FrozenCLIPEmbedder
model: base_learning_rate: 1.0e-04 target: ldm.models.diffusion.ddpm.LatentDiffusion params: linear_start: 0.00085 linear_end: 0.0120 num_timesteps_cond: 1 log_every_t: 200 timesteps: 1000 first_stage_key: "jpg" cond_stage_key: "txt" image_size: 64 channels: 4 cond_stage_trainable: false conditioning_key: crossattn monitor: val/loss_simple_ema scale_factor: 0.18215 use_ema: False scheduler_config: target: ldm.lr_scheduler.LambdaLinearScheduler params: warm_up_steps: [ 10000 ] cycle_lengths: [ 10000000000000 ] f_start: [ 1.e-6 ] f_max: [ 1. ] f_min: [ 1. ] unet_config: target: ldm.modules.diffusionmodules.openaimodel.UNetModel params: image_size: 32 in_channels: 4 out_channels: 4 model_channels: 320 attention_resolutions: [ 4, 2, 1 ] num_res_blocks: 2 channel_mult: [ 1, 2, 4, 4 ] num_heads: 8 use_spatial_transformer: True transformer_depth: 1 context_dim: 768 use_checkpoint: True legacy: False first_stage_config: target: ldm.models.autoencoder.AutoencoderKL params: embed_dim: 4 monitor: val/rec_loss ddconfig: double_z: true z_channels: 4 resolution: 256 in_channels: 3 out_ch: 3 ch: 128 ch_mult: - 1 - 2 - 4 - 4 num_res_blocks: 2 attn_resolutions: [] dropout: 0.0 lossconfig: target: torch.nn.Identity cond_stage_config: target: ldm.modules.encoders.modules.FrozenCLIPEmbedder params: layer: "hidden" layer_idx: -2
model: base_learning_rate: 1.0e-04 target: ldm.models.diffusion.ddpm.LatentDiffusion params: linear_start: 0.00085 linear_end: 0.0120 num_timesteps_cond: 1 log_every_t: 200 timesteps: 1000 first_stage_key: "jpg" cond_stage_key: "txt" image_size: 64 channels: 4 cond_stage_trainable: false conditioning_key: crossattn monitor: val/loss_simple_ema scale_factor: 0.18215 use_ema: False scheduler_config: target: ldm.lr_scheduler.LambdaLinearScheduler params: warm_up_steps: [ 10000 ] cycle_lengths: [ 10000000000000 ] f_start: [ 1.e-6 ] f_max: [ 1. ] f_min: [ 1. ] unet_config: target: ldm.modules.diffusionmodules.openaimodel.UNetModel params: use_fp16: True image_size: 32 in_channels: 4 out_channels: 4 model_channels: 320 attention_resolutions: [ 4, 2, 1 ] num_res_blocks: 2 channel_mult: [ 1, 2, 4, 4 ] num_heads: 8 use_spatial_transformer: True transformer_depth: 1 context_dim: 768 use_checkpoint: True legacy: False first_stage_config: target: ldm.models.autoencoder.AutoencoderKL params: embed_dim: 4 monitor: val/rec_loss ddconfig: double_z: true z_channels: 4 resolution: 256 in_channels: 3 out_ch: 3 ch: 128 ch_mult: - 1 - 2 - 4 - 4 num_res_blocks: 2 attn_resolutions: [] dropout: 0.0 lossconfig: target: torch.nn.Identity cond_stage_config: target: ldm.modules.encoders.modules.FrozenCLIPEmbedder params: layer: "hidden" layer_idx: -2
model: base_learning_rate: 1.0e-04 target: ldm.models.diffusion.ddpm.LatentDiffusion params: linear_start: 0.00085 linear_end: 0.0120 num_timesteps_cond: 1 log_every_t: 200 timesteps: 1000 first_stage_key: "jpg" cond_stage_key: "txt" image_size: 64 channels: 4 cond_stage_trainable: false conditioning_key: crossattn monitor: val/loss_simple_ema scale_factor: 0.18215 use_ema: False scheduler_config: target: ldm.lr_scheduler.LambdaLinearScheduler params: warm_up_steps: [ 10000 ] cycle_lengths: [ 10000000000000 ] f_start: [ 1.e-6 ] f_max: [ 1. ] f_min: [ 1. ] unet_config: target: ldm.modules.diffusionmodules.openaimodel.UNetModel params: use_fp16: True image_size: 32 in_channels: 4 out_channels: 4 model_channels: 320 attention_resolutions: [ 4, 2, 1 ] num_res_blocks: 2 channel_mult: [ 1, 2, 4, 4 ] num_heads: 8 use_spatial_transformer: True transformer_depth: 1 context_dim: 768 use_checkpoint: True legacy: False first_stage_config: target: ldm.models.autoencoder.AutoencoderKL params: embed_dim: 4 monitor: val/rec_loss ddconfig: double_z: true z_channels: 4 resolution: 256 in_channels: 3 out_ch: 3 ch: 128 ch_mult: - 1 - 2 - 4 - 4 num_res_blocks: 2 attn_resolutions: [] dropout: 0.0 lossconfig: target: torch.nn.Identity cond_stage_config: target: ldm.modules.encoders.modules.FrozenCLIPEmbedder
model: base_learning_rate: 7.5e-05 target: ldm.models.diffusion.ddpm.LatentInpaintDiffusion params: linear_start: 0.00085 linear_end: 0.0120 num_timesteps_cond: 1 log_every_t: 200 timesteps: 1000 first_stage_key: "jpg" cond_stage_key: "txt" image_size: 64 channels: 4 cond_stage_trainable: false conditioning_key: hybrid monitor: val/loss_simple_ema scale_factor: 0.18215 finetune_keys: null scheduler_config: target: ldm.lr_scheduler.LambdaLinearScheduler params: warm_up_steps: [ 2500 ] cycle_lengths: [ 10000000000000 ] f_start: [ 1.e-6 ] f_max: [ 1. ] f_min: [ 1. ] unet_config: target: ldm.modules.diffusionmodules.openaimodel.UNetModel params: image_size: 32 in_channels: 9 out_channels: 4 model_channels: 320 attention_resolutions: [ 4, 2, 1 ] num_res_blocks: 2 channel_mult: [ 1, 2, 4, 4 ] num_heads: 8 use_spatial_transformer: True transformer_depth: 1 context_dim: 768 use_checkpoint: True legacy: False first_stage_config: target: ldm.models.autoencoder.AutoencoderKL params: embed_dim: 4 monitor: val/rec_loss ddconfig: double_z: true z_channels: 4 resolution: 256 in_channels: 3 out_ch: 3 ch: 128 ch_mult: - 1 - 2 - 4 - 4 num_res_blocks: 2 attn_resolutions: [] dropout: 0.0 lossconfig: target: torch.nn.Identity cond_stage_config: target: ldm.modules.encoders.modules.FrozenCLIPEmbedder
model: base_learning_rate: 1.0e-4 target: ldm.models.diffusion.ddpm.LatentDiffusion params: parameterization: "v" linear_start: 0.00085 linear_end: 0.0120 num_timesteps_cond: 1 log_every_t: 200 timesteps: 1000 first_stage_key: "jpg" cond_stage_key: "txt" image_size: 64 channels: 4 cond_stage_trainable: false conditioning_key: crossattn monitor: val/loss_simple_ema scale_factor: 0.18215 use_ema: False unet_config: target: ldm.modules.diffusionmodules.openaimodel.UNetModel params: use_checkpoint: True use_fp16: True image_size: 32 in_channels: 4 out_channels: 4 model_channels: 320 attention_resolutions: [ 4, 2, 1 ] num_res_blocks: 2 channel_mult: [ 1, 2, 4, 4 ] num_head_channels: 64 use_spatial_transformer: True use_linear_in_transformer: True transformer_depth: 1 context_dim: 1024 legacy: False first_stage_config: target: ldm.models.autoencoder.AutoencoderKL params: embed_dim: 4 monitor: val/rec_loss ddconfig: double_z: true z_channels: 4 resolution: 256 in_channels: 3 out_ch: 3 ch: 128 ch_mult: - 1 - 2 - 4 - 4 num_res_blocks: 2 attn_resolutions: [] dropout: 0.0 lossconfig: target: torch.nn.Identity cond_stage_config: target: ldm.modules.encoders.modules.FrozenOpenCLIPEmbedder params: freeze: True layer: "penultimate"
model: base_learning_rate: 1.0e-4 target: ldm.models.diffusion.ddpm.LatentDiffusion params: parameterization: "v" linear_start: 0.00085 linear_end: 0.0120 num_timesteps_cond: 1 log_every_t: 200 timesteps: 1000 first_stage_key: "jpg" cond_stage_key: "txt" image_size: 64 channels: 4 cond_stage_trainable: false conditioning_key: crossattn monitor: val/loss_simple_ema scale_factor: 0.18215 use_ema: False unet_config: target: ldm.modules.diffusionmodules.openaimodel.UNetModel params: use_checkpoint: True use_fp16: False image_size: 32 in_channels: 4 out_channels: 4 model_channels: 320 attention_resolutions: [ 4, 2, 1 ] num_res_blocks: 2 channel_mult: [ 1, 2, 4, 4 ] num_head_channels: 64 use_spatial_transformer: True use_linear_in_transformer: True transformer_depth: 1 context_dim: 1024 legacy: False first_stage_config: target: ldm.models.autoencoder.AutoencoderKL params: embed_dim: 4 monitor: val/rec_loss ddconfig: double_z: true z_channels: 4 resolution: 256 in_channels: 3 out_ch: 3 ch: 128 ch_mult: - 1 - 2 - 4 - 4 num_res_blocks: 2 attn_resolutions: [] dropout: 0.0 lossconfig: target: torch.nn.Identity cond_stage_config: target: ldm.modules.encoders.modules.FrozenOpenCLIPEmbedder params: freeze: True layer: "penultimate"
model: base_learning_rate: 1.0e-4 target: ldm.models.diffusion.ddpm.LatentDiffusion params: linear_start: 0.00085 linear_end: 0.0120 num_timesteps_cond: 1 log_every_t: 200 timesteps: 1000 first_stage_key: "jpg" cond_stage_key: "txt" image_size: 64 channels: 4 cond_stage_trainable: false conditioning_key: crossattn monitor: val/loss_simple_ema scale_factor: 0.18215 use_ema: False unet_config: target: ldm.modules.diffusionmodules.openaimodel.UNetModel params: use_checkpoint: True use_fp16: True image_size: 32 in_channels: 4 out_channels: 4 model_channels: 320 attention_resolutions: [ 4, 2, 1 ] num_res_blocks: 2 channel_mult: [ 1, 2, 4, 4 ] num_head_channels: 64 use_spatial_transformer: True use_linear_in_transformer: True transformer_depth: 1 context_dim: 1024 legacy: False first_stage_config: target: ldm.models.autoencoder.AutoencoderKL params: embed_dim: 4 monitor: val/rec_loss ddconfig: double_z: true z_channels: 4 resolution: 256 in_channels: 3 out_ch: 3 ch: 128 ch_mult: - 1 - 2 - 4 - 4 num_res_blocks: 2 attn_resolutions: [] dropout: 0.0 lossconfig: target: torch.nn.Identity cond_stage_config: target: ldm.modules.encoders.modules.FrozenOpenCLIPEmbedder params: freeze: True layer: "penultimate"
model: base_learning_rate: 1.0e-4 target: ldm.models.diffusion.ddpm.LatentDiffusion params: linear_start: 0.00085 linear_end: 0.0120 num_timesteps_cond: 1 log_every_t: 200 timesteps: 1000 first_stage_key: "jpg" cond_stage_key: "txt" image_size: 64 channels: 4 cond_stage_trainable: false conditioning_key: crossattn monitor: val/loss_simple_ema scale_factor: 0.18215 use_ema: False unet_config: target: ldm.modules.diffusionmodules.openaimodel.UNetModel params: use_checkpoint: True use_fp16: False image_size: 32 in_channels: 4 out_channels: 4 model_channels: 320 attention_resolutions: [ 4, 2, 1 ] num_res_blocks: 2 channel_mult: [ 1, 2, 4, 4 ] num_head_channels: 64 use_spatial_transformer: True use_linear_in_transformer: True transformer_depth: 1 context_dim: 1024 legacy: False first_stage_config: target: ldm.models.autoencoder.AutoencoderKL params: embed_dim: 4 monitor: val/rec_loss ddconfig: double_z: true z_channels: 4 resolution: 256 in_channels: 3 out_ch: 3 ch: 128 ch_mult: - 1 - 2 - 4 - 4 num_res_blocks: 2 attn_resolutions: [] dropout: 0.0 lossconfig: target: torch.nn.Identity cond_stage_config: target: ldm.modules.encoders.modules.FrozenOpenCLIPEmbedder params: freeze: True layer: "penultimate"
model: base_learning_rate: 5.0e-05 target: ldm.models.diffusion.ddpm.LatentInpaintDiffusion params: linear_start: 0.00085 linear_end: 0.0120 num_timesteps_cond: 1 log_every_t: 200 timesteps: 1000 first_stage_key: "jpg" cond_stage_key: "txt" image_size: 64 channels: 4 cond_stage_trainable: false conditioning_key: hybrid scale_factor: 0.18215 monitor: val/loss_simple_ema finetune_keys: null use_ema: False unet_config: target: ldm.modules.diffusionmodules.openaimodel.UNetModel params: use_checkpoint: True image_size: 32 in_channels: 9 out_channels: 4 model_channels: 320 attention_resolutions: [ 4, 2, 1 ] num_res_blocks: 2 channel_mult: [ 1, 2, 4, 4 ] num_head_channels: 64 use_spatial_transformer: True use_linear_in_transformer: True transformer_depth: 1 context_dim: 1024 legacy: False first_stage_config: target: ldm.models.autoencoder.AutoencoderKL params: embed_dim: 4 monitor: val/rec_loss ddconfig: double_z: true z_channels: 4 resolution: 256 in_channels: 3 out_ch: 3 ch: 128 ch_mult: - 1 - 2 - 4 - 4 num_res_blocks: 2 attn_resolutions: [ ] dropout: 0.0 lossconfig: target: torch.nn.Identity cond_stage_config: target: ldm.modules.encoders.modules.FrozenOpenCLIPEmbedder params: freeze: True layer: "penultimate" data: target: ldm.data.laion.WebDataModuleFromConfig params: tar_base: null p_unsafe_threshold: 0.1 filter_word_list: "data/filters.yaml" max_pwatermark: 0.45 batch_size: 8 num_workers: 6 multinode: True min_size: 512 train: shards: - "pipe:aws s3 cp s3: - "pipe:aws s3 cp s3: - "pipe:aws s3 cp s3: - "pipe:aws s3 cp s3: - "pipe:aws s3 cp s3: shuffle: 10000 image_key: jpg image_transforms: - target: torchvision.transforms.Resize params: size: 512 interpolation: 3 - target: torchvision.transforms.RandomCrop params: size: 512 postprocess: target: ldm.data.laion.AddMask params: mode: "512train-large" p_drop: 0.25 validation: shards: - "pipe:aws s3 cp s3: shuffle: 0 image_key: jpg image_transforms: - target: torchvision.transforms.Resize params: size: 512 interpolation: 3 - target: torchvision.transforms.CenterCrop params: size: 512 postprocess: target: ldm.data.laion.AddMask params: mode: "512train-large" p_drop: 0.25 lightning: find_unused_parameters: True modelcheckpoint: params: every_n_train_steps: 5000 callbacks: metrics_over_trainsteps_checkpoint: params: every_n_train_steps: 10000 image_logger: target: main.ImageLogger params: enable_autocast: False disabled: False batch_frequency: 1000 max_images: 4 increase_log_steps: False log_first_step: False log_images_kwargs: use_ema_scope: False inpaint: False plot_progressive_rows: False plot_diffusion_rows: False N: 4 unconditional_guidance_scale: 5.0 unconditional_guidance_label: [""] ddim_steps: 50 ddim_eta: 0.0 trainer: benchmark: True val_check_interval: 5000000 num_sanity_val_steps: 0 accumulate_grad_batches: 1
import json from urllib import request, parse import random prompt_text = """ { "3": { "class_type": "KSampler", "inputs": { "cfg": 8, "denoise": 1, "latent_image": [ "5", 0 ], "model": [ "4", 0 ], "negative": [ "7", 0 ], "positive": [ "6", 0 ], "sampler_name": "euler", "scheduler": "normal", "seed": 8566257, "steps": 20 } }, "4": { "class_type": "CheckpointLoaderSimple", "inputs": { "ckpt_name": "v1-5-pruned-emaonly.ckpt" } }, "5": { "class_type": "EmptyLatentImage", "inputs": { "batch_size": 1, "height": 512, "width": 512 } }, "6": { "class_type": "CLIPTextEncode", "inputs": { "clip": [ "4", 1 ], "text": "masterpiece best quality girl" } }, "7": { "class_type": "CLIPTextEncode", "inputs": { "clip": [ "4", 1 ], "text": "bad hands" } }, "8": { "class_type": "VAEDecode", "inputs": { "samples": [ "3", 0 ], "vae": [ "4", 2 ] } }, "9": { "class_type": "SaveImage", "inputs": { "filename_prefix": "ComfyUI", "images": [ "8", 0 ] } } } """ def queue_prompt(prompt): p = {"prompt": prompt} data = json.dumps(p).encode('utf-8') req = request.Request("http: request.urlopen(req) prompt = json.loads(prompt_text) prompt["6"]["inputs"]["text"] = "masterpiece best quality man" prompt["3"]["inputs"]["seed"] = 5 queue_prompt(prompt)
import websocket import uuid import json import urllib.request import urllib.parse server_address = "127.0.0.1:8188" client_id = str(uuid.uuid4()) def queue_prompt(prompt): p = {"prompt": prompt, "client_id": client_id} data = json.dumps(p).encode('utf-8') req = urllib.request.Request("http: return json.loads(urllib.request.urlopen(req).read()) def get_image(filename, subfolder, folder_type): data = {"filename": filename, "subfolder": subfolder, "type": folder_type} url_values = urllib.parse.urlencode(data) with urllib.request.urlopen("http: return response.read() def get_history(prompt_id): with urllib.request.urlopen("http: return json.loads(response.read()) def get_images(ws, prompt): prompt_id = queue_prompt(prompt)['prompt_id'] output_images = {} while True: out = ws.recv() if isinstance(out, str): message = json.loads(out) if message['type'] == 'executing': data = message['data'] if data['node'] is None and data['prompt_id'] == prompt_id: break else: continue history = get_history(prompt_id)[prompt_id] for o in history['outputs']: for node_id in history['outputs']: node_output = history['outputs'][node_id] if 'images' in node_output: images_output = [] for image in node_output['images']: image_data = get_image(image['filename'], image['subfolder'], image['type']) images_output.append(image_data) output_images[node_id] = images_output return output_images prompt_text = """ { "3": { "class_type": "KSampler", "inputs": { "cfg": 8, "denoise": 1, "latent_image": [ "5", 0 ], "model": [ "4", 0 ], "negative": [ "7", 0 ], "positive": [ "6", 0 ], "sampler_name": "euler", "scheduler": "normal", "seed": 8566257, "steps": 20 } }, "4": { "class_type": "CheckpointLoaderSimple", "inputs": { "ckpt_name": "v1-5-pruned-emaonly.ckpt" } }, "5": { "class_type": "EmptyLatentImage", "inputs": { "batch_size": 1, "height": 512, "width": 512 } }, "6": { "class_type": "CLIPTextEncode", "inputs": { "clip": [ "4", 1 ], "text": "masterpiece best quality girl" } }, "7": { "class_type": "CLIPTextEncode", "inputs": { "clip": [ "4", 1 ], "text": "bad hands" } }, "8": { "class_type": "VAEDecode", "inputs": { "samples": [ "3", 0 ], "vae": [ "4", 2 ] } }, "9": { "class_type": "SaveImage", "inputs": { "filename_prefix": "ComfyUI", "images": [ "8", 0 ] } } } """ prompt = json.loads(prompt_text) prompt["6"]["inputs"]["text"] = "masterpiece best quality man" prompt["3"]["inputs"]["seed"] = 5 ws = websocket.WebSocket() ws.connect("ws: images = get_images(ws, prompt)
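An optional follow-up to the script above (a sketch; the output directory name is made up): the entries in the `images` dict returned by `get_images` are the raw image bytes returned by `get_image`, so they can be written to disk directly or re-opened with PIL.

```
import io
import os
from PIL import Image

os.makedirs("api_output", exist_ok=True)  # hypothetical destination folder
for node_id, node_images in images.items():
    for idx, image_data in enumerate(node_images):
        img = Image.open(io.BytesIO(image_data))
        img.save(os.path.join("api_output", f"{node_id}_{idx}.png"))
```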
import websocket import uuid import json import urllib.request import urllib.parse server_address = "127.0.0.1:8188" client_id = str(uuid.uuid4()) def queue_prompt(prompt): p = {"prompt": prompt, "client_id": client_id} data = json.dumps(p).encode('utf-8') req = urllib.request.Request("http: return json.loads(urllib.request.urlopen(req).read()) def get_image(filename, subfolder, folder_type): data = {"filename": filename, "subfolder": subfolder, "type": folder_type} url_values = urllib.parse.urlencode(data) with urllib.request.urlopen("http: return response.read() def get_history(prompt_id): with urllib.request.urlopen("http: return json.loads(response.read()) def get_images(ws, prompt): prompt_id = queue_prompt(prompt)['prompt_id'] output_images = {} current_node = "" while True: out = ws.recv() if isinstance(out, str): message = json.loads(out) if message['type'] == 'executing': data = message['data'] if data['prompt_id'] == prompt_id: if data['node'] is None: break else: current_node = data['node'] else: if current_node == 'save_image_websocket_node': images_output = output_images.get(current_node, []) images_output.append(out[8:]) output_images[current_node] = images_output return output_images prompt_text = """ { "3": { "class_type": "KSampler", "inputs": { "cfg": 8, "denoise": 1, "latent_image": [ "5", 0 ], "model": [ "4", 0 ], "negative": [ "7", 0 ], "positive": [ "6", 0 ], "sampler_name": "euler", "scheduler": "normal", "seed": 8566257, "steps": 20 } }, "4": { "class_type": "CheckpointLoaderSimple", "inputs": { "ckpt_name": "v1-5-pruned-emaonly.ckpt" } }, "5": { "class_type": "EmptyLatentImage", "inputs": { "batch_size": 1, "height": 512, "width": 512 } }, "6": { "class_type": "CLIPTextEncode", "inputs": { "clip": [ "4", 1 ], "text": "masterpiece best quality girl" } }, "7": { "class_type": "CLIPTextEncode", "inputs": { "clip": [ "4", 1 ], "text": "bad hands" } }, "8": { "class_type": "VAEDecode", "inputs": { "samples": [ "3", 0 ], "vae": [ "4", 2 ] } }, "save_image_websocket_node": { "class_type": "SaveImageWebsocket", "inputs": { "images": [ "8", 0 ] } } } """ prompt = json.loads(prompt_text) prompt["6"]["inputs"]["text"] = "masterpiece best quality man" prompt["3"]["inputs"]["seed"] = 5 ws = websocket.WebSocket() ws.connect("ws: images = get_images(ws, prompt)
import os import pytest def pytest_addoption(parser): parser.addoption('--output_dir', action="store", default='tests/inference/samples', help='Output directory for generated images') parser.addoption("--listen", type=str, default="127.0.0.1", metavar="IP", nargs="?", const="0.0.0.0", help="Specify the IP address to listen on (default: 127.0.0.1). If --listen is provided without an argument, it defaults to 0.0.0.0. (listens on all)") parser.addoption("--port", type=int, default=8188, help="Set the listen port.") @pytest.fixture(scope="session", autouse=True) def args_pytest(pytestconfig): args = {} args['output_dir'] = pytestconfig.getoption('output_dir') args['listen'] = pytestconfig.getoption('listen') args['port'] = pytestconfig.getoption('port') os.makedirs(args['output_dir'], exist_ok=True) return args def pytest_collection_modifyitems(items): LAST_TESTS = ['test_quality'] last_items = [] for test_name in LAST_TESTS: for item in items.copy(): print(item.module.__name__, item) if item.module.__name__ == test_name: last_items.append(item) items.remove(item) items.extend(last_items)
Additional requirements for running tests:

```
pip install pytest
pip install websocket-client==1.6.1 opencv-python==4.6.0.66 scikit-image==0.21.0
```

Run inference tests:

```
pytest tests/inference
```

The quality comparison tests compare images in 2 directories to ensure they are the same.

1) Run an inference test to save a directory of "ground truth" images
```
pytest tests/inference --output_dir tests/inference/baseline
```
2) Make code edits
3) Run inference and quality comparison tests
```
pytest
```
import os import pytest def pytest_addoption(parser): parser.addoption('--baseline_dir', action="store", default='tests/inference/baseline', help='Directory for ground-truth images') parser.addoption('--test_dir', action="store", default='tests/inference/samples', help='Directory for images to test') parser.addoption('--metrics_file', action="store", default='tests/metrics.md', help='Output file for metrics') parser.addoption('--img_output_dir', action="store", default='tests/compare/samples', help='Output directory for diff metric images') @pytest.fixture(scope="session", autouse=True) def args_pytest(pytestconfig): args = {} args['baseline_dir'] = pytestconfig.getoption('baseline_dir') args['test_dir'] = pytestconfig.getoption('test_dir') args['metrics_file'] = pytestconfig.getoption('metrics_file') args['img_output_dir'] = pytestconfig.getoption('img_output_dir') with open(args['metrics_file'], 'a') as f: if os.stat(args['metrics_file']).st_size == 0: f.write("| date | run | file | status | value | \n") f.write("| --- | --- | --- | --- | --- | \n") return args def gather_file_basenames(directory: str): files = [] for file in os.listdir(directory): if file.endswith(".png"): files.append(file) return files def pytest_generate_tests(metafunc): if "baseline_fname" in metafunc.fixturenames: baseline_fnames = gather_file_basenames(metafunc.config.getoption("baseline_dir")) metafunc.parametrize("baseline_fname", baseline_fnames)
import datetime
import numpy as np
import os
from PIL import Image
import pytest
from pytest import fixture
from typing import Tuple, List
from cv2 import imread, cvtColor, COLOR_BGR2RGB
from skimage.metrics import structural_similarity as ssim

"""
This test suite compares images in 2 directories by file name
The directories are specified by the command line arguments --baseline_dir and --test_dir
"""


def ssim_score(img0: np.ndarray, img1: np.ndarray) -> Tuple[float, np.ndarray]:
    score, diff = ssim(img0, img1, channel_axis=-1, full=True)
    # Scale the diff map to an 8-bit image so it can be saved alongside the samples
    diff = (diff * 255).astype("uint8")
    return score, diff


METRICS = {"ssim": ssim_score}
METRICS_PASS_THRESHOLD = {"ssim": 0.95}


class TestCompareImageMetrics:
    @fixture(scope="class")
    def test_file_names(self, args_pytest):
        test_dir = args_pytest['test_dir']
        fnames = self.gather_file_basenames(test_dir)
        yield fnames
        del fnames

    @fixture(scope="class", autouse=True)
    def teardown(self, args_pytest):
        yield
        # After all tests in the class: build side-by-side grids of baseline, test and
        # diff images, named with the score recorded in the metrics file.
        baseline_dir = args_pytest['baseline_dir']
        test_dir = args_pytest['test_dir']
        img_output_dir = args_pytest['img_output_dir']
        metrics_file = args_pytest['metrics_file']

        grid_dir = os.path.join(img_output_dir, "grid")
        os.makedirs(grid_dir, exist_ok=True)

        for metric_dir in METRICS.keys():
            metric_path = os.path.join(img_output_dir, metric_dir)
            for file in os.listdir(metric_path):
                if file.endswith(".png"):
                    score = self.lookup_score_from_fname(file, metrics_file)
                    image_file_list = []
                    image_file_list.append([
                        os.path.join(baseline_dir, file),
                        os.path.join(test_dir, file),
                        os.path.join(metric_path, file)
                    ])
                    image_list = [[Image.open(file) for file in files] for files in image_file_list]
                    grid = self.image_grid(image_list)
                    grid.save(os.path.join(grid_dir, f"{metric_dir}_{score:.3f}_{file}"))

    @fixture()
    def fname(self, baseline_fname):
        yield baseline_fname
        del baseline_fname

    def test_directories_not_empty(self, args_pytest):
        baseline_dir = args_pytest['baseline_dir']
        test_dir = args_pytest['test_dir']
        assert len(os.listdir(baseline_dir)) != 0, f"Baseline directory {baseline_dir} is empty"
        assert len(os.listdir(test_dir)) != 0, f"Test directory {test_dir} is empty"

    def test_dir_has_all_matching_metadata(self, fname, test_file_names, args_pytest):
        baseline_file_path = os.path.join(args_pytest['baseline_dir'], fname)
        file_paths = [os.path.join(args_pytest['test_dir'], f) for f in test_file_names]
        file_match = self.find_file_match(baseline_file_path, file_paths)
        assert file_match is not None, f"Could not find a file in {args_pytest['test_dir']} with matching metadata to {baseline_file_path}"

    @pytest.mark.parametrize("metric", METRICS.keys())
    def test_pipeline_compare(
        self,
        args_pytest,
        fname,
        test_file_names,
        metric,
    ):
        baseline_dir = args_pytest['baseline_dir']
        test_dir = args_pytest['test_dir']
        metrics_output_file = args_pytest['metrics_file']
        img_output_dir = args_pytest['img_output_dir']

        baseline_file_path = os.path.join(baseline_dir, fname)

        # Find the test image whose embedded prompt metadata matches the baseline image
        file_paths = [os.path.join(test_dir, f) for f in test_file_names]
        test_file = self.find_file_match(baseline_file_path, file_paths)

        # Run the metric
        sample_baseline = self.read_img(baseline_file_path)
        sample_secondary = self.read_img(test_file)

        score, metric_img = METRICS[metric](sample_baseline, sample_secondary)
        metric_status = score > METRICS_PASS_THRESHOLD[metric]

        # Append the result to the metrics markdown table
        with open(metrics_output_file, 'a') as f:
            run_info = os.path.splitext(fname)[0]
            metric_status_str = "PASS ✅" if metric_status else "FAIL ❌"
            date_str = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")
            f.write(f"| {date_str} | {run_info} | {metric} | {metric_status_str} | {score} | \n")

        # Save the diff image for this metric
        metric_img_dir = os.path.join(img_output_dir, metric)
        os.makedirs(metric_img_dir, exist_ok=True)
        output_filename = f'{fname}'
        Image.fromarray(metric_img).save(os.path.join(metric_img_dir, output_filename))

        assert score > METRICS_PASS_THRESHOLD[metric]

    def read_img(self, filename: str) -> np.ndarray:
        cvImg = imread(filename)
        cvImg = cvtColor(cvImg, COLOR_BGR2RGB)
        return cvImg

    def image_grid(self, img_list: list[list[Image.Image]]):
        rows = len(img_list)
        cols = len(img_list[0])

        w, h = img_list[0][0].size
        grid = Image.new('RGB', size=(cols*w, rows*h))

        for i, row in enumerate(img_list):
            for j, img in enumerate(row):
                grid.paste(img, box=(j*w, i*h))
        return grid

    def lookup_score_from_fname(self,
                                fname: str,
                                metrics_output_file: str
                                ) -> float:
        fname_basestr = os.path.splitext(fname)[0]
        with open(metrics_output_file, 'r') as f:
            for line in f:
                if fname_basestr in line:
                    score = float(line.split('|')[5])
                    return score
        raise ValueError(f"Could not find score for {fname} in {metrics_output_file}")

    def gather_file_basenames(self, directory: str):
        files = []
        for file in os.listdir(directory):
            if file.endswith(".png"):
                files.append(file)
        return files

    def read_file_prompt(self, fname: str) -> str:
        # Read the prompt embedded in the PNG metadata
        img = Image.open(fname)
        img.load()
        return img.info['prompt']

    def find_file_match(self, baseline_file: str, file_paths: List[str]):
        baseline_prompt = self.read_file_prompt(baseline_file)

        # Do not match on empty prompts
        if baseline_prompt is None or baseline_prompt == "":
            return None

        # Check the file with the same basename first, since it is the most likely match
        basename = os.path.basename(baseline_file)
        file_path_basenames = [os.path.basename(f) for f in file_paths]
        if basename in file_path_basenames:
            match_index = file_path_basenames.index(basename)
            file_paths.insert(0, file_paths.pop(match_index))

        for f in file_paths:
            test_file_prompt = self.read_file_prompt(f)
            if baseline_prompt == test_file_prompt:
                return f
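# A small standalone sketch of the ssim_score metric used above, outside of pytest:
# compare two PNGs by structural similarity and save the diff map. The file names here
# are placeholders, not files shipped with the test suite.
import numpy as np
from cv2 import imread, cvtColor, COLOR_BGR2RGB
from PIL import Image
from skimage.metrics import structural_similarity as ssim


def compare_images(baseline_path: str, test_path: str, diff_path: str) -> float:
    img0 = cvtColor(imread(baseline_path), COLOR_BGR2RGB)
    img1 = cvtColor(imread(test_path), COLOR_BGR2RGB)
    # channel_axis=-1 treats the last dimension as color channels; full=True also returns the diff map
    score, diff = ssim(img0, img1, channel_axis=-1, full=True)
    Image.fromarray((diff * 255).astype("uint8")).save(diff_path)
    return score


if __name__ == "__main__":
    s = compare_images("baseline.png", "test.png", "ssim_diff.png")
    print(f"SSIM: {s:.4f} ({'PASS' if s > 0.95 else 'FAIL'} at the 0.95 threshold used above)")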
from copy import deepcopy
from io import BytesIO
from urllib import request
import numpy
import os
from PIL import Image
import pytest
from pytest import fixture
import time
import torch
from typing import Union
import json
import subprocess
import websocket
import uuid
import urllib.request
import urllib.parse
from comfy.samplers import KSampler

"""
These tests generate and save images through a range of parameters
"""


class ComfyGraph:
    def __init__(self,
                 graph: dict,
                 sampler_nodes: list[str],
                 ):
        self.graph = graph
        self.sampler_nodes = sampler_nodes

    def set_prompt(self, prompt, negative_prompt=None):
        # Set the prompt for every sampler node (e.g. base and refiner)
        for node in self.sampler_nodes:
            prompt_node = self.graph[node]['inputs']['positive'][0]
            self.graph[prompt_node]['inputs']['text'] = prompt
            if negative_prompt:
                negative_prompt_node = self.graph[node]['inputs']['negative'][0]
                self.graph[negative_prompt_node]['inputs']['text'] = negative_prompt

    def set_sampler_name(self, sampler_name: str):
        for node in self.sampler_nodes:
            self.graph[node]['inputs']['sampler_name'] = sampler_name

    def set_scheduler(self, scheduler: str):
        for node in self.sampler_nodes:
            self.graph[node]['inputs']['scheduler'] = scheduler

    def set_filename_prefix(self, prefix: str):
        for node in self.graph:
            if self.graph[node]['class_type'] == 'SaveImage':
                self.graph[node]['inputs']['filename_prefix'] = prefix


class ComfyClient:
    # Thin client for the ComfyUI HTTP/WebSocket API
    def connect(self,
                listen: str = '127.0.0.1',
                port: Union[str, int] = 8188,
                client_id: str = str(uuid.uuid4())
                ):
        self.client_id = client_id
        self.server_address = f"{listen}:{port}"
        ws = websocket.WebSocket()
        # /ws streams execution progress messages for this client id
        ws.connect("ws://{}/ws?clientId={}".format(self.server_address, self.client_id))
        self.ws = ws

    def queue_prompt(self, prompt):
        p = {"prompt": prompt, "client_id": self.client_id}
        data = json.dumps(p).encode('utf-8')
        # POST the workflow to the /prompt endpoint
        req = urllib.request.Request("http://{}/prompt".format(self.server_address), data=data)
        return json.loads(urllib.request.urlopen(req).read())

    def get_image(self, filename, subfolder, folder_type):
        data = {"filename": filename, "subfolder": subfolder, "type": folder_type}
        url_values = urllib.parse.urlencode(data)
        # /view returns the raw image bytes
        with urllib.request.urlopen("http://{}/view?{}".format(self.server_address, url_values)) as response:
            return response.read()

    def get_history(self, prompt_id):
        with urllib.request.urlopen("http://{}/history/{}".format(self.server_address, prompt_id)) as response:
            return json.loads(response.read())

    def get_images(self, graph, save=True):
        prompt = graph
        if not save:
            # Swap save nodes for preview nodes so nothing is written to disk
            prompt_str = json.dumps(prompt)
            prompt_str = prompt_str.replace('SaveImage', 'PreviewImage')
            prompt = json.loads(prompt_str)

        prompt_id = self.queue_prompt(prompt)['prompt_id']
        output_images = {}
        while True:
            out = self.ws.recv()
            if isinstance(out, str):
                message = json.loads(out)
                if message['type'] == 'executing':
                    data = message['data']
                    if data['node'] is None and data['prompt_id'] == prompt_id:
                        break  # execution is done
            else:
                continue  # previews are binary data

        history = self.get_history(prompt_id)[prompt_id]
        for o in history['outputs']:
            for node_id in history['outputs']:
                node_output = history['outputs'][node_id]
                if 'images' in node_output:
                    images_output = []
                    for image in node_output['images']:
                        image_data = self.get_image(image['filename'], image['subfolder'], image['type'])
                        images_output.append(image_data)
                    output_images[node_id] = images_output

        return output_images


# Initialize the graphs used for parametrization
default_graph_file = 'tests/inference/graphs/default_graph_sdxl1_0.json'
with open(default_graph_file, 'r') as file:
    default_graph = json.loads(file.read())
DEFAULT_COMFY_GRAPH = ComfyGraph(graph=default_graph, sampler_nodes=['10', '14'])
DEFAULT_COMFY_GRAPH_ID = os.path.splitext(os.path.basename(default_graph_file))[0]

comfy_graph_list = [DEFAULT_COMFY_GRAPH]
comfy_graph_ids = [DEFAULT_COMFY_GRAPH_ID]

prompt_list = [
    'a painting of a cat',
]

sampler_list = KSampler.SAMPLERS
scheduler_list = KSampler.SCHEDULERS


@pytest.mark.inference
@pytest.mark.parametrize("sampler", sampler_list)
@pytest.mark.parametrize("scheduler", scheduler_list)
@pytest.mark.parametrize("prompt", prompt_list)
class TestInference:
    @fixture(scope="class", autouse=True)
    def _server(self, args_pytest):
        # Start the ComfyUI server for the duration of the class
        p = subprocess.Popen([
            'python', 'main.py',
            '--output-directory', args_pytest["output_dir"],
            '--listen', args_pytest["listen"],
            '--port', str(args_pytest["port"]),
        ])
        yield
        p.kill()
        torch.cuda.empty_cache()

    def start_client(self, listen: str, port: int):
        comfy_client = ComfyClient()
        # Connect to the server, retrying while it starts up
        n_tries = 5
        for i in range(n_tries):
            time.sleep(4)
            try:
                comfy_client.connect(listen=listen, port=port)
            except ConnectionRefusedError as e:
                print(e)
                print(f"({i+1}/{n_tries}) Retrying...")
            else:
                break
        return comfy_client

    @fixture(scope="class", params=comfy_graph_list, ids=comfy_graph_ids, autouse=True)
    def _client_graph(self, request, args_pytest, _server) -> (ComfyClient, ComfyGraph):
        comfy_graph = request.param

        # Start the client and warm up the pipeline once without saving
        comfy_client = self.start_client(args_pytest["listen"], args_pytest["port"])
        comfy_client.get_images(graph=comfy_graph.graph, save=False)

        yield comfy_client, comfy_graph
        del comfy_client
        del comfy_graph
        torch.cuda.empty_cache()

    @fixture
    def client(self, _client_graph):
        client = _client_graph[0]
        yield client

    @fixture
    def comfy_graph(self, _client_graph):
        # Copy so each test mutates its own graph
        graph = deepcopy(_client_graph[1])
        yield graph

    def test_comfy(
        self,
        client,
        comfy_graph,
        sampler,
        scheduler,
        prompt,
        request
    ):
        test_info = request.node.name
        comfy_graph.set_filename_prefix(test_info)
        comfy_graph.set_sampler_name(sampler)
        comfy_graph.set_scheduler(scheduler)
        comfy_graph.set_prompt(prompt)

        # Generate
        images = client.get_images(comfy_graph.graph)

        assert len(images) != 0, "No images generated"
        # Assert that no generated image is blank
        for images_output in images.values():
            for image_data in images_output:
                pil_image = Image.open(BytesIO(image_data))
                assert numpy.array(pil_image).any() != 0, "Image is blank"
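# A hedged usage sketch for the ComfyGraph/ComfyClient helpers defined above: queue the
# default SDXL graph against an already running ComfyUI server and dump the outputs.
# It assumes a server listening on 127.0.0.1:8188 and the graph file referenced by the
# module above; the output file names are placeholders.
import json


def run_default_graph_once(prompt_text: str = "a painting of a cat"):
    with open("tests/inference/graphs/default_graph_sdxl1_0.json", "r") as f:
        graph = ComfyGraph(graph=json.load(f), sampler_nodes=["10", "14"])
    graph.set_prompt(prompt_text)
    graph.set_filename_prefix("manual_run")

    client = ComfyClient()
    client.connect(listen="127.0.0.1", port=8188)
    images = client.get_images(graph.graph, save=True)
    for node_id, outputs in images.items():
        for i, image_bytes in enumerate(outputs):
            with open(f"manual_run_{node_id}_{i}.png", "wb") as out:
                out.write(image_bytes)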
{ "4": { "inputs": { "ckpt_name": "sd_xl_base_1.0.safetensors" }, "class_type": "CheckpointLoaderSimple" }, "5": { "inputs": { "width": 1024, "height": 1024, "batch_size": 1 }, "class_type": "EmptyLatentImage" }, "6": { "inputs": { "text": "a photo of a cat", "clip": [ "4", 1 ] }, "class_type": "CLIPTextEncode" }, "10": { "inputs": { "add_noise": "enable", "noise_seed": 42, "steps": 20, "cfg": 7.5, "sampler_name": "euler", "scheduler": "normal", "start_at_step": 0, "end_at_step": 32, "return_with_leftover_noise": "enable", "model": [ "4", 0 ], "positive": [ "6", 0 ], "negative": [ "15", 0 ], "latent_image": [ "5", 0 ] }, "class_type": "KSamplerAdvanced" }, "12": { "inputs": { "samples": [ "14", 0 ], "vae": [ "4", 2 ] }, "class_type": "VAEDecode" }, "13": { "inputs": { "filename_prefix": "test_inference", "images": [ "12", 0 ] }, "class_type": "SaveImage" }, "14": { "inputs": { "add_noise": "disable", "noise_seed": 42, "steps": 20, "cfg": 7.5, "sampler_name": "euler", "scheduler": "normal", "start_at_step": 32, "end_at_step": 10000, "return_with_leftover_noise": "disable", "model": [ "16", 0 ], "positive": [ "17", 0 ], "negative": [ "20", 0 ], "latent_image": [ "10", 0 ] }, "class_type": "KSamplerAdvanced" }, "15": { "inputs": { "conditioning": [ "6", 0 ] }, "class_type": "ConditioningZeroOut" }, "16": { "inputs": { "ckpt_name": "sd_xl_refiner_1.0.safetensors" }, "class_type": "CheckpointLoaderSimple" }, "17": { "inputs": { "text": "a photo of a cat", "clip": [ "16", 1 ] }, "class_type": "CLIPTextEncode" }, "20": { "inputs": { "text": "", "clip": [ "16", 1 ] }, "class_type": "CLIPTextEncode" } }
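# The JSON above matches the default_graph_sdxl1_0.json that test_inference.py loads: an SDXL
# base + refiner workflow whose two KSamplerAdvanced nodes are "10" and "14". A minimal sketch
# of editing and submitting it directly over HTTP, assuming a ComfyUI server on 127.0.0.1:8188
# exposing the /prompt endpoint used by ComfyClient.queue_prompt; the seed value is arbitrary.
import json
import urllib.request


def queue_graph(graph_path: str, server: str = "127.0.0.1:8188") -> str:
    with open(graph_path, "r") as f:
        graph = json.load(f)
    # Tweak the workflow before queueing, e.g. the noise seed of both sampler nodes
    for node_id in ("10", "14"):
        graph[node_id]["inputs"]["noise_seed"] = 123
    payload = json.dumps({"prompt": graph}).encode("utf-8")
    req = urllib.request.Request(f"http://{server}/prompt", data=payload)
    with urllib.request.urlopen(req) as response:
        return json.loads(response.read())["prompt_id"]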
{ "presets": ["@babel/preset-env"], "plugins": ["babel-plugin-transform-import-meta"] }
{ "name": "comfui-tests", "version": "1.0.0", "lockfileVersion": 3, "requires": true, "packages": { "": { "name": "comfui-tests", "version": "1.0.0", "license": "GPL-3.0", "devDependencies": { "@babel/preset-env": "^7.22.20", "@types/jest": "^29.5.5", "babel-plugin-transform-import-meta": "^2.2.1", "jest": "^29.7.0", "jest-environment-jsdom": "^29.7.0" } }, "node_modules/@ampproject/remapping": { "version": "2.2.1", "resolved": "https: "integrity": "sha512-lFMjJTrFL3j7L9yBxwYfCq2k6qqwHyzuUl/XBnif78PWTJYyL/dfowQHWE3sp6U6ZzqWiiIZnpTMO96zhkjwtg==", "dev": true, "dependencies": { "@jridgewell/gen-mapping": "^0.3.0", "@jridgewell/trace-mapping": "^0.3.9" }, "engines": { "node": ">=6.0.0" } }, "node_modules/@babel/code-frame": { "version": "7.22.13", "resolved": "https: "integrity": "sha512-XktuhWlJ5g+3TJXc5upd9Ks1HutSArik6jf2eAjYFyIOf4ej3RN+184cZbzDvbPnuTJIUhPKKJE3cIsYTiAT3w==", "dev": true, "dependencies": { "@babel/highlight": "^7.22.13", "chalk": "^2.4.2" }, "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/code-frame/node_modules/ansi-styles": { "version": "3.2.1", "resolved": "https: "integrity": "sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==", "dev": true, "dependencies": { "color-convert": "^1.9.0" }, "engines": { "node": ">=4" } }, "node_modules/@babel/code-frame/node_modules/chalk": { "version": "2.4.2", "resolved": "https: "integrity": "sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==", "dev": true, "dependencies": { "ansi-styles": "^3.2.1", "escape-string-regexp": "^1.0.5", "supports-color": "^5.3.0" }, "engines": { "node": ">=4" } }, "node_modules/@babel/code-frame/node_modules/color-convert": { "version": "1.9.3", "resolved": "https: "integrity": "sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==", "dev": true, "dependencies": { "color-name": "1.1.3" } }, "node_modules/@babel/code-frame/node_modules/color-name": { "version": "1.1.3", "resolved": "https: "integrity": "sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw==", "dev": true }, "node_modules/@babel/code-frame/node_modules/escape-string-regexp": { "version": "1.0.5", "resolved": "https: "integrity": "sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==", "dev": true, "engines": { "node": ">=0.8.0" } }, "node_modules/@babel/code-frame/node_modules/has-flag": { "version": "3.0.0", "resolved": "https: "integrity": "sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw==", "dev": true, "engines": { "node": ">=4" } }, "node_modules/@babel/code-frame/node_modules/supports-color": { "version": "5.5.0", "resolved": "https: "integrity": "sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==", "dev": true, "dependencies": { "has-flag": "^3.0.0" }, "engines": { "node": ">=4" } }, "node_modules/@babel/compat-data": { "version": "7.22.20", "resolved": "https: "integrity": "sha512-BQYjKbpXjoXwFW5jGqiizJQQT/aC7pFm9Ok1OWssonuguICi264lbgMzRp2ZMmRSlfkX6DsWDDcsrctK8Rwfiw==", "dev": true, "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/core": { "version": "7.23.0", "resolved": "https: "integrity": "sha512-97z/ju/Jy1rZmDxybphrBuI+jtJjFVoz7Mr9yUQVVVi+DNZE333uFQeMOqcCIy1x3WYBIbWftUSLmbNXNT7qFQ==", "dev": true, "dependencies": { "@ampproject/remapping": "^2.2.0", "@babel/code-frame": "^7.22.13", "@babel/generator": "^7.23.0", 
"@babel/helper-compilation-targets": "^7.22.15", "@babel/helper-module-transforms": "^7.23.0", "@babel/helpers": "^7.23.0", "@babel/parser": "^7.23.0", "@babel/template": "^7.22.15", "@babel/traverse": "^7.23.0", "@babel/types": "^7.23.0", "convert-source-map": "^2.0.0", "debug": "^4.1.0", "gensync": "^1.0.0-beta.2", "json5": "^2.2.3", "semver": "^6.3.1" }, "engines": { "node": ">=6.9.0" }, "funding": { "type": "opencollective", "url": "https: } }, "node_modules/@babel/generator": { "version": "7.23.0", "resolved": "https: "integrity": "sha512-lN85QRR+5IbYrMWM6Y4pE/noaQtg4pNiqeNGX60eqOfo6gtEj6uw/JagelB8vVztSd7R6M5n1+PQkDbHbBRU4g==", "dev": true, "dependencies": { "@babel/types": "^7.23.0", "@jridgewell/gen-mapping": "^0.3.2", "@jridgewell/trace-mapping": "^0.3.17", "jsesc": "^2.5.1" }, "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/helper-annotate-as-pure": { "version": "7.22.5", "resolved": "https: "integrity": "sha512-LvBTxu8bQSQkcyKOU+a1btnNFQ1dMAd0R6PyW3arXes06F6QLWLIrd681bxRPIXlrMGR3XYnW9JyML7dP3qgxg==", "dev": true, "dependencies": { "@babel/types": "^7.22.5" }, "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/helper-builder-binary-assignment-operator-visitor": { "version": "7.22.15", "resolved": "https: "integrity": "sha512-QkBXwGgaoC2GtGZRoma6kv7Szfv06khvhFav67ZExau2RaXzy8MpHSMO2PNoP2XtmQphJQRHFfg77Bq731Yizw==", "dev": true, "dependencies": { "@babel/types": "^7.22.15" }, "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/helper-compilation-targets": { "version": "7.22.15", "resolved": "https: "integrity": "sha512-y6EEzULok0Qvz8yyLkCvVX+02ic+By2UdOhylwUOvOn9dvYc9mKICJuuU1n1XBI02YWsNsnrY1kc6DVbjcXbtw==", "dev": true, "dependencies": { "@babel/compat-data": "^7.22.9", "@babel/helper-validator-option": "^7.22.15", "browserslist": "^4.21.9", "lru-cache": "^5.1.1", "semver": "^6.3.1" }, "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/helper-create-class-features-plugin": { "version": "7.22.15", "resolved": "https: "integrity": "sha512-jKkwA59IXcvSaiK2UN45kKwSC9o+KuoXsBDvHvU/7BecYIp8GQ2UwrVvFgJASUT+hBnwJx6MhvMCuMzwZZ7jlg==", "dev": true, "dependencies": { "@babel/helper-annotate-as-pure": "^7.22.5", "@babel/helper-environment-visitor": "^7.22.5", "@babel/helper-function-name": "^7.22.5", "@babel/helper-member-expression-to-functions": "^7.22.15", "@babel/helper-optimise-call-expression": "^7.22.5", "@babel/helper-replace-supers": "^7.22.9", "@babel/helper-skip-transparent-expression-wrappers": "^7.22.5", "@babel/helper-split-export-declaration": "^7.22.6", "semver": "^6.3.1" }, "engines": { "node": ">=6.9.0" }, "peerDependencies": { "@babel/core": "^7.0.0" } }, "node_modules/@babel/helper-create-regexp-features-plugin": { "version": "7.22.15", "resolved": "https: "integrity": "sha512-29FkPLFjn4TPEa3RE7GpW+qbE8tlsu3jntNYNfcGsc49LphF1PQIiD+vMZ1z1xVOKt+93khA9tc2JBs3kBjA7w==", "dev": true, "dependencies": { "@babel/helper-annotate-as-pure": "^7.22.5", "regexpu-core": "^5.3.1", "semver": "^6.3.1" }, "engines": { "node": ">=6.9.0" }, "peerDependencies": { "@babel/core": "^7.0.0" } }, "node_modules/@babel/helper-define-polyfill-provider": { "version": "0.4.2", "resolved": "https: "integrity": "sha512-k0qnnOqHn5dK9pZpfD5XXZ9SojAITdCKRn2Lp6rnDGzIbaP0rHyMPk/4wsSxVBVz4RfN0q6VpXWP2pDGIoQ7hw==", "dev": true, "dependencies": { "@babel/helper-compilation-targets": "^7.22.6", "@babel/helper-plugin-utils": "^7.22.5", "debug": "^4.1.1", "lodash.debounce": "^4.0.8", "resolve": "^1.14.2" }, "peerDependencies": { "@babel/core": "^7.4.0 || ^8.0.0-0 <8.0.0" } }, 
"node_modules/@babel/helper-environment-visitor": { "version": "7.22.20", "resolved": "https: "integrity": "sha512-zfedSIzFhat/gFhWfHtgWvlec0nqB9YEIVrpuwjruLlXfUSnA8cJB0miHKwqDnQ7d32aKo2xt88/xZptwxbfhA==", "dev": true, "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/helper-function-name": { "version": "7.23.0", "resolved": "https: "integrity": "sha512-OErEqsrxjZTJciZ4Oo+eoZqeW9UIiOcuYKRJA4ZAgV9myA+pOXhhmpfNCKjEH/auVfEYVFJ6y1Tc4r0eIApqiw==", "dev": true, "dependencies": { "@babel/template": "^7.22.15", "@babel/types": "^7.23.0" }, "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/helper-hoist-variables": { "version": "7.22.5", "resolved": "https: "integrity": "sha512-wGjk9QZVzvknA6yKIUURb8zY3grXCcOZt+/7Wcy8O2uctxhplmUPkOdlgoNhmdVee2c92JXbf1xpMtVNbfoxRw==", "dev": true, "dependencies": { "@babel/types": "^7.22.5" }, "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/helper-member-expression-to-functions": { "version": "7.23.0", "resolved": "https: "integrity": "sha512-6gfrPwh7OuT6gZyJZvd6WbTfrqAo7vm4xCzAXOusKqq/vWdKXphTpj5klHKNmRUU6/QRGlBsyU9mAIPaWHlqJA==", "dev": true, "dependencies": { "@babel/types": "^7.23.0" }, "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/helper-module-imports": { "version": "7.22.15", "resolved": "https: "integrity": "sha512-0pYVBnDKZO2fnSPCrgM/6WMc7eS20Fbok+0r88fp+YtWVLZrp4CkafFGIp+W0VKw4a22sgebPT99y+FDNMdP4w==", "dev": true, "dependencies": { "@babel/types": "^7.22.15" }, "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/helper-module-transforms": { "version": "7.23.0", "resolved": "https: "integrity": "sha512-WhDWw1tdrlT0gMgUJSlX0IQvoO1eN279zrAUbVB+KpV2c3Tylz8+GnKOLllCS6Z/iZQEyVYxhZVUdPTqs2YYPw==", "dev": true, "dependencies": { "@babel/helper-environment-visitor": "^7.22.20", "@babel/helper-module-imports": "^7.22.15", "@babel/helper-simple-access": "^7.22.5", "@babel/helper-split-export-declaration": "^7.22.6", "@babel/helper-validator-identifier": "^7.22.20" }, "engines": { "node": ">=6.9.0" }, "peerDependencies": { "@babel/core": "^7.0.0" } }, "node_modules/@babel/helper-optimise-call-expression": { "version": "7.22.5", "resolved": "https: "integrity": "sha512-HBwaojN0xFRx4yIvpwGqxiV2tUfl7401jlok564NgB9EHS1y6QT17FmKWm4ztqjeVdXLuC4fSvHc5ePpQjoTbw==", "dev": true, "dependencies": { "@babel/types": "^7.22.5" }, "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/helper-plugin-utils": { "version": "7.22.5", "resolved": "https: "integrity": "sha512-uLls06UVKgFG9QD4OeFYLEGteMIAa5kpTPcFL28yuCIIzsf6ZyKZMllKVOCZFhiZ5ptnwX4mtKdWCBE/uT4amg==", "dev": true, "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/helper-remap-async-to-generator": { "version": "7.22.20", "resolved": "https: "integrity": "sha512-pBGyV4uBqOns+0UvhsTO8qgl8hO89PmiDYv+/COyp1aeMcmfrfruz+/nCMFiYyFF/Knn0yfrC85ZzNFjembFTw==", "dev": true, "dependencies": { "@babel/helper-annotate-as-pure": "^7.22.5", "@babel/helper-environment-visitor": "^7.22.20", "@babel/helper-wrap-function": "^7.22.20" }, "engines": { "node": ">=6.9.0" }, "peerDependencies": { "@babel/core": "^7.0.0" } }, "node_modules/@babel/helper-replace-supers": { "version": "7.22.20", "resolved": "https: "integrity": "sha512-qsW0In3dbwQUbK8kejJ4R7IHVGwHJlV6lpG6UA7a9hSa2YEiAib+N1T2kr6PEeUT+Fl7najmSOS6SmAwCHK6Tw==", "dev": true, "dependencies": { "@babel/helper-environment-visitor": "^7.22.20", "@babel/helper-member-expression-to-functions": "^7.22.15", "@babel/helper-optimise-call-expression": "^7.22.5" }, "engines": { "node": ">=6.9.0" }, "peerDependencies": { "@babel/core": "^7.0.0" } }, 
"node_modules/@babel/helper-simple-access": { "version": "7.22.5", "resolved": "https: "integrity": "sha512-n0H99E/K+Bika3++WNL17POvo4rKWZ7lZEp1Q+fStVbUi8nxPQEBOlTmCOxW/0JsS56SKKQ+ojAe2pHKJHN35w==", "dev": true, "dependencies": { "@babel/types": "^7.22.5" }, "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/helper-skip-transparent-expression-wrappers": { "version": "7.22.5", "resolved": "https: "integrity": "sha512-tK14r66JZKiC43p8Ki33yLBVJKlQDFoA8GYN67lWCDCqoL6EMMSuM9b+Iff2jHaM/RRFYl7K+iiru7hbRqNx8Q==", "dev": true, "dependencies": { "@babel/types": "^7.22.5" }, "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/helper-split-export-declaration": { "version": "7.22.6", "resolved": "https: "integrity": "sha512-AsUnxuLhRYsisFiaJwvp1QF+I3KjD5FOxut14q/GzovUe6orHLesW2C7d754kRm53h5gqrz6sFl6sxc4BVtE/g==", "dev": true, "dependencies": { "@babel/types": "^7.22.5" }, "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/helper-string-parser": { "version": "7.22.5", "resolved": "https: "integrity": "sha512-mM4COjgZox8U+JcXQwPijIZLElkgEpO5rsERVDJTc2qfCDfERyob6k5WegS14SX18IIjv+XD+GrqNumY5JRCDw==", "dev": true, "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/helper-validator-identifier": { "version": "7.22.20", "resolved": "https: "integrity": "sha512-Y4OZ+ytlatR8AI+8KZfKuL5urKp7qey08ha31L8b3BwewJAoJamTzyvxPR/5D+KkdJCGPq/+8TukHBlY10FX9A==", "dev": true, "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/helper-validator-option": { "version": "7.22.15", "resolved": "https: "integrity": "sha512-bMn7RmyFjY/mdECUbgn9eoSY4vqvacUnS9i9vGAGttgFWesO6B4CYWA7XlpbWgBt71iv/hfbPlynohStqnu5hA==", "dev": true, "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/helper-wrap-function": { "version": "7.22.20", "resolved": "https: "integrity": "sha512-pms/UwkOpnQe/PDAEdV/d7dVCoBbB+R4FvYoHGZz+4VPcg7RtYy2KP7S2lbuWM6FCSgob5wshfGESbC/hzNXZw==", "dev": true, "dependencies": { "@babel/helper-function-name": "^7.22.5", "@babel/template": "^7.22.15", "@babel/types": "^7.22.19" }, "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/helpers": { "version": "7.23.1", "resolved": "https: "integrity": "sha512-chNpneuK18yW5Oxsr+t553UZzzAs3aZnFm4bxhebsNTeshrC95yA7l5yl7GBAG+JG1rF0F7zzD2EixK9mWSDoA==", "dev": true, "dependencies": { "@babel/template": "^7.22.15", "@babel/traverse": "^7.23.0", "@babel/types": "^7.23.0" }, "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/highlight": { "version": "7.22.20", "resolved": "https: "integrity": "sha512-dkdMCN3py0+ksCgYmGG8jKeGA/8Tk+gJwSYYlFGxG5lmhfKNoAy004YpLxpS1W2J8m/EK2Ew+yOs9pVRwO89mg==", "dev": true, "dependencies": { "@babel/helper-validator-identifier": "^7.22.20", "chalk": "^2.4.2", "js-tokens": "^4.0.0" }, "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/highlight/node_modules/ansi-styles": { "version": "3.2.1", "resolved": "https: "integrity": "sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==", "dev": true, "dependencies": { "color-convert": "^1.9.0" }, "engines": { "node": ">=4" } }, "node_modules/@babel/highlight/node_modules/chalk": { "version": "2.4.2", "resolved": "https: "integrity": "sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==", "dev": true, "dependencies": { "ansi-styles": "^3.2.1", "escape-string-regexp": "^1.0.5", "supports-color": "^5.3.0" }, "engines": { "node": ">=4" } }, "node_modules/@babel/highlight/node_modules/color-convert": { "version": "1.9.3", "resolved": "https: "integrity": 
"sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==", "dev": true, "dependencies": { "color-name": "1.1.3" } }, "node_modules/@babel/highlight/node_modules/color-name": { "version": "1.1.3", "resolved": "https: "integrity": "sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw==", "dev": true }, "node_modules/@babel/highlight/node_modules/escape-string-regexp": { "version": "1.0.5", "resolved": "https: "integrity": "sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==", "dev": true, "engines": { "node": ">=0.8.0" } }, "node_modules/@babel/highlight/node_modules/has-flag": { "version": "3.0.0", "resolved": "https: "integrity": "sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw==", "dev": true, "engines": { "node": ">=4" } }, "node_modules/@babel/highlight/node_modules/supports-color": { "version": "5.5.0", "resolved": "https: "integrity": "sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==", "dev": true, "dependencies": { "has-flag": "^3.0.0" }, "engines": { "node": ">=4" } }, "node_modules/@babel/parser": { "version": "7.23.0", "resolved": "https: "integrity": "sha512-vvPKKdMemU85V9WE/l5wZEmImpCtLqbnTvqDS2U1fJ96KrxoW7KrXhNsNCblQlg8Ck4b85yxdTyelsMUgFUXiw==", "dev": true, "bin": { "parser": "bin/babel-parser.js" }, "engines": { "node": ">=6.0.0" } }, "node_modules/@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression": { "version": "7.22.15", "resolved": "https: "integrity": "sha512-FB9iYlz7rURmRJyXRKEnalYPPdn87H5no108cyuQQyMwlpJ2SJtpIUBI27kdTin956pz+LPypkPVPUTlxOmrsg==", "dev": true, "dependencies": { "@babel/helper-plugin-utils": "^7.22.5" }, "engines": { "node": ">=6.9.0" }, "peerDependencies": { "@babel/core": "^7.0.0" } }, "node_modules/@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining": { "version": "7.22.15", "resolved": "https: "integrity": "sha512-Hyph9LseGvAeeXzikV88bczhsrLrIZqDPxO+sSmAunMPaGrBGhfMWzCPYTtiW9t+HzSE2wtV8e5cc5P6r1xMDQ==", "dev": true, "dependencies": { "@babel/helper-plugin-utils": "^7.22.5", "@babel/helper-skip-transparent-expression-wrappers": "^7.22.5", "@babel/plugin-transform-optional-chaining": "^7.22.15" }, "engines": { "node": ">=6.9.0" }, "peerDependencies": { "@babel/core": "^7.13.0" } }, "node_modules/@babel/plugin-proposal-private-property-in-object": { "version": "7.21.0-placeholder-for-preset-env.2", "resolved": "https: "integrity": "sha512-SOSkfJDddaM7mak6cPEpswyTRnuRltl429hMraQEglW+OkovnCzsiszTmsrlY "dev": true, "engines": { "node": ">=6.9.0" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, "node_modules/@babel/plugin-syntax-async-generators": { "version": "7.8.4", "resolved": "https: "integrity": "sha512-tycmZxkGfZaxhMRbXlPXuVFpdWlXpir2W4AMhSJgRKzk/eDlIXOhb2LHWoLpDF7TEHylV5zNhykX6KAgHJmTNw==", "dev": true, "dependencies": { "@babel/helper-plugin-utils": "^7.8.0" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, "node_modules/@babel/plugin-syntax-bigint": { "version": "7.8.3", "resolved": "https: "integrity": "sha512-wnTnFlG+YxQm3vDxpGE57Pj0srRU4sHE/mDkt1qv2YJJSeUAec2ma4WLUnUPeKjyrfntVwe/N6dCXpU+zL3Npg==", "dev": true, "dependencies": { "@babel/helper-plugin-utils": "^7.8.0" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, "node_modules/@babel/plugin-syntax-class-properties": { "version": "7.12.13", "resolved": "https: "integrity": 
"sha512-fm4idjKla0YahUNgFNLCB0qySdsoPiZP3iQE3rky0mBUtMZ23yDJ9SJdg6dXTSDnulOVqiF3Hgr9nbXvXTQZYA==", "dev": true, "dependencies": { "@babel/helper-plugin-utils": "^7.12.13" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, "node_modules/@babel/plugin-syntax-class-static-block": { "version": "7.14.5", "resolved": "https: "integrity": "sha512-b+YyPmr6ldyNnM6sqYeMWE+bgJcJpO6yS4QD7ymxgH34GBPNDM/THBh8iunyvKIZztiwLH4CJZ0RxTk9emgpjw==", "dev": true, "dependencies": { "@babel/helper-plugin-utils": "^7.14.5" }, "engines": { "node": ">=6.9.0" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, "node_modules/@babel/plugin-syntax-dynamic-import": { "version": "7.8.3", "resolved": "https: "integrity": "sha512-5gdGbFon+PszYzqs83S3E5mpi7/y/8M9eC90MRTZfduQOYW76ig6SOSPNe41IG5LoP3FGBn2N0RjVDSQiS94kQ==", "dev": true, "dependencies": { "@babel/helper-plugin-utils": "^7.8.0" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, "node_modules/@babel/plugin-syntax-export-namespace-from": { "version": "7.8.3", "resolved": "https: "integrity": "sha512-MXf5laXo6c1IbEbegDmzGPwGNTsHZmEy6QGznu5Sh2UCWvueywb2ee+CCE4zQiZstxU9BMoQO9i6zUFSY0Kj0Q==", "dev": true, "dependencies": { "@babel/helper-plugin-utils": "^7.8.3" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, "node_modules/@babel/plugin-syntax-import-assertions": { "version": "7.22.5", "resolved": "https: "integrity": "sha512-rdV97N7KqsRzeNGoWUOK6yUsWarLjE5Su/Snk9IYPU9CwkWHs4t+rTGOvffTR8XGkJMTAdLfO0xVnXm8wugIJg==", "dev": true, "dependencies": { "@babel/helper-plugin-utils": "^7.22.5" }, "engines": { "node": ">=6.9.0" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, "node_modules/@babel/plugin-syntax-import-attributes": { "version": "7.22.5", "resolved": "https: "integrity": "sha512-KwvoWDeNKPETmozyFE0P2rOLqh39EoQHNjqizrI5B8Vt0ZNS7M56s7dAiAqbYfiAYOuIzIh96z3iR2ktgu3tEg==", "dev": true, "dependencies": { "@babel/helper-plugin-utils": "^7.22.5" }, "engines": { "node": ">=6.9.0" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, "node_modules/@babel/plugin-syntax-import-meta": { "version": "7.10.4", "resolved": "https: "integrity": "sha512-Yqfm+XDx0+Prh3VSeEQCPU81yC+JWZ2pDPFSS4ZdpfZhp4MkFMaDC1UqseovEKwSUpnIL7+vK+Clp7bfh0iD7g==", "dev": true, "dependencies": { "@babel/helper-plugin-utils": "^7.10.4" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, "node_modules/@babel/plugin-syntax-json-strings": { "version": "7.8.3", "resolved": "https: "integrity": "sha512-lY6kdGpWHvjoe2vk4WrAapEuBR69EMxZl+RoGRhrFGNYVK8mOPAW8VfbT/ZgrFbXlDNiiaxQnAtgVCZ6jv30EA==", "dev": true, "dependencies": { "@babel/helper-plugin-utils": "^7.8.0" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, "node_modules/@babel/plugin-syntax-jsx": { "version": "7.22.5", "resolved": "https: "integrity": "sha512-gvyP4hZrgrs/wWMaocvxZ44Hw0b3W8Pe+cMxc8V1ULQ07oh8VNbIRaoD1LRZVTvD+0nieDKjfgKg89sD7rrKrg==", "dev": true, "dependencies": { "@babel/helper-plugin-utils": "^7.22.5" }, "engines": { "node": ">=6.9.0" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, "node_modules/@babel/plugin-syntax-logical-assignment-operators": { "version": "7.10.4", "resolved": "https: "integrity": "sha512-d8waShlpFDinQ5MtvGU9xDAOzKH47+FFoney2baFIoMr952hKOLp1HR7VszoZvOsV/4+RRszNY7D17ba0te0ig==", "dev": true, "dependencies": { "@babel/helper-plugin-utils": "^7.10.4" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, "node_modules/@babel/plugin-syntax-nullish-coalescing-operator": { "version": "7.8.3", "resolved": "https: "integrity": 
"sha512-aSff4zPII1u2QD7y+F8oDsz19ew4IGEJg9SVW+bqwpwtfFleiQDMdzA/R+UlWDzfnHFCxxleFT0PMIrR36XLNQ==", "dev": true, "dependencies": { "@babel/helper-plugin-utils": "^7.8.0" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, "node_modules/@babel/plugin-syntax-numeric-separator": { "version": "7.10.4", "resolved": "https: "integrity": "sha512-9H6YdfkcK/uOnY/K7/aA2xpzaAgkQn37yzWUMRK7OaPOqOpGS1+n0H5hxT9AUw9EsSjPW8SVyMJwYRtWs3X3ug==", "dev": true, "dependencies": { "@babel/helper-plugin-utils": "^7.10.4" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, "node_modules/@babel/plugin-syntax-object-rest-spread": { "version": "7.8.3", "resolved": "https: "integrity": "sha512-XoqMijGZb9y3y2XskN+P1wUGiVwWZ5JmoDRwx5+3GmEplNyVM2s2Dg8ILFQm8rWM48orGy5YpI5Bl8U1y7ydlA==", "dev": true, "dependencies": { "@babel/helper-plugin-utils": "^7.8.0" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, "node_modules/@babel/plugin-syntax-optional-catch-binding": { "version": "7.8.3", "resolved": "https: "integrity": "sha512-6VPD0Pc1lpTqw0aKoeRTMiB+kWhAoT24PA+ksWSBrFtl5SIRVpZlwN3NNPQjehA2E/91FV3RjLWoVTglWcSV3Q==", "dev": true, "dependencies": { "@babel/helper-plugin-utils": "^7.8.0" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, "node_modules/@babel/plugin-syntax-optional-chaining": { "version": "7.8.3", "resolved": "https: "integrity": "sha512-KoK9ErH1MBlCPxV0VANkXW2/dw4vlbGDrFgz8bmUsBGYkFRcbRwMh6cIJubdPrkxRwuGdtCk0v/wPTKbQgBjkg==", "dev": true, "dependencies": { "@babel/helper-plugin-utils": "^7.8.0" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, "node_modules/@babel/plugin-syntax-private-property-in-object": { "version": "7.14.5", "resolved": "https: "integrity": "sha512-0wVnp9dxJ72ZUJDV27ZfbSj6iHLoytYZmh3rFcxNnvsJF3ktkzLDZPy/mA17HGsaQT3/DQsWYX1f1QGWkCoVUg==", "dev": true, "dependencies": { "@babel/helper-plugin-utils": "^7.14.5" }, "engines": { "node": ">=6.9.0" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, "node_modules/@babel/plugin-syntax-top-level-await": { "version": "7.14.5", "resolved": "https: "integrity": "sha512-hx++upLv5U1rgYfwe1xBQUhRmU41NEvpUvrp8jkrSCdvGSnM5/qdRMtylJ6PG5OFkBaHkbTAKTnd3/YyESRHFw==", "dev": true, "dependencies": { "@babel/helper-plugin-utils": "^7.14.5" }, "engines": { "node": ">=6.9.0" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, "node_modules/@babel/plugin-syntax-typescript": { "version": "7.22.5", "resolved": "https: "integrity": "sha512-1mS2o03i7t1c6VzH6fdQ3OA8tcEIxwG18zIPRp+UY1Ihv6W+XZzBCVxExF9upussPXJ0xE9XRHwMoNs1ep/nRQ==", "dev": true, "dependencies": { "@babel/helper-plugin-utils": "^7.22.5" }, "engines": { "node": ">=6.9.0" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, "node_modules/@babel/plugin-syntax-unicode-sets-regex": { "version": "7.18.6", "resolved": "https: "integrity": "sha512-727YkEAPwSIQTv5im8QHz3upqp92JTWhidIC81Tdx4VJYIte/VndKf1qKrfnnhPLiPghStWfvC/iFaMCQu7Nqg==", "dev": true, "dependencies": { "@babel/helper-create-regexp-features-plugin": "^7.18.6", "@babel/helper-plugin-utils": "^7.18.6" }, "engines": { "node": ">=6.9.0" }, "peerDependencies": { "@babel/core": "^7.0.0" } }, "node_modules/@babel/plugin-transform-arrow-functions": { "version": "7.22.5", "resolved": "https: "integrity": "sha512-26lTNXoVRdAnsaDXPpvCNUq+OVWEVC6bx7Vvz9rC53F2bagUWW4u4ii2+h8Fejfh7RYqPxn+libeFBBck9muEw==", "dev": true, "dependencies": { "@babel/helper-plugin-utils": "^7.22.5" }, "engines": { "node": ">=6.9.0" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, "node_modules/@babel/plugin-transform-async-generator-functions": { 
"version": "7.22.15", "resolved": "https: "integrity": "sha512-jBm1Es25Y+tVoTi5rfd5t1KLmL8ogLKpXszboWOTTtGFGz2RKnQe2yn7HbZ+kb/B8N0FVSGQo874NSlOU1T4+w==", "dev": true, "dependencies": { "@babel/helper-environment-visitor": "^7.22.5", "@babel/helper-plugin-utils": "^7.22.5", "@babel/helper-remap-async-to-generator": "^7.22.9", "@babel/plugin-syntax-async-generators": "^7.8.4" }, "engines": { "node": ">=6.9.0" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, "node_modules/@babel/plugin-transform-async-to-generator": { "version": "7.22.5", "resolved": "https: "integrity": "sha512-b1A8D8ZzE/VhNDoV1MSJTnpKkCG5bJo+19R4o4oy03zM7ws8yEMK755j61Dc3EyvdysbqH5BOOTquJ7ZX9C6vQ==", "dev": true, "dependencies": { "@babel/helper-module-imports": "^7.22.5", "@babel/helper-plugin-utils": "^7.22.5", "@babel/helper-remap-async-to-generator": "^7.22.5" }, "engines": { "node": ">=6.9.0" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, "node_modules/@babel/plugin-transform-block-scoped-functions": { "version": "7.22.5", "resolved": "https: "integrity": "sha512-tdXZ2UdknEKQWKJP1KMNmuF5Lx3MymtMN/pvA+p/VEkhK8jVcQ1fzSy8KM9qRYhAf2/lV33hoMPKI/xaI9sADA==", "dev": true, "dependencies": { "@babel/helper-plugin-utils": "^7.22.5" }, "engines": { "node": ">=6.9.0" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, "node_modules/@babel/plugin-transform-block-scoping": { "version": "7.23.0", "resolved": "https: "integrity": "sha512-cOsrbmIOXmf+5YbL99/S49Y3j46k/T16b9ml8bm9lP6N9US5iQ2yBK7gpui1pg0V/WMcXdkfKbTb7HXq9u+v4g==", "dev": true, "dependencies": { "@babel/helper-plugin-utils": "^7.22.5" }, "engines": { "node": ">=6.9.0" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, "node_modules/@babel/plugin-transform-class-properties": { "version": "7.22.5", "resolved": "https: "integrity": "sha512-nDkQ0NfkOhPTq8YCLiWNxp1+f9fCobEjCb0n8WdbNUBc4IB5V7P1QnX9IjpSoquKrXF5SKojHleVNs2vGeHCHQ==", "dev": true, "dependencies": { "@babel/helper-create-class-features-plugin": "^7.22.5", "@babel/helper-plugin-utils": "^7.22.5" }, "engines": { "node": ">=6.9.0" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, "node_modules/@babel/plugin-transform-class-static-block": { "version": "7.22.11", "resolved": "https: "integrity": "sha512-GMM8gGmqI7guS/llMFk1bJDkKfn3v3C4KHK9Yg1ey5qcHcOlKb0QvcMrgzvxo+T03/4szNh5lghY+fEC98Kq9g==", "dev": true, "dependencies": { "@babel/helper-create-class-features-plugin": "^7.22.11", "@babel/helper-plugin-utils": "^7.22.5", "@babel/plugin-syntax-class-static-block": "^7.14.5" }, "engines": { "node": ">=6.9.0" }, "peerDependencies": { "@babel/core": "^7.12.0" } }, "node_modules/@babel/plugin-transform-classes": { "version": "7.22.15", "resolved": "https: "integrity": "sha512-VbbC3PGjBdE0wAWDdHM9G8Gm977pnYI0XpqMd6LrKISj8/DJXEsWqgRuTYaNE9Bv0JGhTZUzHDlMk18IpOuoqw==", "dev": true, "dependencies": { "@babel/helper-annotate-as-pure": "^7.22.5", "@babel/helper-compilation-targets": "^7.22.15", "@babel/helper-environment-visitor": "^7.22.5", "@babel/helper-function-name": "^7.22.5", "@babel/helper-optimise-call-expression": "^7.22.5", "@babel/helper-plugin-utils": "^7.22.5", "@babel/helper-replace-supers": "^7.22.9", "@babel/helper-split-export-declaration": "^7.22.6", "globals": "^11.1.0" }, "engines": { "node": ">=6.9.0" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, "node_modules/@babel/plugin-transform-computed-properties": { "version": "7.22.5", "resolved": "https: "integrity": "sha512-4GHWBgRf0krxPX+AaPtgBAlTgTeZmqDynokHOX7aqqAB4tHs3U2Y02zH6ETFdLZGcg9UQSD1WCmkVrE9ErHeOg==", "dev": true, 
"dependencies": { "@babel/helper-plugin-utils": "^7.22.5", "@babel/template": "^7.22.5" }, "engines": { "node": ">=6.9.0" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, "node_modules/@babel/plugin-transform-destructuring": { "version": "7.23.0", "resolved": "https: "integrity": "sha512-vaMdgNXFkYrB+8lbgniSYWHsgqK5gjaMNcc84bMIOMRLH0L9AqYq3hwMdvnyqj1OPqea8UtjPEuS/DCenah1wg==", "dev": true, "dependencies": { "@babel/helper-plugin-utils": "^7.22.5" }, "engines": { "node": ">=6.9.0" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, "node_modules/@babel/plugin-transform-dotall-regex": { "version": "7.22.5", "resolved": "https: "integrity": "sha512-5/Yk9QxCQCl+sOIB1WelKnVRxTJDSAIxtJLL2/pqL14ZVlbH0fUQUZa/T5/UnQtBNgghR7mfB8ERBKyKPCi7Vw==", "dev": true, "dependencies": { "@babel/helper-create-regexp-features-plugin": "^7.22.5", "@babel/helper-plugin-utils": "^7.22.5" }, "engines": { "node": ">=6.9.0" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, "node_modules/@babel/plugin-transform-duplicate-keys": { "version": "7.22.5", "resolved": "https: "integrity": "sha512-dEnYD+9BBgld5VBXHnF/DbYGp3fqGMsyxKbtD1mDyIA7AkTSpKXFhCVuj/oQVOoALfBs77DudA0BE4d5mcpmqw==", "dev": true, "dependencies": { "@babel/helper-plugin-utils": "^7.22.5" }, "engines": { "node": ">=6.9.0" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, "node_modules/@babel/plugin-transform-dynamic-import": { "version": "7.22.11", "resolved": "https: "integrity": "sha512-g/21plo58sfteWjaO0ZNVb+uEOkJNjAaHhbejrnBmu011l/eNDScmkbjCC3l4FKb10ViaGU4aOkFznSu2zRHgA==", "dev": true, "dependencies": { "@babel/helper-plugin-utils": "^7.22.5", "@babel/plugin-syntax-dynamic-import": "^7.8.3" }, "engines": { "node": ">=6.9.0" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, "node_modules/@babel/plugin-transform-exponentiation-operator": { "version": "7.22.5", "resolved": "https: "integrity": "sha512-vIpJFNM/FjZ4rh1myqIya9jXwrwwgFRHPjT3DkUA9ZLHuzox8jiXkOLvwm1H+PQIP3CqfC++WPKeuDi0Sjdj1g==", "dev": true, "dependencies": { "@babel/helper-builder-binary-assignment-operator-visitor": "^7.22.5", "@babel/helper-plugin-utils": "^7.22.5" }, "engines": { "node": ">=6.9.0" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, "node_modules/@babel/plugin-transform-export-namespace-from": { "version": "7.22.11", "resolved": "https: "integrity": "sha512-xa7aad7q7OiT8oNZ1mU7NrISjlSkVdMbNxn9IuLZyL9AJEhs1Apba3I+u5riX1dIkdptP5EKDG5XDPByWxtehw==", "dev": true, "dependencies": { "@babel/helper-plugin-utils": "^7.22.5", "@babel/plugin-syntax-export-namespace-from": "^7.8.3" }, "engines": { "node": ">=6.9.0" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, "node_modules/@babel/plugin-transform-for-of": { "version": "7.22.15", "resolved": "https: "integrity": "sha512-me6VGeHsx30+xh9fbDLLPi0J1HzmeIIyenoOQHuw2D4m2SAU3NrspX5XxJLBpqn5yrLzrlw2Iy3RA "dev": true, "dependencies": { "@babel/helper-plugin-utils": "^7.22.5" }, "engines": { "node": ">=6.9.0" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, "node_modules/@babel/plugin-transform-function-name": { "version": "7.22.5", "resolved": "https: "integrity": "sha512-UIzQNMS0p0HHiQm3oelztj+ECwFnj+ZRV4KnguvlsD2of1whUeM6o7wGNj6oLwcDoAXQ8gEqfgC24D+VdIcevg==", "dev": true, "dependencies": { "@babel/helper-compilation-targets": "^7.22.5", "@babel/helper-function-name": "^7.22.5", "@babel/helper-plugin-utils": "^7.22.5" }, "engines": { "node": ">=6.9.0" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, "node_modules/@babel/plugin-transform-json-strings": { "version": "7.22.11", "resolved": 
"https: "integrity": "sha512-CxT5tCqpA9/jXFlme9xIBCc5RPtdDq3JpkkhgHQqtDdiTnTI0jtZ0QzXhr5DILeYifDPp2wvY2ad+7+hLMW5Pw==", "dev": true, "dependencies": { "@babel/helper-plugin-utils": "^7.22.5", "@babel/plugin-syntax-json-strings": "^7.8.3" }, "engines": { "node": ">=6.9.0" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, "node_modules/@babel/plugin-transform-literals": { "version": "7.22.5", "resolved": "https: "integrity": "sha512-fTLj4D79M+mepcw3dgFBTIDYpbcB9Sm0bpm4ppXPaO+U+PKFFyV9MGRvS0gvGw62sd10kT5lRMKXAADb9pWy8g==", "dev": true, "dependencies": { "@babel/helper-plugin-utils": "^7.22.5" }, "engines": { "node": ">=6.9.0" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, "node_modules/@babel/plugin-transform-logical-assignment-operators": { "version": "7.22.11", "resolved": "https: "integrity": "sha512-qQwRTP4+6xFCDV5k7gZBF3C31K34ut0tbEcTKxlX/0KXxm9GLcO14p570aWxFvVzx6QAfPgq7gaeIHXJC8LswQ==", "dev": true, "dependencies": { "@babel/helper-plugin-utils": "^7.22.5", "@babel/plugin-syntax-logical-assignment-operators": "^7.10.4" }, "engines": { "node": ">=6.9.0" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, "node_modules/@babel/plugin-transform-member-expression-literals": { "version": "7.22.5", "resolved": "https: "integrity": "sha512-RZEdkNtzzYCFl9SE9ATaUMTj2hqMb4StarOJLrZRbqqU4HSBE7UlBw9WBWQiDzrJZJdUWiMTVDI6Gv/8DPvfew==", "dev": true, "dependencies": { "@babel/helper-plugin-utils": "^7.22.5" }, "engines": { "node": ">=6.9.0" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, "node_modules/@babel/plugin-transform-modules-amd": { "version": "7.23.0", "resolved": "https: "integrity": "sha512-xWT5gefv2HGSm4QHtgc1sYPbseOyf+FFDo2JbpE25GWl5BqTGO9IMwTYJRoIdjsF85GE+VegHxSCUt5EvoYTAw==", "dev": true, "dependencies": { "@babel/helper-module-transforms": "^7.23.0", "@babel/helper-plugin-utils": "^7.22.5" }, "engines": { "node": ">=6.9.0" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, "node_modules/@babel/plugin-transform-modules-commonjs": { "version": "7.23.0", "resolved": "https: "integrity": "sha512-32Xzss14/UVc7k9g775yMIvkVK8xwKE0DPdP5JTapr3+Z9w4tzeOuLNY6BXDQR6BdnzIlXnCGAzsk/ICHBLVWQ==", "dev": true, "dependencies": { "@babel/helper-module-transforms": "^7.23.0", "@babel/helper-plugin-utils": "^7.22.5", "@babel/helper-simple-access": "^7.22.5" }, "engines": { "node": ">=6.9.0" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, "node_modules/@babel/plugin-transform-modules-systemjs": { "version": "7.23.0", "resolved": "https: "integrity": "sha512-qBej6ctXZD2f+DhlOC9yO47yEYgUh5CZNz/aBoH4j/3NOlRfJXJbY7xDQCqQVf9KbrqGzIWER1f23doHGrIHFg==", "dev": true, "dependencies": { "@babel/helper-hoist-variables": "^7.22.5", "@babel/helper-module-transforms": "^7.23.0", "@babel/helper-plugin-utils": "^7.22.5", "@babel/helper-validator-identifier": "^7.22.20" }, "engines": { "node": ">=6.9.0" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, "node_modules/@babel/plugin-transform-modules-umd": { "version": "7.22.5", "resolved": "https: "integrity": "sha512-+S6kzefN/E1vkSsKx8kmQuqeQsvCKCd1fraCM7zXm4SFoggI099Tr4G8U81+5gtMdUeMQ4ipdQffbKLX0/7dBQ==", "dev": true, "dependencies": { "@babel/helper-module-transforms": "^7.22.5", "@babel/helper-plugin-utils": "^7.22.5" }, "engines": { "node": ">=6.9.0" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, "node_modules/@babel/plugin-transform-named-capturing-groups-regex": { "version": "7.22.5", "resolved": "https: "integrity": "sha512-YgLLKmS3aUBhHaxp5hi1WJTgOUb/NCuDHzGT9z9WTt3YG+CPRhJs6nprbStx6DnWM4dh6gt7SU3sZodbZ08adQ==", 
"dev": true, "dependencies": { "@babel/helper-create-regexp-features-plugin": "^7.22.5", "@babel/helper-plugin-utils": "^7.22.5" }, "engines": { "node": ">=6.9.0" }, "peerDependencies": { "@babel/core": "^7.0.0" } }, "node_modules/@babel/plugin-transform-new-target": { "version": "7.22.5", "resolved": "https: "integrity": "sha512-AsF7K0Fx/cNKVyk3a+DW0JLo+Ua598/NxMRvxDnkpCIGFh43+h/v2xyhRUYf6oD8gE4QtL83C7zZVghMjHd+iw==", "dev": true, "dependencies": { "@babel/helper-plugin-utils": "^7.22.5" }, "engines": { "node": ">=6.9.0" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, "node_modules/@babel/plugin-transform-nullish-coalescing-operator": { "version": "7.22.11", "resolved": "https: "integrity": "sha512-YZWOw4HxXrotb5xsjMJUDlLgcDXSfO9eCmdl1bgW4+/lAGdkjaEvOnQ4p5WKKdUgSzO39dgPl0pTnfxm0OAXcg==", "dev": true, "dependencies": { "@babel/helper-plugin-utils": "^7.22.5", "@babel/plugin-syntax-nullish-coalescing-operator": "^7.8.3" }, "engines": { "node": ">=6.9.0" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, "node_modules/@babel/plugin-transform-numeric-separator": { "version": "7.22.11", "resolved": "https: "integrity": "sha512-3dzU4QGPsILdJbASKhF/V2TVP+gJya1PsueQCxIPCEcerqF21oEcrob4mzjsp2Py/1nLfF5m+xYNMDpmA8vffg==", "dev": true, "dependencies": { "@babel/helper-plugin-utils": "^7.22.5", "@babel/plugin-syntax-numeric-separator": "^7.10.4" }, "engines": { "node": ">=6.9.0" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, "node_modules/@babel/plugin-transform-object-rest-spread": { "version": "7.22.15", "resolved": "https: "integrity": "sha512-fEB+I1+gAmfAyxZcX1+ZUwLeAuuf8VIg67CTznZE0MqVFumWkh8xWtn58I4dxdVf080wn7gzWoF8vndOViJe9Q==", "dev": true, "dependencies": { "@babel/compat-data": "^7.22.9", "@babel/helper-compilation-targets": "^7.22.15", "@babel/helper-plugin-utils": "^7.22.5", "@babel/plugin-syntax-object-rest-spread": "^7.8.3", "@babel/plugin-transform-parameters": "^7.22.15" }, "engines": { "node": ">=6.9.0" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, "node_modules/@babel/plugin-transform-object-super": { "version": "7.22.5", "resolved": "https: "integrity": "sha512-klXqyaT9trSjIUrcsYIfETAzmOEZL3cBYqOYLJxBHfMFFggmXOv+NYSX/Jbs9mzMVESw/WycLFPRx8ba/b2Ipw==", "dev": true, "dependencies": { "@babel/helper-plugin-utils": "^7.22.5", "@babel/helper-replace-supers": "^7.22.5" }, "engines": { "node": ">=6.9.0" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, "node_modules/@babel/plugin-transform-optional-catch-binding": { "version": "7.22.11", "resolved": "https: "integrity": "sha512-rli0WxesXUeCJnMYhzAglEjLWVDF6ahb45HuprcmQuLidBJFWjNnOzssk2kuc6e33FlLaiZhG/kUIzUMWdBKaQ==", "dev": true, "dependencies": { "@babel/helper-plugin-utils": "^7.22.5", "@babel/plugin-syntax-optional-catch-binding": "^7.8.3" }, "engines": { "node": ">=6.9.0" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, "node_modules/@babel/plugin-transform-optional-chaining": { "version": "7.23.0", "resolved": "https: "integrity": "sha512-sBBGXbLJjxTzLBF5rFWaikMnOGOk/BmK6vVByIdEggZ7Vn6CvWXZyRkkLFK6WE0IF8jSliyOkUN6SScFgzCM0g==", "dev": true, "dependencies": { "@babel/helper-plugin-utils": "^7.22.5", "@babel/helper-skip-transparent-expression-wrappers": "^7.22.5", "@babel/plugin-syntax-optional-chaining": "^7.8.3" }, "engines": { "node": ">=6.9.0" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, "node_modules/@babel/plugin-transform-parameters": { "version": "7.22.15", "resolved": "https: "integrity": 
"sha512-hjk7qKIqhyzhhUvRT683TYQOFa/4cQKwQy7ALvTpODswN40MljzNDa0YldevS6tGbxwaEKVn502JmY0dP7qEtQ==", "dev": true, "dependencies": { "@babel/helper-plugin-utils": "^7.22.5" }, "engines": { "node": ">=6.9.0" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, "node_modules/@babel/plugin-transform-private-methods": { "version": "7.22.5", "resolved": "https: "integrity": "sha512-PPjh4gyrQnGe97JTalgRGMuU4icsZFnWkzicB/fUtzlKUqvsWBKEpPPfr5a2JiyirZkHxnAqkQMO5Z5B2kK3fA==", "dev": true, "dependencies": { "@babel/helper-create-class-features-plugin": "^7.22.5", "@babel/helper-plugin-utils": "^7.22.5" }, "engines": { "node": ">=6.9.0" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, "node_modules/@babel/plugin-transform-private-property-in-object": { "version": "7.22.11", "resolved": "https: "integrity": "sha512-sSCbqZDBKHetvjSwpyWzhuHkmW5RummxJBVbYLkGkaiTOWGxml7SXt0iWa03bzxFIx7wOj3g/ILRd0RcJKBeSQ==", "dev": true, "dependencies": { "@babel/helper-annotate-as-pure": "^7.22.5", "@babel/helper-create-class-features-plugin": "^7.22.11", "@babel/helper-plugin-utils": "^7.22.5", "@babel/plugin-syntax-private-property-in-object": "^7.14.5" }, "engines": { "node": ">=6.9.0" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, "node_modules/@babel/plugin-transform-property-literals": { "version": "7.22.5", "resolved": "https: "integrity": "sha512-TiOArgddK3mK/x1Qwf5hay2pxI6wCZnvQqrFSqbtg1GLl2JcNMitVH/YnqjP+M31pLUeTfzY1HAXFDnUBV30rQ==", "dev": true, "dependencies": { "@babel/helper-plugin-utils": "^7.22.5" }, "engines": { "node": ">=6.9.0" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, "node_modules/@babel/plugin-transform-regenerator": { "version": "7.22.10", "resolved": "https: "integrity": "sha512-F28b1mDt8KcT5bUyJc/U9nwzw6cV+UmTeRlXYIl2TNqMMJif0Jeey9/RQ3C4NOd2zp0/TRsDns9ttj2L523rsw==", "dev": true, "dependencies": { "@babel/helper-plugin-utils": "^7.22.5", "regenerator-transform": "^0.15.2" }, "engines": { "node": ">=6.9.0" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, "node_modules/@babel/plugin-transform-reserved-words": { "version": "7.22.5", "resolved": "https: "integrity": "sha512-DTtGKFRQUDm8svigJzZHzb/2xatPc6TzNvAIJ5GqOKDsGFYgAskjRulbR/vGsPKq3OPqtexnz327qYpP57RFyA==", "dev": true, "dependencies": { "@babel/helper-plugin-utils": "^7.22.5" }, "engines": { "node": ">=6.9.0" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, "node_modules/@babel/plugin-transform-shorthand-properties": { "version": "7.22.5", "resolved": "https: "integrity": "sha512-vM4fq9IXHscXVKzDv5itkO1X52SmdFBFcMIBZ2FRn2nqVYqw6dBexUgMvAjHW+KXpPPViD/Yo3GrDEBaRC0QYA==", "dev": true, "dependencies": { "@babel/helper-plugin-utils": "^7.22.5" }, "engines": { "node": ">=6.9.0" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, "node_modules/@babel/plugin-transform-spread": { "version": "7.22.5", "resolved": "https: "integrity": "sha512-5ZzDQIGyvN4w8+dMmpohL6MBo+l2G7tfC/O2Dg7/hjpgeWvUx8FzfeOKxGog9IimPa4YekaQ9PlDqTLOljkcxg==", "dev": true, "dependencies": { "@babel/helper-plugin-utils": "^7.22.5", "@babel/helper-skip-transparent-expression-wrappers": "^7.22.5" }, "engines": { "node": ">=6.9.0" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, "node_modules/@babel/plugin-transform-sticky-regex": { "version": "7.22.5", "resolved": "https: "integrity": "sha512-zf7LuNpHG0iEeiyCNwX4j3gDg1jgt1k3ZdXBKbZSoA3BbGQGvMiSvfbZRR3Dr3aeJe3ooWFZxOOG3IRStYp2Bw==", "dev": true, "dependencies": { "@babel/helper-plugin-utils": "^7.22.5" }, "engines": { "node": ">=6.9.0" }, "peerDependencies": { "@babel/core": 
"^7.0.0-0" } }, "node_modules/@babel/plugin-transform-template-literals": { "version": "7.22.5", "resolved": "https: "integrity": "sha512-5ciOehRNf+EyUeewo8NkbQiUs4d6ZxiHo6BcBcnFlgiJfu16q0bQUw9Jvo0b0gBKFG1SMhDSjeKXSYuJLeFSMA==", "dev": true, "dependencies": { "@babel/helper-plugin-utils": "^7.22.5" }, "engines": { "node": ">=6.9.0" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, "node_modules/@babel/plugin-transform-typeof-symbol": { "version": "7.22.5", "resolved": "https: "integrity": "sha512-bYkI5lMzL4kPii4HHEEChkD0rkc+nvnlR6+o/qdqR6zrm0Sv/nodmyLhlq2DO0YKLUNd2VePmPRjJXSBh9OIdA==", "dev": true, "dependencies": { "@babel/helper-plugin-utils": "^7.22.5" }, "engines": { "node": ">=6.9.0" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, "node_modules/@babel/plugin-transform-unicode-escapes": { "version": "7.22.10", "resolved": "https: "integrity": "sha512-lRfaRKGZCBqDlRU3UIFovdp9c9mEvlylmpod0/OatICsSfuQ9YFthRo1tpTkGsklEefZdqlEFdY4A2dwTb6ohg==", "dev": true, "dependencies": { "@babel/helper-plugin-utils": "^7.22.5" }, "engines": { "node": ">=6.9.0" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, "node_modules/@babel/plugin-transform-unicode-property-regex": { "version": "7.22.5", "resolved": "https: "integrity": "sha512-HCCIb+CbJIAE6sXn5CjFQXMwkCClcOfPCzTlilJ8cUatfzwHlWQkbtV0zD338u9dZskwvuOYTuuaMaA8J5EI5A==", "dev": true, "dependencies": { "@babel/helper-create-regexp-features-plugin": "^7.22.5", "@babel/helper-plugin-utils": "^7.22.5" }, "engines": { "node": ">=6.9.0" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, "node_modules/@babel/plugin-transform-unicode-regex": { "version": "7.22.5", "resolved": "https: "integrity": "sha512-028laaOKptN5vHJf9/Arr/HiJekMd41hOEZYvNsrsXqJ7YPYuX2bQxh31fkZzGmq3YqHRJzYFFAVYvKfMPKqyg==", "dev": true, "dependencies": { "@babel/helper-create-regexp-features-plugin": "^7.22.5", "@babel/helper-plugin-utils": "^7.22.5" }, "engines": { "node": ">=6.9.0" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, "node_modules/@babel/plugin-transform-unicode-sets-regex": { "version": "7.22.5", "resolved": "https: "integrity": "sha512-lhMfi4FC15j13eKrh3DnYHjpGj6UKQHtNKTbtc1igvAhRy4+kLhV07OpLcsN0VgDEw/MjAvJO4BdMJsHwMhzCg==", "dev": true, "dependencies": { "@babel/helper-create-regexp-features-plugin": "^7.22.5", "@babel/helper-plugin-utils": "^7.22.5" }, "engines": { "node": ">=6.9.0" }, "peerDependencies": { "@babel/core": "^7.0.0" } }, "node_modules/@babel/preset-env": { "version": "7.22.20", "resolved": "https: "integrity": "sha512-11MY04gGC4kSzlPHRfvVkNAZhUxOvm7DCJ37hPDnUENwe06npjIRAfInEMTGSb4LZK5ZgDFkv5hw0lGebHeTyg==", "dev": true, "dependencies": { "@babel/compat-data": "^7.22.20", "@babel/helper-compilation-targets": "^7.22.15", "@babel/helper-plugin-utils": "^7.22.5", "@babel/helper-validator-option": "^7.22.15", "@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression": "^7.22.15", "@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining": "^7.22.15", "@babel/plugin-proposal-private-property-in-object": "7.21.0-placeholder-for-preset-env.2", "@babel/plugin-syntax-async-generators": "^7.8.4", "@babel/plugin-syntax-class-properties": "^7.12.13", "@babel/plugin-syntax-class-static-block": "^7.14.5", "@babel/plugin-syntax-dynamic-import": "^7.8.3", "@babel/plugin-syntax-export-namespace-from": "^7.8.3", "@babel/plugin-syntax-import-assertions": "^7.22.5", "@babel/plugin-syntax-import-attributes": "^7.22.5", "@babel/plugin-syntax-import-meta": "^7.10.4", "@babel/plugin-syntax-json-strings": "^7.8.3", 
"@babel/plugin-syntax-logical-assignment-operators": "^7.10.4", "@babel/plugin-syntax-nullish-coalescing-operator": "^7.8.3", "@babel/plugin-syntax-numeric-separator": "^7.10.4", "@babel/plugin-syntax-object-rest-spread": "^7.8.3", "@babel/plugin-syntax-optional-catch-binding": "^7.8.3", "@babel/plugin-syntax-optional-chaining": "^7.8.3", "@babel/plugin-syntax-private-property-in-object": "^7.14.5", "@babel/plugin-syntax-top-level-await": "^7.14.5", "@babel/plugin-syntax-unicode-sets-regex": "^7.18.6", "@babel/plugin-transform-arrow-functions": "^7.22.5", "@babel/plugin-transform-async-generator-functions": "^7.22.15", "@babel/plugin-transform-async-to-generator": "^7.22.5", "@babel/plugin-transform-block-scoped-functions": "^7.22.5", "@babel/plugin-transform-block-scoping": "^7.22.15", "@babel/plugin-transform-class-properties": "^7.22.5", "@babel/plugin-transform-class-static-block": "^7.22.11", "@babel/plugin-transform-classes": "^7.22.15", "@babel/plugin-transform-computed-properties": "^7.22.5", "@babel/plugin-transform-destructuring": "^7.22.15", "@babel/plugin-transform-dotall-regex": "^7.22.5", "@babel/plugin-transform-duplicate-keys": "^7.22.5", "@babel/plugin-transform-dynamic-import": "^7.22.11", "@babel/plugin-transform-exponentiation-operator": "^7.22.5", "@babel/plugin-transform-export-namespace-from": "^7.22.11", "@babel/plugin-transform-for-of": "^7.22.15", "@babel/plugin-transform-function-name": "^7.22.5", "@babel/plugin-transform-json-strings": "^7.22.11", "@babel/plugin-transform-literals": "^7.22.5", "@babel/plugin-transform-logical-assignment-operators": "^7.22.11", "@babel/plugin-transform-member-expression-literals": "^7.22.5", "@babel/plugin-transform-modules-amd": "^7.22.5", "@babel/plugin-transform-modules-commonjs": "^7.22.15", "@babel/plugin-transform-modules-systemjs": "^7.22.11", "@babel/plugin-transform-modules-umd": "^7.22.5", "@babel/plugin-transform-named-capturing-groups-regex": "^7.22.5", "@babel/plugin-transform-new-target": "^7.22.5", "@babel/plugin-transform-nullish-coalescing-operator": "^7.22.11", "@babel/plugin-transform-numeric-separator": "^7.22.11", "@babel/plugin-transform-object-rest-spread": "^7.22.15", "@babel/plugin-transform-object-super": "^7.22.5", "@babel/plugin-transform-optional-catch-binding": "^7.22.11", "@babel/plugin-transform-optional-chaining": "^7.22.15", "@babel/plugin-transform-parameters": "^7.22.15", "@babel/plugin-transform-private-methods": "^7.22.5", "@babel/plugin-transform-private-property-in-object": "^7.22.11", "@babel/plugin-transform-property-literals": "^7.22.5", "@babel/plugin-transform-regenerator": "^7.22.10", "@babel/plugin-transform-reserved-words": "^7.22.5", "@babel/plugin-transform-shorthand-properties": "^7.22.5", "@babel/plugin-transform-spread": "^7.22.5", "@babel/plugin-transform-sticky-regex": "^7.22.5", "@babel/plugin-transform-template-literals": "^7.22.5", "@babel/plugin-transform-typeof-symbol": "^7.22.5", "@babel/plugin-transform-unicode-escapes": "^7.22.10", "@babel/plugin-transform-unicode-property-regex": "^7.22.5", "@babel/plugin-transform-unicode-regex": "^7.22.5", "@babel/plugin-transform-unicode-sets-regex": "^7.22.5", "@babel/preset-modules": "0.1.6-no-external-plugins", "@babel/types": "^7.22.19", "babel-plugin-polyfill-corejs2": "^0.4.5", "babel-plugin-polyfill-corejs3": "^0.8.3", "babel-plugin-polyfill-regenerator": "^0.5.2", "core-js-compat": "^3.31.0", "semver": "^6.3.1" }, "engines": { "node": ">=6.9.0" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, 
"node_modules/@babel/preset-modules": { "version": "0.1.6-no-external-plugins", "resolved": "https: "integrity": "sha512-HrcgcIESLm9aIR842yhJ5RWan/gebQUJ6E/E5+rf0y9o6oj7w0Br+sWuL6kEQ/o/AdfvR1Je9jG18/gnpwjEyA==", "dev": true, "dependencies": { "@babel/helper-plugin-utils": "^7.0.0", "@babel/types": "^7.4.4", "esutils": "^2.0.2" }, "peerDependencies": { "@babel/core": "^7.0.0-0 || ^8.0.0-0 <8.0.0" } }, "node_modules/@babel/regjsgen": { "version": "0.8.0", "resolved": "https: "integrity": "sha512-x/rqGMdzj+fWZvCOYForTghzbtqPDZ5gPwaoNGHdgDfF2QA/XZbCBp4Moo5scrkAMPhB7z26XM/AaHuIJdgauA==", "dev": true }, "node_modules/@babel/runtime": { "version": "7.23.1", "resolved": "https: "integrity": "sha512-hC2v6p8ZSI/W0HUzh3V8C5g+NwSKzKPtJwSpTjwl0o297GP9+ZLQSkdvHz46CM3LqyoXxq+5G9komY+eSqSO0g==", "dev": true, "dependencies": { "regenerator-runtime": "^0.14.0" }, "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/template": { "version": "7.22.15", "resolved": "https: "integrity": "sha512-QPErUVm4uyJa60rkI73qneDacvdvzxshT3kksGqlGWYdOTIUOwJ7RDUL8sGqslY1uXWSL6xMFKEXDS3ox2uF0w==", "dev": true, "dependencies": { "@babel/code-frame": "^7.22.13", "@babel/parser": "^7.22.15", "@babel/types": "^7.22.15" }, "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/traverse": { "version": "7.23.2", "resolved": "https: "integrity": "sha512-azpe59SQ48qG6nu2CzcMLbxUudtN+dOM9kDbUqGq3HXUJRlo7i8fvPoxQUzYgLZ4cMVmuZgm8vvBpNeRhd6XSw==", "dev": true, "dependencies": { "@babel/code-frame": "^7.22.13", "@babel/generator": "^7.23.0", "@babel/helper-environment-visitor": "^7.22.20", "@babel/helper-function-name": "^7.23.0", "@babel/helper-hoist-variables": "^7.22.5", "@babel/helper-split-export-declaration": "^7.22.6", "@babel/parser": "^7.23.0", "@babel/types": "^7.23.0", "debug": "^4.1.0", "globals": "^11.1.0" }, "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/types": { "version": "7.23.0", "resolved": "https: "integrity": "sha512-0oIyUfKoI3mSqMvsxBdclDwxXKXAUA8v/apZbc+iSyARYou1o8ZGDxbUYyLFoW2arqS2jDGqJuZvv1d/io1axg==", "dev": true, "dependencies": { "@babel/helper-string-parser": "^7.22.5", "@babel/helper-validator-identifier": "^7.22.20", "to-fast-properties": "^2.0.0" }, "engines": { "node": ">=6.9.0" } }, "node_modules/@bcoe/v8-coverage": { "version": "0.2.3", "resolved": "https: "integrity": "sha512-0hYQ8SB4Db5zvZB4axdMHGwEaQjkZzFjQiN9LVYvIFB2nSUHW9tYpxWriPrWDASIxiaXax83REcLxuSdnGPZtw==", "dev": true }, "node_modules/@istanbuljs/load-nyc-config": { "version": "1.1.0", "resolved": "https: "integrity": "sha512-VjeHSlIzpv/NyD3N0YuHfXOPDIixcA1q2ZV98wsMqcYlPmv2n3Yb2lYP9XMElnaFVXg5A7YLTeLu6V84uQDjmQ==", "dev": true, "dependencies": { "camelcase": "^5.3.1", "find-up": "^4.1.0", "get-package-type": "^0.1.0", "js-yaml": "^3.13.1", "resolve-from": "^5.0.0" }, "engines": { "node": ">=8" } }, "node_modules/@istanbuljs/schema": { "version": "0.1.3", "resolved": "https: "integrity": "sha512-ZXRY4jNvVgSVQ8DL3LTcakaAtXwTVUxE81hslsyD2AtoXW/wVob10HkOJ1X/pAlcI7D+2YoZKg5do8G/w6RYgA==", "dev": true, "engines": { "node": ">=8" } }, "node_modules/@jest/console": { "version": "29.7.0", "resolved": "https: "integrity": "sha512-5Ni4CU7XHQi32IJ398EEP4RrB8eV09sXP2ROqD4bksHrnTree52PsxvX8tpL8LvTZ3pFzXyPbNQReSN41CAhOg==", "dev": true, "dependencies": { "@jest/types": "^29.6.3", "@types/node": "*", "chalk": "^4.0.0", "jest-message-util": "^29.7.0", "jest-util": "^29.7.0", "slash": "^3.0.0" }, "engines": { "node": "^14.15.0 || ^16.10.0 || >=18.0.0" } }, "node_modules/@jest/core": { "version": "29.7.0", "resolved": "https: "integrity": 
"sha512-n7aeXWKMnGtDA48y8TLWJPJmLmmZ642Ceo78cYWEpiD7FzDgmNDV/GCVRorPABdXLJZ/9wzzgZAlHjXjxDHGsg==", "dev": true, "dependencies": { "@jest/console": "^29.7.0", "@jest/reporters": "^29.7.0", "@jest/test-result": "^29.7.0", "@jest/transform": "^29.7.0", "@jest/types": "^29.6.3", "@types/node": "*", "ansi-escapes": "^4.2.1", "chalk": "^4.0.0", "ci-info": "^3.2.0", "exit": "^0.1.2", "graceful-fs": "^4.2.9", "jest-changed-files": "^29.7.0", "jest-config": "^29.7.0", "jest-haste-map": "^29.7.0", "jest-message-util": "^29.7.0", "jest-regex-util": "^29.6.3", "jest-resolve": "^29.7.0", "jest-resolve-dependencies": "^29.7.0", "jest-runner": "^29.7.0", "jest-runtime": "^29.7.0", "jest-snapshot": "^29.7.0", "jest-util": "^29.7.0", "jest-validate": "^29.7.0", "jest-watcher": "^29.7.0", "micromatch": "^4.0.4", "pretty-format": "^29.7.0", "slash": "^3.0.0", "strip-ansi": "^6.0.0" }, "engines": { "node": "^14.15.0 || ^16.10.0 || >=18.0.0" }, "peerDependencies": { "node-notifier": "^8.0.1 || ^9.0.0 || ^10.0.0" }, "peerDependenciesMeta": { "node-notifier": { "optional": true } } }, "node_modules/@jest/environment": { "version": "29.7.0", "resolved": "https: "integrity": "sha512-aQIfHDq33ExsN4jP1NWGXhxgQ/wixs60gDiKO+XVMd8Mn0NWPWgc34ZQDTb2jKaUWQ7MuwoitXAsN2XVXNMpAw==", "dev": true, "dependencies": { "@jest/fake-timers": "^29.7.0", "@jest/types": "^29.6.3", "@types/node": "*", "jest-mock": "^29.7.0" }, "engines": { "node": "^14.15.0 || ^16.10.0 || >=18.0.0" } }, "node_modules/@jest/expect": { "version": "29.7.0", "resolved": "https: "integrity": "sha512-8uMeAMycttpva3P1lBHB8VciS9V0XAr3GymPpipdyQXbBcuhkLQOSe8E/p92RyAdToS6ZD1tFkX+CkhoECE0dQ==", "dev": true, "dependencies": { "expect": "^29.7.0", "jest-snapshot": "^29.7.0" }, "engines": { "node": "^14.15.0 || ^16.10.0 || >=18.0.0" } }, "node_modules/@jest/expect-utils": { "version": "29.7.0", "resolved": "https: "integrity": "sha512-GlsNBWiFQFCVi9QVSx7f5AgMeLxe9YCCs5PuP2O2LdjDAA8Jh9eX7lA1Jq/xdXw3Wb3hyvlFNfZIfcRetSzYcA==", "dev": true, "dependencies": { "jest-get-type": "^29.6.3" }, "engines": { "node": "^14.15.0 || ^16.10.0 || >=18.0.0" } }, "node_modules/@jest/fake-timers": { "version": "29.7.0", "resolved": "https: "integrity": "sha512-q4DH1Ha4TTFPdxLsqDXK1d3+ioSL7yL5oCMJZgDYm6i+6CygW5E5xVr/D1HdsGxjt1ZWSfUAs9OxSB/BNelWrQ==", "dev": true, "dependencies": { "@jest/types": "^29.6.3", "@sinonjs/fake-timers": "^10.0.2", "@types/node": "*", "jest-message-util": "^29.7.0", "jest-mock": "^29.7.0", "jest-util": "^29.7.0" }, "engines": { "node": "^14.15.0 || ^16.10.0 || >=18.0.0" } }, "node_modules/@jest/globals": { "version": "29.7.0", "resolved": "https: "integrity": "sha512-mpiz3dutLbkW2MNFubUGUEVLkTGiqW6yLVTA+JbP6fI6J5iL9Y0Nlg8k95pcF8ctKwCS7WVxteBs29hhfAotzQ==", "dev": true, "dependencies": { "@jest/environment": "^29.7.0", "@jest/expect": "^29.7.0", "@jest/types": "^29.6.3", "jest-mock": "^29.7.0" }, "engines": { "node": "^14.15.0 || ^16.10.0 || >=18.0.0" } }, "node_modules/@jest/reporters": { "version": "29.7.0", "resolved": "https: "integrity": "sha512-DApq0KJbJOEzAFYjHADNNxAE3KbhxQB1y5Kplb5Waqw6zVbuWatSnMjE5gs8FUgEPmNsnZA3NCWl9NG0ia04Pg==", "dev": true, "dependencies": { "@bcoe/v8-coverage": "^0.2.3", "@jest/console": "^29.7.0", "@jest/test-result": "^29.7.0", "@jest/transform": "^29.7.0", "@jest/types": "^29.6.3", "@jridgewell/trace-mapping": "^0.3.18", "@types/node": "*", "chalk": "^4.0.0", "collect-v8-coverage": "^1.0.0", "exit": "^0.1.2", "glob": "^7.1.3", "graceful-fs": "^4.2.9", "istanbul-lib-coverage": "^3.0.0", "istanbul-lib-instrument": "^6.0.0", 
"istanbul-lib-report": "^3.0.0", "istanbul-lib-source-maps": "^4.0.0", "istanbul-reports": "^3.1.3", "jest-message-util": "^29.7.0", "jest-util": "^29.7.0", "jest-worker": "^29.7.0", "slash": "^3.0.0", "string-length": "^4.0.1", "strip-ansi": "^6.0.0", "v8-to-istanbul": "^9.0.1" }, "engines": { "node": "^14.15.0 || ^16.10.0 || >=18.0.0" }, "peerDependencies": { "node-notifier": "^8.0.1 || ^9.0.0 || ^10.0.0" }, "peerDependenciesMeta": { "node-notifier": { "optional": true } } }, "node_modules/@jest/schemas": { "version": "29.6.3", "resolved": "https: "integrity": "sha512-mo5j5X+jIZmJQveBKeS/clAueipV7KgiX1vMgCxam1RNYiqE1w62n0/tJJnHtjW8ZHcQco5gY85jA3mi0L+nSA==", "dev": true, "dependencies": { "@sinclair/typebox": "^0.27.8" }, "engines": { "node": "^14.15.0 || ^16.10.0 || >=18.0.0" } }, "node_modules/@jest/source-map": { "version": "29.6.3", "resolved": "https: "integrity": "sha512-MHjT95QuipcPrpLM+8JMSzFx6eHp5Bm+4XeFDJlwsvVBjmKNiIAvasGK2fxz2WbGRlnvqehFbh07MMa7n3YJnw==", "dev": true, "dependencies": { "@jridgewell/trace-mapping": "^0.3.18", "callsites": "^3.0.0", "graceful-fs": "^4.2.9" }, "engines": { "node": "^14.15.0 || ^16.10.0 || >=18.0.0" } }, "node_modules/@jest/test-result": { "version": "29.7.0", "resolved": "https: "integrity": "sha512-Fdx+tv6x1zlkJPcWXmMDAG2HBnaR9XPSd5aDWQVsfrZmLVT3lU1cwyxLgRmXR9yrq4NBoEm9BMsfgFzTQAbJYA==", "dev": true, "dependencies": { "@jest/console": "^29.7.0", "@jest/types": "^29.6.3", "@types/istanbul-lib-coverage": "^2.0.0", "collect-v8-coverage": "^1.0.0" }, "engines": { "node": "^14.15.0 || ^16.10.0 || >=18.0.0" } }, "node_modules/@jest/test-sequencer": { "version": "29.7.0", "resolved": "https: "integrity": "sha512-GQwJ5WZVrKnOJuiYiAF52UNUJXgTZx1NHjFSEB0qEMmSZKAkdMoIzw/Cj6x6NF4AvV23AUqDpFzQkN/eYCYTxw==", "dev": true, "dependencies": { "@jest/test-result": "^29.7.0", "graceful-fs": "^4.2.9", "jest-haste-map": "^29.7.0", "slash": "^3.0.0" }, "engines": { "node": "^14.15.0 || ^16.10.0 || >=18.0.0" } }, "node_modules/@jest/transform": { "version": "29.7.0", "resolved": "https: "integrity": "sha512-ok/BTPFzFKVMwO5eOHRrvnBVHdRy9IrsrW1GpMaQ9MCnilNLXQKmAX8s1YXDFaai9xJpac2ySzV0YeRRECr2Vw==", "dev": true, "dependencies": { "@babel/core": "^7.11.6", "@jest/types": "^29.6.3", "@jridgewell/trace-mapping": "^0.3.18", "babel-plugin-istanbul": "^6.1.1", "chalk": "^4.0.0", "convert-source-map": "^2.0.0", "fast-json-stable-stringify": "^2.1.0", "graceful-fs": "^4.2.9", "jest-haste-map": "^29.7.0", "jest-regex-util": "^29.6.3", "jest-util": "^29.7.0", "micromatch": "^4.0.4", "pirates": "^4.0.4", "slash": "^3.0.0", "write-file-atomic": "^4.0.2" }, "engines": { "node": "^14.15.0 || ^16.10.0 || >=18.0.0" } }, "node_modules/@jest/types": { "version": "29.6.3", "resolved": "https: "integrity": "sha512-u3UPsIilWKOM3F9CXtrG8LEJmNxwoCQC/XVj4IKYXvvpx7QIi/Kg1LI5uDmDpKlac62NUtX7eLjRh+jVZcLOzw==", "dev": true, "dependencies": { "@jest/schemas": "^29.6.3", "@types/istanbul-lib-coverage": "^2.0.0", "@types/istanbul-reports": "^3.0.0", "@types/node": "*", "@types/yargs": "^17.0.8", "chalk": "^4.0.0" }, "engines": { "node": "^14.15.0 || ^16.10.0 || >=18.0.0" } }, "node_modules/@jridgewell/gen-mapping": { "version": "0.3.3", "resolved": "https: "integrity": "sha512-HLhSWOLRi875zjjMG/r+Nv0oCW8umGb0BgEhyX3dDX3egwZtB8PqLnjz3yedt8R5StBrzcg4aBpnh8UA9D1BoQ==", "dev": true, "dependencies": { "@jridgewell/set-array": "^1.0.1", "@jridgewell/sourcemap-codec": "^1.4.10", "@jridgewell/trace-mapping": "^0.3.9" }, "engines": { "node": ">=6.0.0" } }, "node_modules/@jridgewell/resolve-uri": { "version": 
"3.1.1", "resolved": "https: "integrity": "sha512-dSYZh7HhCDtCKm4QakX0xFpsRDqjjtZf/kjI/v3T3Nwt5r8/qz/M19F9ySyOqU94SXBmeG9ttTul+YnR4LOxFA==", "dev": true, "engines": { "node": ">=6.0.0" } }, "node_modules/@jridgewell/set-array": { "version": "1.1.2", "resolved": "https: "integrity": "sha512-xnkseuNADM0gt2bs+BvhO0p78Mk762YnZdsuzFV018NoG1Sj1SCQvpSqa7XUaTam5vAGasABV9qXASMKnFMwMw==", "dev": true, "engines": { "node": ">=6.0.0" } }, "node_modules/@jridgewell/sourcemap-codec": { "version": "1.4.15", "resolved": "https: "integrity": "sha512-eF2rxCRulEKXHTRiDrDy6erMYWqNw4LPdQ8UQA4huuxaQsVeRPFl2oM8oDGxMFhJUWZf9McpLtJasDDZb/Bpeg==", "dev": true }, "node_modules/@jridgewell/trace-mapping": { "version": "0.3.19", "resolved": "https: "integrity": "sha512-kf37QtfW+Hwx/buWGMPcR60iF9ziHa6r/CZJIHbmcm4+0qrXiVdxegAH0F6yddEVQ7zdkjcGCgCzUu+BcbhQxw==", "dev": true, "dependencies": { "@jridgewell/resolve-uri": "^3.1.0", "@jridgewell/sourcemap-codec": "^1.4.14" } }, "node_modules/@sinclair/typebox": { "version": "0.27.8", "resolved": "https: "integrity": "sha512-+Fj43pSMwJs4KRrH/938Uf+uAELIgVBmQzg/q1YG10djyfA3TnrU8N8XzqCh/okZdszqBQTZf96idMfE5lnwTA==", "dev": true }, "node_modules/@sinonjs/commons": { "version": "3.0.0", "resolved": "https: "integrity": "sha512-jXBtWAF4vmdNmZgD5FoKsVLv3rPgDnLgPbU84LIJ3otV44vJlDRokVng5v8NFJdCf/da9legHcKaRuZs4L7faA==", "dev": true, "dependencies": { "type-detect": "4.0.8" } }, "node_modules/@sinonjs/fake-timers": { "version": "10.3.0", "resolved": "https: "integrity": "sha512-V4BG07kuYSUkTCSBHG8G8TNhM+F19jXFWnQtzj+we8DrkpSBCee9Z3Ms8yiGer/dlmhe35/Xdgyo3/0rQKg7YA==", "dev": true, "dependencies": { "@sinonjs/commons": "^3.0.0" } }, "node_modules/@tootallnate/once": { "version": "2.0.0", "resolved": "https: "integrity": "sha512-XCuKFP5PS55gnMVu3dty8KPatLqUoy/ZYzDzAGCQ8JNFCkLXzmI7vNHCR+XpbZaMWQK/vQubr7PkYq8g470J/A==", "dev": true, "engines": { "node": ">= 10" } }, "node_modules/@types/babel__core": { "version": "7.20.2", "resolved": "https: "integrity": "sha512-pNpr1T1xLUc2l3xJKuPtsEky3ybxN3m4fJkknfIpTCTfIZCDW57oAg+EfCgIIp2rvCe0Wn++/FfodDS4YXxBwA==", "dev": true, "dependencies": { "@babel/parser": "^7.20.7", "@babel/types": "^7.20.7", "@types/babel__generator": "*", "@types/babel__template": "*", "@types/babel__traverse": "*" } }, "node_modules/@types/babel__generator": { "version": "7.6.5", "resolved": "https: "integrity": "sha512-h9yIuWbJKdOPLJTbmSpPzkF67e659PbQDba7ifWm5BJ8xTv+sDmS7rFmywkWOvXedGTivCdeGSIIX8WLcRTz8w==", "dev": true, "dependencies": { "@babel/types": "^7.0.0" } }, "node_modules/@types/babel__template": { "version": "7.4.2", "resolved": "https: "integrity": "sha512-/AVzPICMhMOMYoSx9MoKpGDKdBRsIXMNByh1PXSZoa+v6ZoLa8xxtsT/uLQ/NJm0XVAWl/BvId4MlDeXJaeIZQ==", "dev": true, "dependencies": { "@babel/parser": "^7.1.0", "@babel/types": "^7.0.0" } }, "node_modules/@types/babel__traverse": { "version": "7.20.2", "resolved": "https: "integrity": "sha512-ojlGK1Hsfce93J0+kn3H5R73elidKUaZonirN33GSmgTUMpzI/MIFfSpF3haANe3G1bEBS9/9/QEqwTzwqFsKw==", "dev": true, "dependencies": { "@babel/types": "^7.20.7" } }, "node_modules/@types/graceful-fs": { "version": "4.1.7", "resolved": "https: "integrity": "sha512-MhzcwU8aUygZroVwL2jeYk6JisJrPl/oov/gsgGCue9mkgl9wjGbzReYQClxiUgFDnib9FuHqTndccKeZKxTRw==", "dev": true, "dependencies": { "@types/node": "*" } }, "node_modules/@types/istanbul-lib-coverage": { "version": "2.0.4", "resolved": "https: "integrity": "sha512-z/QT1XN4K4KYuslS23k62yDIDLwLFkzxOuMplDtObz0+y7VqJCaO2o+SPwHCvLFZh7xazvvoor2tA/hPz9ee7g==", "dev": true }, 
"node_modules/@types/istanbul-lib-report": { "version": "3.0.1", "resolved": "https: "integrity": "sha512-gPQuzaPR5h/djlAv2apEG1HVOyj1IUs7GpfMZixU0/0KXT3pm64ylHuMUI1/Akh+sq/iikxg6Z2j+fcMDXaaTQ==", "dev": true, "dependencies": { "@types/istanbul-lib-coverage": "*" } }, "node_modules/@types/istanbul-reports": { "version": "3.0.2", "resolved": "https: "integrity": "sha512-kv43F9eb3Lhj+lr/Hn6OcLCs/sSM8bt+fIaP11rCYngfV6NVjzWXJ17owQtDQTL9tQ8WSLUrGsSJ6rJz0F1w1A==", "dev": true, "dependencies": { "@types/istanbul-lib-report": "*" } }, "node_modules/@types/jest": { "version": "29.5.5", "resolved": "https: "integrity": "sha512-ebylz2hnsWR9mYvmBFbXJXr+33UPc4+ZdxyDXh5w0FlPBTfCVN3wPL+kuOiQt3xvrK419v7XWeAs+AeOksafXg==", "dev": true, "dependencies": { "expect": "^29.0.0", "pretty-format": "^29.0.0" } }, "node_modules/@types/jsdom": { "version": "20.0.1", "resolved": "https: "integrity": "sha512-d0r18sZPmMQr1eG35u12FZfhIXNrnsPU/g5wvRKCUf/tOGilKKwYMYGqh33BNR6ba+2gkHw1EUiHoN3mn7E5IQ==", "dev": true, "dependencies": { "@types/node": "*", "@types/tough-cookie": "*", "parse5": "^7.0.0" } }, "node_modules/@types/node": { "version": "20.8.3", "resolved": "https: "integrity": "sha512-jxiZQFpb+NlH5kjW49vXxvxTjeeqlbsnTAdBTKpzEdPs9itay7MscYXz3Fo9VYFEsfQ6LJFitHad3faerLAjCw==", "dev": true }, "node_modules/@types/stack-utils": { "version": "2.0.1", "resolved": "https: "integrity": "sha512-Hl219/BT5fLAaz6NDkSuhzasy49dwQS/DSdu4MdggFB8zcXv7vflBI3xp7FEmkmdDkBUI2bPUNeMttp2knYdxw==", "dev": true }, "node_modules/@types/tough-cookie": { "version": "4.0.3", "resolved": "https: "integrity": "sha512-THo502dA5PzG/sfQH+42Lw3fvmYkceefOspdCwpHRul8ik2Jv1K8I5OZz1AT3/rs46kwgMCe9bSBmDLYkkOMGg==", "dev": true }, "node_modules/@types/yargs": { "version": "17.0.28", "resolved": "https: "integrity": "sha512-N3e3fkS86hNhtk6BEnc0rj3zcehaxx8QWhCROJkqpl5Zaoi7nAic3jH8q94jVD3zu5LGk+PUB6KAiDmimYOEQw==", "dev": true, "dependencies": { "@types/yargs-parser": "*" } }, "node_modules/@types/yargs-parser": { "version": "21.0.1", "resolved": "https: "integrity": "sha512-axdPBuLuEJt0c4yI5OZssC19K2Mq1uKdrfZBzuxLvaztgqUtFYZUNw7lETExPYJR9jdEoIg4mb7RQKRQzOkeGQ==", "dev": true }, "node_modules/abab": { "version": "2.0.6", "resolved": "https: "integrity": "sha512-j2afSsaIENvHZN2B8GOpF566vZ5WVk5opAiMTvWgaQT8DkbOqsTfvNAvHoRGU2zzP8cPoqys+xHTRDWW8L+/BA==", "dev": true }, "node_modules/acorn": { "version": "8.10.0", "resolved": "https: "integrity": "sha512-F0SAmZ8iUtS "dev": true, "bin": { "acorn": "bin/acorn" }, "engines": { "node": ">=0.4.0" } }, "node_modules/acorn-globals": { "version": "7.0.1", "resolved": "https: "integrity": "sha512-umOSDSDrfHbTNPuNpC2NSnnA3LUrqpevPb4T9jRx4MagXNS0rs+gwiTcAvqCRmsD6utzsrzNt+ebm00SNWiC3Q==", "dev": true, "dependencies": { "acorn": "^8.1.0", "acorn-walk": "^8.0.2" } }, "node_modules/acorn-walk": { "version": "8.2.0", "resolved": "https: "integrity": "sha512-k+iyHEuPgSw6SbuDpGQM+06HQUa04DZ3o+F6CSzXMvvI5KMvnaEqXe+YVe555R9nn6GPt404fos4wcgpw12SDA==", "dev": true, "engines": { "node": ">=0.4.0" } }, "node_modules/agent-base": { "version": "6.0.2", "resolved": "https: "integrity": "sha512-RZNwNclF7+MS/8bDg70amg32dyeZGZxiDuQmZxKLAlQjr3jGyLx+4Kkk58UO7D2QdgFIQCovuSuZESne6RG6XQ==", "dev": true, "dependencies": { "debug": "4" }, "engines": { "node": ">= 6.0.0" } }, "node_modules/ansi-escapes": { "version": "4.3.2", "resolved": "https: "integrity": "sha512-gKXj5ALrKWQLsYG9jlTRmR/xKluxHV+Z9QEwNIgCfM1/uwPMCuzVVnh5mwTd+OuBZcwSIMbqssNWRm1lE51QaQ==", "dev": true, "dependencies": { "type-fest": "^0.21.3" }, "engines": { "node": ">=8" }, 
"funding": { "url": "https: } }, "node_modules/ansi-regex": { "version": "5.0.1", "resolved": "https: "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", "dev": true, "engines": { "node": ">=8" } }, "node_modules/ansi-styles": { "version": "4.3.0", "resolved": "https: "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", "dev": true, "dependencies": { "color-convert": "^2.0.1" }, "engines": { "node": ">=8" }, "funding": { "url": "https: } }, "node_modules/anymatch": { "version": "3.1.3", "resolved": "https: "integrity": "sha512-KMReFUr0B4t+D+OBkjR3KYqvocp2XaSzO55UcB6mgQMd3KbcE+mWTyvVV7D/zsdEbNnV6acZUutkiHQXvTr1Rw==", "dev": true, "dependencies": { "normalize-path": "^3.0.0", "picomatch": "^2.0.4" }, "engines": { "node": ">= 8" } }, "node_modules/argparse": { "version": "1.0.10", "resolved": "https: "integrity": "sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg==", "dev": true, "dependencies": { "sprintf-js": "~1.0.2" } }, "node_modules/asynckit": { "version": "0.4.0", "resolved": "https: "integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==", "dev": true }, "node_modules/babel-jest": { "version": "29.7.0", "resolved": "https: "integrity": "sha512-BrvGY3xZSwEcCzKvKsCi2GgHqDqsYkOP4/by5xCgIwGXQxIEh+8ew3gmrE1y7XRR6LHZIj6yLYnUi/mm2KXKBg==", "dev": true, "dependencies": { "@jest/transform": "^29.7.0", "@types/babel__core": "^7.1.14", "babel-plugin-istanbul": "^6.1.1", "babel-preset-jest": "^29.6.3", "chalk": "^4.0.0", "graceful-fs": "^4.2.9", "slash": "^3.0.0" }, "engines": { "node": "^14.15.0 || ^16.10.0 || >=18.0.0" }, "peerDependencies": { "@babel/core": "^7.8.0" } }, "node_modules/babel-plugin-istanbul": { "version": "6.1.1", "resolved": "https: "integrity": "sha512-Y1IQok9821cC9onCx5otgFfRm7Lm+I+wwxOx738M/WLPZ9Q42m4IG5W0FNX8WLL2gYMZo3JkuXIH2DOpWM+qwA==", "dev": true, "dependencies": { "@babel/helper-plugin-utils": "^7.0.0", "@istanbuljs/load-nyc-config": "^1.0.0", "@istanbuljs/schema": "^0.1.2", "istanbul-lib-instrument": "^5.0.4", "test-exclude": "^6.0.0" }, "engines": { "node": ">=8" } }, "node_modules/babel-plugin-istanbul/node_modules/istanbul-lib-instrument": { "version": "5.2.1", "resolved": "https: "integrity": "sha512-pzqtp31nLv/XFOzXGuvhCb8qhjmTVo5vjVk19XE4CRlSWz0KoeJ3bw9XsA7nOp9YBf4qHjwBxkDzKcME/J29Yg==", "dev": true, "dependencies": { "@babel/core": "^7.12.3", "@babel/parser": "^7.14.7", "@istanbuljs/schema": "^0.1.2", "istanbul-lib-coverage": "^3.2.0", "semver": "^6.3.0" }, "engines": { "node": ">=8" } }, "node_modules/babel-plugin-jest-hoist": { "version": "29.6.3", "resolved": "https: "integrity": "sha512-ESAc/RJvGTFEzRwOTT4+lNDk/GNHMkKbNzsvT0qKRfDyyYTskxB5rnU2njIDYVxXCBHHEI1c0YwHob3WaYujOg==", "dev": true, "dependencies": { "@babel/template": "^7.3.3", "@babel/types": "^7.3.3", "@types/babel__core": "^7.1.14", "@types/babel__traverse": "^7.0.6" }, "engines": { "node": "^14.15.0 || ^16.10.0 || >=18.0.0" } }, "node_modules/babel-plugin-polyfill-corejs2": { "version": "0.4.5", "resolved": "https: "integrity": "sha512-19hwUH5FKl49JEsvyTcoHakh6BE0wgXLLptIyKZ3PijHc/Ci521wygORCUCCred+E/twuqRyAkE02BAWPmsHOg==", "dev": true, "dependencies": { "@babel/compat-data": "^7.22.6", "@babel/helper-define-polyfill-provider": "^0.4.2", "semver": "^6.3.1" }, "peerDependencies": { "@babel/core": "^7.4.0 || ^8.0.0-0 <8.0.0" } }, "node_modules/babel-plugin-polyfill-corejs3": { 
"version": "0.8.4", "resolved": "https: "integrity": "sha512-9l "dev": true, "dependencies": { "@babel/helper-define-polyfill-provider": "^0.4.2", "core-js-compat": "^3.32.2" }, "peerDependencies": { "@babel/core": "^7.4.0 || ^8.0.0-0 <8.0.0" } }, "node_modules/babel-plugin-polyfill-regenerator": { "version": "0.5.2", "resolved": "https: "integrity": "sha512-tAlOptU0Xj34V1Y2PNTL4Y0FOJMDB6bZmoW39FeCQIhigGLkqu3Fj6uiXpxIf6Ij274ENdYx64y6Au+ZKlb1IA==", "dev": true, "dependencies": { "@babel/helper-define-polyfill-provider": "^0.4.2" }, "peerDependencies": { "@babel/core": "^7.4.0 || ^8.0.0-0 <8.0.0" } }, "node_modules/babel-plugin-transform-import-meta": { "version": "2.2.1", "resolved": "https: "integrity": "sha512-AxNh27Pcg8Kt112RGa3Vod2QS2YXKKJ6+nSvRtv7qQTJAdx0MZa4UHZ4lnxHUWA2MNbLuZQv5FVab4P1CoLOWw==", "dev": true, "dependencies": { "@babel/template": "^7.4.4", "tslib": "^2.4.0" }, "peerDependencies": { "@babel/core": "^7.10.0" } }, "node_modules/babel-preset-current-node-syntax": { "version": "1.0.1", "resolved": "https: "integrity": "sha512-M7LQ0bxarkxQoN+vz5aJPsLBn77n8QgTFmo8WK0/44auK2xlCXrYcUxHFxgU7qW5Yzw/CjmLRK2uJzaCd7LvqQ==", "dev": true, "dependencies": { "@babel/plugin-syntax-async-generators": "^7.8.4", "@babel/plugin-syntax-bigint": "^7.8.3", "@babel/plugin-syntax-class-properties": "^7.8.3", "@babel/plugin-syntax-import-meta": "^7.8.3", "@babel/plugin-syntax-json-strings": "^7.8.3", "@babel/plugin-syntax-logical-assignment-operators": "^7.8.3", "@babel/plugin-syntax-nullish-coalescing-operator": "^7.8.3", "@babel/plugin-syntax-numeric-separator": "^7.8.3", "@babel/plugin-syntax-object-rest-spread": "^7.8.3", "@babel/plugin-syntax-optional-catch-binding": "^7.8.3", "@babel/plugin-syntax-optional-chaining": "^7.8.3", "@babel/plugin-syntax-top-level-await": "^7.8.3" }, "peerDependencies": { "@babel/core": "^7.0.0" } }, "node_modules/babel-preset-jest": { "version": "29.6.3", "resolved": "https: "integrity": "sha512-0B3bhxR6snWXJZtR/RliHTDPRgn1sNHOR0yVtq/IiQFyuOVjFS+wuio/R4gSNkyYmKmJB4wGZv2NZanmKmTnNA==", "dev": true, "dependencies": { "babel-plugin-jest-hoist": "^29.6.3", "babel-preset-current-node-syntax": "^1.0.0" }, "engines": { "node": "^14.15.0 || ^16.10.0 || >=18.0.0" }, "peerDependencies": { "@babel/core": "^7.0.0" } }, "node_modules/balanced-match": { "version": "1.0.2", "resolved": "https: "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==", "dev": true }, "node_modules/brace-expansion": { "version": "1.1.11", "resolved": "https: "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", "dev": true, "dependencies": { "balanced-match": "^1.0.0", "concat-map": "0.0.1" } }, "node_modules/braces": { "version": "3.0.2", "resolved": "https: "integrity": "sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==", "dev": true, "dependencies": { "fill-range": "^7.0.1" }, "engines": { "node": ">=8" } }, "node_modules/browserslist": { "version": "4.22.1", "resolved": "https: "integrity": "sha512-FEVc202+2iuClEhZhrWy6ZiAcRLvNMyYcxZ8raemul1DYVOVdFsbqckWLdsixQZCpJlwe77Z3UTalE7jsjnKfQ==", "dev": true, "funding": [ { "type": "opencollective", "url": "https: }, { "type": "tidelift", "url": "https: }, { "type": "github", "url": "https: } ], "dependencies": { "caniuse-lite": "^1.0.30001541", "electron-to-chromium": "^1.4.535", "node-releases": "^2.0.13", "update-browserslist-db": "^1.0.13" }, "bin": { "browserslist": "cli.js" }, "engines": { 
"node": "^6 || ^7 || ^8 || ^9 || ^10 || ^11 || ^12 || >=13.7" } }, "node_modules/bser": { "version": "2.1.1", "resolved": "https: "integrity": "sha512-gQxTNE/GAfIIrmHLUE3oJyp5FO6HRBfhjnw4/wMmA63ZGDJnWBmgY/lyQBpnDUkGmAhbSe39tx2d/iTOAfglwQ==", "dev": true, "dependencies": { "node-int64": "^0.4.0" } }, "node_modules/buffer-from": { "version": "1.1.2", "resolved": "https: "integrity": "sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ==", "dev": true }, "node_modules/callsites": { "version": "3.1.0", "resolved": "https: "integrity": "sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ==", "dev": true, "engines": { "node": ">=6" } }, "node_modules/camelcase": { "version": "5.3.1", "resolved": "https: "integrity": "sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg==", "dev": true, "engines": { "node": ">=6" } }, "node_modules/caniuse-lite": { "version": "1.0.30001546", "resolved": "https: "integrity": "sha512-zvtSJwuQFpewSyRrI3AsftF6rM0X80mZkChIt1spBGEvRglCrjTniXvinc8JKRoqTwXAgvqTImaN9igfSMtUBw==", "dev": true, "funding": [ { "type": "opencollective", "url": "https: }, { "type": "tidelift", "url": "https: }, { "type": "github", "url": "https: } ] }, "node_modules/chalk": { "version": "4.1.2", "resolved": "https: "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", "dev": true, "dependencies": { "ansi-styles": "^4.1.0", "supports-color": "^7.1.0" }, "engines": { "node": ">=10" }, "funding": { "url": "https: } }, "node_modules/char-regex": { "version": "1.0.2", "resolved": "https: "integrity": "sha512-kWWXztvZ5SBQV+eRgKFeh8q5sLuZY2+8WUIzlxWVTg+oGwY14qylx1KbKzHd8P6ZYkAg0xyIDU9JMHhyJMZ1jw==", "dev": true, "engines": { "node": ">=10" } }, "node_modules/ci-info": { "version": "3.9.0", "resolved": "https: "integrity": "sha512-NIxF55hv4nSqQswkAeiOi1r83xy8JldOFDTWiug55KBu9Jnblncd2U6ViHmYgHf01TPZS77NJBhBMKdWj9HQMQ==", "dev": true, "funding": [ { "type": "github", "url": "https: } ], "engines": { "node": ">=8" } }, "node_modules/cjs-module-lexer": { "version": "1.2.3", "resolved": "https: "integrity": "sha512-0TNiGstbQmCFwt4akjjBg5pLRTSyj/PkWQ1ZoO2zntmg9yLqSRxwEa4iCfQLGjqhiqBfOJa7W/E8wfGrTDmlZQ==", "dev": true }, "node_modules/cliui": { "version": "8.0.1", "resolved": "https: "integrity": "sha512-BSeNnyus75C4 "dev": true, "dependencies": { "string-width": "^4.2.0", "strip-ansi": "^6.0.1", "wrap-ansi": "^7.0.0" }, "engines": { "node": ">=12" } }, "node_modules/co": { "version": "4.6.0", "resolved": "https: "integrity": "sha512-QVb0dM5HvG+uaxitm8wONl7jltx8dqhfU33DcqtOZcLSVIKSDDLDi7+0LbAKiyI8hD9u42m2YxXSkMGWThaecQ==", "dev": true, "engines": { "iojs": ">= 1.0.0", "node": ">= 0.12.0" } }, "node_modules/collect-v8-coverage": { "version": "1.0.2", "resolved": "https: "integrity": "sha512-lHl4d5/ONEbLlJvaJNtsF/Lz+WvB07u2ycqTYbdrq7UypDXailES4valYb2eWiJFxZlVmpGekfqoxQhzyFdT4Q==", "dev": true }, "node_modules/color-convert": { "version": "2.0.1", "resolved": "https: "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", "dev": true, "dependencies": { "color-name": "~1.1.4" }, "engines": { "node": ">=7.0.0" } }, "node_modules/color-name": { "version": "1.1.4", "resolved": "https: "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", "dev": true }, "node_modules/combined-stream": { "version": "1.0.8", "resolved": 
"https: "integrity": "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==", "dev": true, "dependencies": { "delayed-stream": "~1.0.0" }, "engines": { "node": ">= 0.8" } }, "node_modules/concat-map": { "version": "0.0.1", "resolved": "https: "integrity": "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==", "dev": true }, "node_modules/convert-source-map": { "version": "2.0.0", "resolved": "https: "integrity": "sha512-Kvp459HrV2FEJ1CAsi1Ku+MY3kasH19TFykTz2xWmMeq6bk2NU3XXvfJ+Q61m0xktWwt+1HSYf3JZsTms3aRJg==", "dev": true }, "node_modules/core-js-compat": { "version": "3.33.0", "resolved": "https: "integrity": "sha512-0w4LcLXsVEuNkIqwjjf9rjCoPhK8uqA4tMRh4Ge26vfLtUutshn+aRJU21I9LCJlh2QQHfisNToLjw1XEJLTWw==", "dev": true, "dependencies": { "browserslist": "^4.22.1" }, "funding": { "type": "opencollective", "url": "https: } }, "node_modules/create-jest": { "version": "29.7.0", "resolved": "https: "integrity": "sha512-Adz2bdH0Vq3F53KEMJOoftQFutWCukm6J24wbPWRO4k1kMY7gS7ds/uoJkNuV8wDCtWWnuwGcJwpWcih+zEW1Q==", "dev": true, "dependencies": { "@jest/types": "^29.6.3", "chalk": "^4.0.0", "exit": "^0.1.2", "graceful-fs": "^4.2.9", "jest-config": "^29.7.0", "jest-util": "^29.7.0", "prompts": "^2.0.1" }, "bin": { "create-jest": "bin/create-jest.js" }, "engines": { "node": "^14.15.0 || ^16.10.0 || >=18.0.0" } }, "node_modules/cross-spawn": { "version": "7.0.3", "resolved": "https: "integrity": "sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==", "dev": true, "dependencies": { "path-key": "^3.1.0", "shebang-command": "^2.0.0", "which": "^2.0.1" }, "engines": { "node": ">= 8" } }, "node_modules/cssom": { "version": "0.5.0", "resolved": "https: "integrity": "sha512-iKuQcq+NdHqlAcwUY0o/HL69XQrUaQdMjmStJ8JFmUaiiQErlhrmuigkg/CU4E2J0IyUKUrMAgl36TvN67MqTw==", "dev": true }, "node_modules/cssstyle": { "version": "2.3.0", "resolved": "https: "integrity": "sha512-AZL67abkUzIuvcHqk7c09cezpGNcxUxU4Ioi/05xHk4DQeTkWmGYftIE6ctU6AEt+Gn4n1lDStOtj7FKycP71A==", "dev": true, "dependencies": { "cssom": "~0.3.6" }, "engines": { "node": ">=8" } }, "node_modules/cssstyle/node_modules/cssom": { "version": "0.3.8", "resolved": "https: "integrity": "sha512-b0tGHbfegbhPJpxpiBPU2sCkigAqtM9O121le6bbOlgyV+NyGyCmVfJ6QW9eRjz8CpNfWEOYBIMIGRYkLwsIYg==", "dev": true }, "node_modules/data-urls": { "version": "3.0.2", "resolved": "https: "integrity": "sha512-Jy/tj3ldjZJo63sVAvg6LHt2mHvl4V6AgRAmNDtLdm7faqtsx+aJG42rsyCo9JCoRVKwPFzKlIPx3DIibwSIaQ==", "dev": true, "dependencies": { "abab": "^2.0.6", "whatwg-mimetype": "^3.0.0", "whatwg-url": "^11.0.0" }, "engines": { "node": ">=12" } }, "node_modules/debug": { "version": "4.3.4", "resolved": "https: "integrity": "sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==", "dev": true, "dependencies": { "ms": "2.1.2" }, "engines": { "node": ">=6.0" }, "peerDependenciesMeta": { "supports-color": { "optional": true } } }, "node_modules/decimal.js": { "version": "10.4.3", "resolved": "https: "integrity": "sha512-VBBaLc1MgL5XpzgIP7ny5Z6Nx3UrRkIViUkPUdtl9aya5amy3De1gsUUSB1g3+3sExYNjCAsAznmukyxCb1GRA==", "dev": true }, "node_modules/dedent": { "version": "1.5.1", "resolved": "https: "integrity": "sha512-+LxW+KLWxu3HW3M2w2ympwtqPrqYRzU8fqi6Fhd18fBALe15blJPI/I4+UHveMVG6lJqB4JNd4UG0S5cnVHwIg==", "dev": true, "peerDependencies": { "babel-plugin-macros": "^3.1.0" }, "peerDependenciesMeta": { "babel-plugin-macros": { "optional": true } } }, 
"node_modules/deepmerge": { "version": "4.3.1", "resolved": "https: "integrity": "sha512-3sUqbMEc77XqpdNO7FRyRog+eW3ph+GYCbj+rK+uYyRMuwsVy0rMiVtPn+QJlKFvWP/1PYpapqYn0Me2knFn+A==", "dev": true, "engines": { "node": ">=0.10.0" } }, "node_modules/delayed-stream": { "version": "1.0.0", "resolved": "https: "integrity": "sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==", "dev": true, "engines": { "node": ">=0.4.0" } }, "node_modules/detect-newline": { "version": "3.1.0", "resolved": "https: "integrity": "sha512-TLz+x/vEXm/Y7P7wn1EJFNLxYpUD4TgMosxY6fAVJUnJMbupHBOncxyWUG9OpTaH9EBD7uFI5LfEgmMOc54DsA==", "dev": true, "engines": { "node": ">=8" } }, "node_modules/diff-sequences": { "version": "29.6.3", "resolved": "https: "integrity": "sha512-EjePK1srD3P08o2j4f0ExnylqRs5B9tJjcp9t1krH2qRi8CCdsYfwe9JgSLurFBWwq4uOlipzfk5fHNvwFKr8Q==", "dev": true, "engines": { "node": "^14.15.0 || ^16.10.0 || >=18.0.0" } }, "node_modules/domexception": { "version": "4.0.0", "resolved": "https: "integrity": "sha512-A2is4PLG+eeSfoTMA95/s4pvAoSo2mKtiM5jlHkAVewmiO8ISFTFKZjH7UAM1Atli/OT/7JHOrJRJiMKUZKYBw==", "dev": true, "dependencies": { "webidl-conversions": "^7.0.0" }, "engines": { "node": ">=12" } }, "node_modules/electron-to-chromium": { "version": "1.4.544", "resolved": "https: "integrity": "sha512-54z7squS1FyFRSUqq/knOFSptjjogLZXbKcYk3B0qkE1KZzvqASwRZnY2KzZQJqIYLVD38XZeoiMRflYSwyO4w==", "dev": true }, "node_modules/emittery": { "version": "0.13.1", "resolved": "https: "integrity": "sha512-DeWwawk6r5yR9jFgnDKYt4sLS0LmHJJi3ZOnb5/JdbYwj3nW+FxQnHIjhBKz8YLC7oRNPVM9NQ47I3CVx34eqQ==", "dev": true, "engines": { "node": ">=12" }, "funding": { "url": "https: } }, "node_modules/emoji-regex": { "version": "8.0.0", "resolved": "https: "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", "dev": true }, "node_modules/entities": { "version": "4.5.0", "resolved": "https: "integrity": "sha512-V0hjH4dGPh9Ao5p0MoRY6BVqtwCjhz6vI5LT8AJ55H+4g9/4vbHx1I54fS0XuclLhDHArPQCiMjDxjaL8fPxhw==", "dev": true, "engines": { "node": ">=0.12" }, "funding": { "url": "https: } }, "node_modules/error-ex": { "version": "1.3.2", "resolved": "https: "integrity": "sha512-7dFHNmqeFSEt2ZBsCriorKnn3Z2pj+fd9kmI6QoWw4 "dev": true, "dependencies": { "is-arrayish": "^0.2.1" } }, "node_modules/escalade": { "version": "3.1.1", "resolved": "https: "integrity": "sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw==", "dev": true, "engines": { "node": ">=6" } }, "node_modules/escape-string-regexp": { "version": "2.0.0", "resolved": "https: "integrity": "sha512-UpzcLCXolUWcNu5HtVMHYdXJjArjsF9C0aNnquZYY4uW/Vu0miy5YoWvbV345HauVvcAUnpRuhMMcqTcGOY2+w==", "dev": true, "engines": { "node": ">=8" } }, "node_modules/escodegen": { "version": "2.1.0", "resolved": "https: "integrity": "sha512-2NlIDTwUWJN0mRPQOdtQBzbUHvdGY2P1VXSyU83Q3xKxM7WHX2Ql8dKq782Q9TgQUNOLEzEYu9bzLNj1q88I5w==", "dev": true, "dependencies": { "esprima": "^4.0.1", "estraverse": "^5.2.0", "esutils": "^2.0.2" }, "bin": { "escodegen": "bin/escodegen.js", "esgenerate": "bin/esgenerate.js" }, "engines": { "node": ">=6.0" }, "optionalDependencies": { "source-map": "~0.6.1" } }, "node_modules/esprima": { "version": "4.0.1", "resolved": "https: "integrity": "sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A==", "dev": true, "bin": { "esparse": "bin/esparse.js", "esvalidate": "bin/esvalidate.js" }, "engines": { "node": ">=4" } }, 
"node_modules/estraverse": { "version": "5.3.0", "resolved": "https: "integrity": "sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA==", "dev": true, "engines": { "node": ">=4.0" } }, "node_modules/esutils": { "version": "2.0.3", "resolved": "https: "integrity": "sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g==", "dev": true, "engines": { "node": ">=0.10.0" } }, "node_modules/execa": { "version": "5.1.1", "resolved": "https: "integrity": "sha512-8uSpZZocAZRBAPIEINJj3Lo9HyGitllczc27Eh5YYojjMFMn8yHMDMaUHE2Jqfq05D/wucwI4JGURyXt1vchyg==", "dev": true, "dependencies": { "cross-spawn": "^7.0.3", "get-stream": "^6.0.0", "human-signals": "^2.1.0", "is-stream": "^2.0.0", "merge-stream": "^2.0.0", "npm-run-path": "^4.0.1", "onetime": "^5.1.2", "signal-exit": "^3.0.3", "strip-final-newline": "^2.0.0" }, "engines": { "node": ">=10" }, "funding": { "url": "https: } }, "node_modules/exit": { "version": "0.1.2", "resolved": "https: "integrity": "sha512-Zk/eNKV2zbjpKzrsQ+n1G6poVbErQxJ0LBOJXaKZ1EViLzH+hrLu9cdXI4zw9dBQJslwBEpbQ2P1oS7nDxs6jQ==", "dev": true, "engines": { "node": ">= 0.8.0" } }, "node_modules/expect": { "version": "29.7.0", "resolved": "https: "integrity": "sha512-2Zks0hf1VLFYI1kbh0I5jP3KHHyCHpkfyHBzsSXRFgl/Bg9mWYfMW8oD+PdMPlEwy5HNsR9JutYy6pMeOh61nw==", "dev": true, "dependencies": { "@jest/expect-utils": "^29.7.0", "jest-get-type": "^29.6.3", "jest-matcher-utils": "^29.7.0", "jest-message-util": "^29.7.0", "jest-util": "^29.7.0" }, "engines": { "node": "^14.15.0 || ^16.10.0 || >=18.0.0" } }, "node_modules/fast-json-stable-stringify": { "version": "2.1.0", "resolved": "https: "integrity": "sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==", "dev": true }, "node_modules/fb-watchman": { "version": "2.0.2", "resolved": "https: "integrity": "sha512-p5161BqbuCaSnB8jIbzQHOlpgsPmK5rJVDfDKO91Axs5NC1uu3HRQm6wt9cd9/+GtQQIO53JdGXXoyDpTAsgYA==", "dev": true, "dependencies": { "bser": "2.1.1" } }, "node_modules/fill-range": { "version": "7.0.1", "resolved": "https: "integrity": "sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==", "dev": true, "dependencies": { "to-regex-range": "^5.0.1" }, "engines": { "node": ">=8" } }, "node_modules/find-up": { "version": "4.1.0", "resolved": "https: "integrity": "sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw==", "dev": true, "dependencies": { "locate-path": "^5.0.0", "path-exists": "^4.0.0" }, "engines": { "node": ">=8" } }, "node_modules/form-data": { "version": "4.0.0", "resolved": "https: "integrity": "sha512-ETEklSGi5t0QMZuiXoA/Q6vcnxcLQP5vdugSpuAyi6SVGi2clPPp+xgEhuMaHC+zGgn31Kd235W35f7Hykkaww==", "dev": true, "dependencies": { "asynckit": "^0.4.0", "combined-stream": "^1.0.8", "mime-types": "^2.1.12" }, "engines": { "node": ">= 6" } }, "node_modules/fs.realpath": { "version": "1.0.0", "resolved": "https: "integrity": "sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==", "dev": true }, "node_modules/fsevents": { "version": "2.3.3", "resolved": "https: "integrity": "sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==", "dev": true, "hasInstallScript": true, "optional": true, "os": [ "darwin" ], "engines": { "node": "^8.16.0 || ^10.6.0 || >=11.0.0" } }, "node_modules/gensync": { "version": "1.0.0-beta.2", "resolved": "https: "integrity": 
"sha512-3hN7NaskYvMDLQY55gnW3NQ+mesEAepTqlg+VEbj7zzqEMBVNhzcGYYeqFo/TlYz6eQiFcp1HcsCZO+nGgS8zg==", "dev": true, "engines": { "node": ">=6.9.0" } }, "node_modules/get-caller-file": { "version": "2.0.5", "resolved": "https: "integrity": "sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==", "dev": true, "engines": { "node": "6.* || 8.* || >= 10.*" } }, "node_modules/get-package-type": { "version": "0.1.0", "resolved": "https: "integrity": "sha512-pjzuKtY64GYfWizNAJ0fr9VqttZkNiK2iS430LtIHzjBEr6bX8Am2zm4sW4Ro5wjWW5cAlRL1qAMTcXbjNAO2Q==", "dev": true, "engines": { "node": ">=8.0.0" } }, "node_modules/get-stream": { "version": "6.0.1", "resolved": "https: "integrity": "sha512-ts6Wi+2j3jQjqi70w5AlN8DFnkSwC+MqmxEzdEALB2qXZYV3X/b1CTfgPLGJNMeAWxdPfU8FO1ms3NUfaHCPYg==", "dev": true, "engines": { "node": ">=10" }, "funding": { "url": "https: } }, "node_modules/glob": { "version": "7.2.3", "resolved": "https: "integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==", "dev": true, "dependencies": { "fs.realpath": "^1.0.0", "inflight": "^1.0.4", "inherits": "2", "minimatch": "^3.1.1", "once": "^1.3.0", "path-is-absolute": "^1.0.0" }, "engines": { "node": "*" }, "funding": { "url": "https: } }, "node_modules/globals": { "version": "11.12.0", "resolved": "https: "integrity": "sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA==", "dev": true, "engines": { "node": ">=4" } }, "node_modules/graceful-fs": { "version": "4.2.11", "resolved": "https: "integrity": "sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==", "dev": true }, "node_modules/has": { "version": "1.0.4", "resolved": "https: "integrity": "sha512-qdSAmqLF6209RFj4VVItywPMbm3vWylknmB3nvNiUIs72xAimcM8nVYxYr7ncvZq5qzk9MKIZR8ijqD/1QuYjQ==", "dev": true, "engines": { "node": ">= 0.4.0" } }, "node_modules/has-flag": { "version": "4.0.0", "resolved": "https: "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", "dev": true, "engines": { "node": ">=8" } }, "node_modules/html-encoding-sniffer": { "version": "3.0.0", "resolved": "https: "integrity": "sha512-oWv4T4yJ52iKrufjnyZPkrN0CH3QnrUqdB6In1g5Fe1mia8GmF36gnfNySxoZtxD5+NmYw1EElVXiBk93UeskA==", "dev": true, "dependencies": { "whatwg-encoding": "^2.0.0" }, "engines": { "node": ">=12" } }, "node_modules/html-escaper": { "version": "2.0.2", "resolved": "https: "integrity": "sha512-H2iMtd0I4Mt5eYiapRdIDjp+XzelXQ0tFE4JS7YFwFevXXMmOp9myNrUvCg0D6ws8iqkRPBfKHgbwig1SmlLfg==", "dev": true }, "node_modules/http-proxy-agent": { "version": "5.0.0", "resolved": "https: "integrity": "sha512-n2hY8YdoRE1i7r6M0w9DIw5GgZN0G25P8zLCRQ8rjXtTU3vsNFBI/vWK/UIeE6g5MUUz6avwAPXmL6Fy9D/90w==", "dev": true, "dependencies": { "@tootallnate/once": "2", "agent-base": "6", "debug": "4" }, "engines": { "node": ">= 6" } }, "node_modules/https-proxy-agent": { "version": "5.0.1", "resolved": "https: "integrity": "sha512-dFcAjpTQFgoLMzC2VwU+C/CbS7uRL0lWmxDITmqm7C+7F0Odmj6s9l6alZc6AELXhrnggM2CeWSXHGOdX2YtwA==", "dev": true, "dependencies": { "agent-base": "6", "debug": "4" }, "engines": { "node": ">= 6" } }, "node_modules/human-signals": { "version": "2.1.0", "resolved": "https: "integrity": "sha512-B4FFZ6q/T2jhhksgkbEW3HBvWIfDW85snkQgawt07S7J5QXTk6BkNV+0yAeZrM5QpMAdYlocGoljn0sJ/WQkFw==", "dev": true, "engines": { "node": ">=10.17.0" } }, "node_modules/iconv-lite": { "version": "0.6.3", "resolved": "https: 
"integrity": "sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw==", "dev": true, "dependencies": { "safer-buffer": ">= 2.1.2 < 3.0.0" }, "engines": { "node": ">=0.10.0" } }, "node_modules/import-local": { "version": "3.1.0", "resolved": "https: "integrity": "sha512-ASB07uLtnDs1o6EHjKpX34BKYDSqnFerfTOJL2HvMqF70LnxpjkzDB8J44oT9pu4AMPkQwf8jl6szgvNd2tRIg==", "dev": true, "dependencies": { "pkg-dir": "^4.2.0", "resolve-cwd": "^3.0.0" }, "bin": { "import-local-fixture": "fixtures/cli.js" }, "engines": { "node": ">=8" }, "funding": { "url": "https: } }, "node_modules/imurmurhash": { "version": "0.1.4", "resolved": "https: "integrity": "sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA==", "dev": true, "engines": { "node": ">=0.8.19" } }, "node_modules/inflight": { "version": "1.0.6", "resolved": "https: "integrity": "sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA==", "dev": true, "dependencies": { "once": "^1.3.0", "wrappy": "1" } }, "node_modules/inherits": { "version": "2.0.4", "resolved": "https: "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==", "dev": true }, "node_modules/is-arrayish": { "version": "0.2.1", "resolved": "https: "integrity": "sha512-zz06S8t0ozoDXMG+ube26zeCTNXcKIPJZJi8hBrF4idCLms4CG9QtK7qBl1boi5ODzFpjswb5JPmHCbMpjaYzg==", "dev": true }, "node_modules/is-core-module": { "version": "2.13.0", "resolved": "https: "integrity": "sha512-Z7dk6Qo8pOCp3l4tsX2C5ZVas4V+UxwQodwZhLopL91TX8UyyHEXafPcyoeeWuLrwzHcr3igO78wNLwHJHsMCQ==", "dev": true, "dependencies": { "has": "^1.0.3" }, "funding": { "url": "https: } }, "node_modules/is-fullwidth-code-point": { "version": "3.0.0", "resolved": "https: "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", "dev": true, "engines": { "node": ">=8" } }, "node_modules/is-generator-fn": { "version": "2.1.0", "resolved": "https: "integrity": "sha512-cTIB4yPYL/Grw0EaSzASzg6bBy9gqCofvWN8okThAYIxKJZC+udlRAmGbM0XLeniEJSs8uEgHPGuHSe1XsOLSQ==", "dev": true, "engines": { "node": ">=6" } }, "node_modules/is-number": { "version": "7.0.0", "resolved": "https: "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", "dev": true, "engines": { "node": ">=0.12.0" } }, "node_modules/is-potential-custom-element-name": { "version": "1.0.1", "resolved": "https: "integrity": "sha512-bCYeRA2rVibKZd+s2625gGnGF/t7DSqDs4dP7CrLA1m7jKWz6pps0LpYLJN8Q64HtmPKJ1hrN3nzPNKFEKOUiQ==", "dev": true }, "node_modules/is-stream": { "version": "2.0.1", "resolved": "https: "integrity": "sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg==", "dev": true, "engines": { "node": ">=8" }, "funding": { "url": "https: } }, "node_modules/isexe": { "version": "2.0.0", "resolved": "https: "integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==", "dev": true }, "node_modules/istanbul-lib-coverage": { "version": "3.2.0", "resolved": "https: "integrity": "sha512-eOeJ5BHCmHYvQK7xt9GkdHuzuCGS1Y6g9Gvnx3Ym33fz/HpLRYxiS0wHNr+m/MBC8B647Xt608vCDEvhl9c6Mw==", "dev": true, "engines": { "node": ">=8" } }, "node_modules/istanbul-lib-instrument": { "version": "6.0.1", "resolved": "https: "integrity": "sha512-EAMEJBsYuyyztxMxW3g7ugGPkrZsV57v0Hmv3mm1uQsmB+QnZuepg731CRaIgeUVSdmsTngOkSnauNF8p7FIhA==", "dev": true, 
"dependencies": { "@babel/core": "^7.12.3", "@babel/parser": "^7.14.7", "@istanbuljs/schema": "^0.1.2", "istanbul-lib-coverage": "^3.2.0", "semver": "^7.5.4" }, "engines": { "node": ">=10" } }, "node_modules/istanbul-lib-instrument/node_modules/lru-cache": { "version": "6.0.0", "resolved": "https: "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", "dev": true, "dependencies": { "yallist": "^4.0.0" }, "engines": { "node": ">=10" } }, "node_modules/istanbul-lib-instrument/node_modules/semver": { "version": "7.5.4", "resolved": "https: "integrity": "sha512-1bCSESV6Pv+i21Hvpxp3Dx+pSD8lIPt8uVjRrxAUt/nbswYc+tK6Y2btiULjd4+fnq15PX+nqQDC7Oft7WkwcA==", "dev": true, "dependencies": { "lru-cache": "^6.0.0" }, "bin": { "semver": "bin/semver.js" }, "engines": { "node": ">=10" } }, "node_modules/istanbul-lib-instrument/node_modules/yallist": { "version": "4.0.0", "resolved": "https: "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", "dev": true }, "node_modules/istanbul-lib-report": { "version": "3.0.1", "resolved": "https: "integrity": "sha512-GCfE1mtsHGOELCU8e/Z7YWzpmybrx/+dSTfLrvY8qRmaY6zXTKWn6WQIjaAFw069icm6GVMNkgu0NzI4iPZUNw==", "dev": true, "dependencies": { "istanbul-lib-coverage": "^3.0.0", "make-dir": "^4.0.0", "supports-color": "^7.1.0" }, "engines": { "node": ">=10" } }, "node_modules/istanbul-lib-source-maps": { "version": "4.0.1", "resolved": "https: "integrity": "sha512-n3s8EwkdFIJCG3BPKBYvskgXGoy88ARzvegkitk60NxRdwltLOTaH7CUiMRXvwYorl0Q712iEjcWB+fK/MrWVw==", "dev": true, "dependencies": { "debug": "^4.1.1", "istanbul-lib-coverage": "^3.0.0", "source-map": "^0.6.1" }, "engines": { "node": ">=10" } }, "node_modules/istanbul-reports": { "version": "3.1.6", "resolved": "https: "integrity": "sha512-TLgnMkKg3iTDsQ9PbPTdpfAK2DzjF9mqUG7RMgcQl8oFjad8ob4laGxv5XV5U9MAfx8D6tSJiUyuAwzLicaxlg==", "dev": true, "dependencies": { "html-escaper": "^2.0.0", "istanbul-lib-report": "^3.0.0" }, "engines": { "node": ">=8" } }, "node_modules/jest": { "version": "29.7.0", "resolved": "https: "integrity": "sha512-NIy3oAFp9shda19hy4HK0HRTWKtPJmGdnvywu01nOqNC2vZg+Z+fvJDxpMQA88eb2I9EcafcdjYgsDthnYTvGw==", "dev": true, "dependencies": { "@jest/core": "^29.7.0", "@jest/types": "^29.6.3", "import-local": "^3.0.2", "jest-cli": "^29.7.0" }, "bin": { "jest": "bin/jest.js" }, "engines": { "node": "^14.15.0 || ^16.10.0 || >=18.0.0" }, "peerDependencies": { "node-notifier": "^8.0.1 || ^9.0.0 || ^10.0.0" }, "peerDependenciesMeta": { "node-notifier": { "optional": true } } }, "node_modules/jest-changed-files": { "version": "29.7.0", "resolved": "https: "integrity": "sha512-fEArFiwf1BpQ+4bXSprcDc3/x4HSzL4al2tozwVpDFpsxALjLYdyiIK4e5Vz66GQJIbXJ82+35PtysofptNX2w==", "dev": true, "dependencies": { "execa": "^5.0.0", "jest-util": "^29.7.0", "p-limit": "^3.1.0" }, "engines": { "node": "^14.15.0 || ^16.10.0 || >=18.0.0" } }, "node_modules/jest-circus": { "version": "29.7.0", "resolved": "https: "integrity": "sha512-3E1nCMgipcTkCocFwM90XXQab9bS+GMsjdpmPrlelaxwD93Ad8iVEjX/vvHPdLPnFf+L40u+5+iutRdA1N9myw==", "dev": true, "dependencies": { "@jest/environment": "^29.7.0", "@jest/expect": "^29.7.0", "@jest/test-result": "^29.7.0", "@jest/types": "^29.6.3", "@types/node": "*", "chalk": "^4.0.0", "co": "^4.6.0", "dedent": "^1.0.0", "is-generator-fn": "^2.0.0", "jest-each": "^29.7.0", "jest-matcher-utils": "^29.7.0", "jest-message-util": "^29.7.0", "jest-runtime": "^29.7.0", "jest-snapshot": "^29.7.0", "jest-util": "^29.7.0", 
"p-limit": "^3.1.0", "pretty-format": "^29.7.0", "pure-rand": "^6.0.0", "slash": "^3.0.0", "stack-utils": "^2.0.3" }, "engines": { "node": "^14.15.0 || ^16.10.0 || >=18.0.0" } }, "node_modules/jest-cli": { "version": "29.7.0", "resolved": "https: "integrity": "sha512-OVVobw2IubN/GSYsxETi+gOe7Ka59EFMR/twOU3Jb2GnKKeMGJB5SGUUrEz3SFVmJASUdZUzy83sLNNQ2gZslg==", "dev": true, "dependencies": { "@jest/core": "^29.7.0", "@jest/test-result": "^29.7.0", "@jest/types": "^29.6.3", "chalk": "^4.0.0", "create-jest": "^29.7.0", "exit": "^0.1.2", "import-local": "^3.0.2", "jest-config": "^29.7.0", "jest-util": "^29.7.0", "jest-validate": "^29.7.0", "yargs": "^17.3.1" }, "bin": { "jest": "bin/jest.js" }, "engines": { "node": "^14.15.0 || ^16.10.0 || >=18.0.0" }, "peerDependencies": { "node-notifier": "^8.0.1 || ^9.0.0 || ^10.0.0" }, "peerDependenciesMeta": { "node-notifier": { "optional": true } } }, "node_modules/jest-config": { "version": "29.7.0", "resolved": "https: "integrity": "sha512-uXbpfeQ7R6TZBqI3/TxCU4q4ttk3u0PJeC+E0zbfSoSjq6bJ7buBPxzQPL0ifrkY4DNu4JUdk0ImlBUYi840eQ==", "dev": true, "dependencies": { "@babel/core": "^7.11.6", "@jest/test-sequencer": "^29.7.0", "@jest/types": "^29.6.3", "babel-jest": "^29.7.0", "chalk": "^4.0.0", "ci-info": "^3.2.0", "deepmerge": "^4.2.2", "glob": "^7.1.3", "graceful-fs": "^4.2.9", "jest-circus": "^29.7.0", "jest-environment-node": "^29.7.0", "jest-get-type": "^29.6.3", "jest-regex-util": "^29.6.3", "jest-resolve": "^29.7.0", "jest-runner": "^29.7.0", "jest-util": "^29.7.0", "jest-validate": "^29.7.0", "micromatch": "^4.0.4", "parse-json": "^5.2.0", "pretty-format": "^29.7.0", "slash": "^3.0.0", "strip-json-comments": "^3.1.1" }, "engines": { "node": "^14.15.0 || ^16.10.0 || >=18.0.0" }, "peerDependencies": { "@types/node": "*", "ts-node": ">=9.0.0" }, "peerDependenciesMeta": { "@types/node": { "optional": true }, "ts-node": { "optional": true } } }, "node_modules/jest-diff": { "version": "29.7.0", "resolved": "https: "integrity": "sha512-LMIgiIrhigmPrs03JHpxUh2yISK3vLFPkAodPeo0+BuF7wA2FoQbkEg1u8gBYBThncu7e1oEDUfIXVuTqLRUjw==", "dev": true, "dependencies": { "chalk": "^4.0.0", "diff-sequences": "^29.6.3", "jest-get-type": "^29.6.3", "pretty-format": "^29.7.0" }, "engines": { "node": "^14.15.0 || ^16.10.0 || >=18.0.0" } }, "node_modules/jest-docblock": { "version": "29.7.0", "resolved": "https: "integrity": "sha512-q617Auw3A612guyaFgsbFeYpNP5t2aoUNLwBUbc/0kD1R4t9ixDbyFTHd1nok4epoVFpr7PmeWHrhvuV3XaJ4g==", "dev": true, "dependencies": { "detect-newline": "^3.0.0" }, "engines": { "node": "^14.15.0 || ^16.10.0 || >=18.0.0" } }, "node_modules/jest-each": { "version": "29.7.0", "resolved": "https: "integrity": "sha512-gns+Er14+ZrEoC5fhOfYCY1LOHHr0TI+rQUHZS8Ttw2l7gl+80eHc/gFf2Ktkw0+SIACDTeWvpFcv3B04VembQ==", "dev": true, "dependencies": { "@jest/types": "^29.6.3", "chalk": "^4.0.0", "jest-get-type": "^29.6.3", "jest-util": "^29.7.0", "pretty-format": "^29.7.0" }, "engines": { "node": "^14.15.0 || ^16.10.0 || >=18.0.0" } }, "node_modules/jest-environment-jsdom": { "version": "29.7.0", "resolved": "https: "integrity": "sha512-k9iQbsf9OyOfdzWH8HDmrRT0gSIcX+FLNW7IQq94tFX0gynPwqDTW0Ho6iMVNjGz/nb+l/vW3dWM2bbLLpkbXA==", "dev": true, "dependencies": { "@jest/environment": "^29.7.0", "@jest/fake-timers": "^29.7.0", "@jest/types": "^29.6.3", "@types/jsdom": "^20.0.0", "@types/node": "*", "jest-mock": "^29.7.0", "jest-util": "^29.7.0", "jsdom": "^20.0.0" }, "engines": { "node": "^14.15.0 || ^16.10.0 || >=18.0.0" }, "peerDependencies": { "canvas": "^2.5.0" }, "peerDependenciesMeta": { 
"canvas": { "optional": true } } }, "node_modules/jest-environment-node": { "version": "29.7.0", "resolved": "https: "integrity": "sha512-DOSwCRqXirTOyheM+4d5YZOrWcdu0LNZ87ewUoywbcb2XR4wKgqiG8vNeYwhjFMbEkfju7wx2GYH0P2gevGvFw==", "dev": true, "dependencies": { "@jest/environment": "^29.7.0", "@jest/fake-timers": "^29.7.0", "@jest/types": "^29.6.3", "@types/node": "*", "jest-mock": "^29.7.0", "jest-util": "^29.7.0" }, "engines": { "node": "^14.15.0 || ^16.10.0 || >=18.0.0" } }, "node_modules/jest-get-type": { "version": "29.6.3", "resolved": "https: "integrity": "sha512-zrteXnqYxfQh7l5FHyL38jL39di8H8rHoecLH3JNxH3BwOrBsNeabdap5e0I23lD4HHI8W5VFBZqG4Eaq5LNcw==", "dev": true, "engines": { "node": "^14.15.0 || ^16.10.0 || >=18.0.0" } }, "node_modules/jest-haste-map": { "version": "29.7.0", "resolved": "https: "integrity": "sha512-fP8u2pyfqx0K1rGn1R9pyE0/KTn+G7PxktWidOBTqFPLYX0b9ksaMFkhK5vrS3DVun09pckLdlx90QthlW7AmA==", "dev": true, "dependencies": { "@jest/types": "^29.6.3", "@types/graceful-fs": "^4.1.3", "@types/node": "*", "anymatch": "^3.0.3", "fb-watchman": "^2.0.0", "graceful-fs": "^4.2.9", "jest-regex-util": "^29.6.3", "jest-util": "^29.7.0", "jest-worker": "^29.7.0", "micromatch": "^4.0.4", "walker": "^1.0.8" }, "engines": { "node": "^14.15.0 || ^16.10.0 || >=18.0.0" }, "optionalDependencies": { "fsevents": "^2.3.2" } }, "node_modules/jest-leak-detector": { "version": "29.7.0", "resolved": "https: "integrity": "sha512-kYA8IJcSYtST2BY9I+SMC32nDpBT3J2NvWJx8+JCuCdl/CR1I4EKUJROiP8XtCcxqgTTBGJNdbB1A8XRKbTetw==", "dev": true, "dependencies": { "jest-get-type": "^29.6.3", "pretty-format": "^29.7.0" }, "engines": { "node": "^14.15.0 || ^16.10.0 || >=18.0.0" } }, "node_modules/jest-matcher-utils": { "version": "29.7.0", "resolved": "https: "integrity": "sha512-sBkD+Xi9DtcChsI3L3u0+N0opgPYnCRPtGcQYrgXmR+hmt/fYfWAL0xRXYU8eWOdfuLgBe0YCW3AFtnRLagq/g==", "dev": true, "dependencies": { "chalk": "^4.0.0", "jest-diff": "^29.7.0", "jest-get-type": "^29.6.3", "pretty-format": "^29.7.0" }, "engines": { "node": "^14.15.0 || ^16.10.0 || >=18.0.0" } }, "node_modules/jest-message-util": { "version": "29.7.0", "resolved": "https: "integrity": "sha512-GBEV4GRADeP+qtB2+6u61stea8mGcOT4mCtrYISZwfu9/ISHFJ/5zOMXYbpBE9RsS5+Gb63DW4FgmnKJ79Kf6w==", "dev": true, "dependencies": { "@babel/code-frame": "^7.12.13", "@jest/types": "^29.6.3", "@types/stack-utils": "^2.0.0", "chalk": "^4.0.0", "graceful-fs": "^4.2.9", "micromatch": "^4.0.4", "pretty-format": "^29.7.0", "slash": "^3.0.0", "stack-utils": "^2.0.3" }, "engines": { "node": "^14.15.0 || ^16.10.0 || >=18.0.0" } }, "node_modules/jest-mock": { "version": "29.7.0", "resolved": "https: "integrity": "sha512-ITOMZn+UkYS4ZFh83xYAOzWStloNzJFO2s8DWrE4lhtGD+AorgnbkiKERe4wQVBydIGPx059g6riW5Btp6Llnw==", "dev": true, "dependencies": { "@jest/types": "^29.6.3", "@types/node": "*", "jest-util": "^29.7.0" }, "engines": { "node": "^14.15.0 || ^16.10.0 || >=18.0.0" } }, "node_modules/jest-pnp-resolver": { "version": "1.2.3", "resolved": "https: "integrity": "sha512-+3NpwQEnRoIBtx4fyhblQDPgJI0H1IEIkX7ShLUjPGA7TtUTvI1oiKi3SR4oBR0hQhQR80l4WAe5RrXBwWMA8w==", "dev": true, "engines": { "node": ">=6" }, "peerDependencies": { "jest-resolve": "*" }, "peerDependenciesMeta": { "jest-resolve": { "optional": true } } }, "node_modules/jest-regex-util": { "version": "29.6.3", "resolved": "https: "integrity": "sha512-KJJBsRCyyLNWCNBOvZyRDnAIfUiRJ8v+hOBQYGn8gDyF3UegwiP4gwRR3/SDa42g1YbVycTidUF3rKjyLFDWbg==", "dev": true, "engines": { "node": "^14.15.0 || ^16.10.0 || >=18.0.0" } }, 
"node_modules/jest-resolve": { "version": "29.7.0", "resolved": "https: "integrity": "sha512-IOVhZSrg+UvVAshDSDtHyFCCBUl/Q3AAJv8iZ6ZjnZ74xzvwuzLXid9IIIPgTnY62SJjfuupMKZsZQRsCvxEgA==", "dev": true, "dependencies": { "chalk": "^4.0.0", "graceful-fs": "^4.2.9", "jest-haste-map": "^29.7.0", "jest-pnp-resolver": "^1.2.2", "jest-util": "^29.7.0", "jest-validate": "^29.7.0", "resolve": "^1.20.0", "resolve.exports": "^2.0.0", "slash": "^3.0.0" }, "engines": { "node": "^14.15.0 || ^16.10.0 || >=18.0.0" } }, "node_modules/jest-resolve-dependencies": { "version": "29.7.0", "resolved": "https: "integrity": "sha512-un0zD/6qxJ+S0et7WxeI3H5XSe9lTBBR7bOHCHXkKR6luG5mwDDlIzVQ0V5cZCuoTgEdcdwzTghYkTWfubi+nA==", "dev": true, "dependencies": { "jest-regex-util": "^29.6.3", "jest-snapshot": "^29.7.0" }, "engines": { "node": "^14.15.0 || ^16.10.0 || >=18.0.0" } }, "node_modules/jest-runner": { "version": "29.7.0", "resolved": "https: "integrity": "sha512-fsc4N6cPCAahybGBfTRcq5wFR6fpLznMg47sY5aDpsoejOcVYFb07AHuSnR0liMcPTgBsA3ZJL6kFOjPdoNipQ==", "dev": true, "dependencies": { "@jest/console": "^29.7.0", "@jest/environment": "^29.7.0", "@jest/test-result": "^29.7.0", "@jest/transform": "^29.7.0", "@jest/types": "^29.6.3", "@types/node": "*", "chalk": "^4.0.0", "emittery": "^0.13.1", "graceful-fs": "^4.2.9", "jest-docblock": "^29.7.0", "jest-environment-node": "^29.7.0", "jest-haste-map": "^29.7.0", "jest-leak-detector": "^29.7.0", "jest-message-util": "^29.7.0", "jest-resolve": "^29.7.0", "jest-runtime": "^29.7.0", "jest-util": "^29.7.0", "jest-watcher": "^29.7.0", "jest-worker": "^29.7.0", "p-limit": "^3.1.0", "source-map-support": "0.5.13" }, "engines": { "node": "^14.15.0 || ^16.10.0 || >=18.0.0" } }, "node_modules/jest-runtime": { "version": "29.7.0", "resolved": "https: "integrity": "sha512-gUnLjgwdGqW7B4LvOIkbKs9WGbn+QLqRQQ9juC6HndeDiezIwhDP+mhMwHWCEcfQ5RUXa6OPnFF8BJh5xegwwQ==", "dev": true, "dependencies": { "@jest/environment": "^29.7.0", "@jest/fake-timers": "^29.7.0", "@jest/globals": "^29.7.0", "@jest/source-map": "^29.6.3", "@jest/test-result": "^29.7.0", "@jest/transform": "^29.7.0", "@jest/types": "^29.6.3", "@types/node": "*", "chalk": "^4.0.0", "cjs-module-lexer": "^1.0.0", "collect-v8-coverage": "^1.0.0", "glob": "^7.1.3", "graceful-fs": "^4.2.9", "jest-haste-map": "^29.7.0", "jest-message-util": "^29.7.0", "jest-mock": "^29.7.0", "jest-regex-util": "^29.6.3", "jest-resolve": "^29.7.0", "jest-snapshot": "^29.7.0", "jest-util": "^29.7.0", "slash": "^3.0.0", "strip-bom": "^4.0.0" }, "engines": { "node": "^14.15.0 || ^16.10.0 || >=18.0.0" } }, "node_modules/jest-snapshot": { "version": "29.7.0", "resolved": "https: "integrity": "sha512-Rm0BMWtxBcioHr1/OX5YCP8Uov4riHvKPknOGs804Zg9JGZgmIBkbtlxJC/7Z4msKYVbIJtfU+tKb8xlYNfdkw==", "dev": true, "dependencies": { "@babel/core": "^7.11.6", "@babel/generator": "^7.7.2", "@babel/plugin-syntax-jsx": "^7.7.2", "@babel/plugin-syntax-typescript": "^7.7.2", "@babel/types": "^7.3.3", "@jest/expect-utils": "^29.7.0", "@jest/transform": "^29.7.0", "@jest/types": "^29.6.3", "babel-preset-current-node-syntax": "^1.0.0", "chalk": "^4.0.0", "expect": "^29.7.0", "graceful-fs": "^4.2.9", "jest-diff": "^29.7.0", "jest-get-type": "^29.6.3", "jest-matcher-utils": "^29.7.0", "jest-message-util": "^29.7.0", "jest-util": "^29.7.0", "natural-compare": "^1.4.0", "pretty-format": "^29.7.0", "semver": "^7.5.3" }, "engines": { "node": "^14.15.0 || ^16.10.0 || >=18.0.0" } }, "node_modules/jest-snapshot/node_modules/lru-cache": { "version": "6.0.0", "resolved": "https: "integrity": 
"sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", "dev": true, "dependencies": { "yallist": "^4.0.0" }, "engines": { "node": ">=10" } }, "node_modules/jest-snapshot/node_modules/semver": { "version": "7.5.4", "resolved": "https: "integrity": "sha512-1bCSESV6Pv+i21Hvpxp3Dx+pSD8lIPt8uVjRrxAUt/nbswYc+tK6Y2btiULjd4+fnq15PX+nqQDC7Oft7WkwcA==", "dev": true, "dependencies": { "lru-cache": "^6.0.0" }, "bin": { "semver": "bin/semver.js" }, "engines": { "node": ">=10" } }, "node_modules/jest-snapshot/node_modules/yallist": { "version": "4.0.0", "resolved": "https: "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", "dev": true }, "node_modules/jest-util": { "version": "29.7.0", "resolved": "https: "integrity": "sha512-z6EbKajIpqGKU56y5KBUgy1dt1ihhQJgWzUlZHArA/+X2ad7Cb5iF+AK1EWVL/Bo7Rz9uurpqw6SiBCefUbCGA==", "dev": true, "dependencies": { "@jest/types": "^29.6.3", "@types/node": "*", "chalk": "^4.0.0", "ci-info": "^3.2.0", "graceful-fs": "^4.2.9", "picomatch": "^2.2.3" }, "engines": { "node": "^14.15.0 || ^16.10.0 || >=18.0.0" } }, "node_modules/jest-validate": { "version": "29.7.0", "resolved": "https: "integrity": "sha512-ZB7wHqaRGVw/9hST/OuFUReG7M8vKeq0/J2egIGLdvjHCmYqGARhzXmtgi+gVeZ5uXFF219aOc3Ls2yLg27tkw==", "dev": true, "dependencies": { "@jest/types": "^29.6.3", "camelcase": "^6.2.0", "chalk": "^4.0.0", "jest-get-type": "^29.6.3", "leven": "^3.1.0", "pretty-format": "^29.7.0" }, "engines": { "node": "^14.15.0 || ^16.10.0 || >=18.0.0" } }, "node_modules/jest-validate/node_modules/camelcase": { "version": "6.3.0", "resolved": "https: "integrity": "sha512-Gmy6FhYlCY7uOElZUSbxo2UCDH8owEk996gkbrpsgGtrJLM3J7jGxl9Ic7Qwwj4ivOE5AWZWRMecDdF7hqGjFA==", "dev": true, "engines": { "node": ">=10" }, "funding": { "url": "https: } }, "node_modules/jest-watcher": { "version": "29.7.0", "resolved": "https: "integrity": "sha512-49Fg7WXkU3Vl2h6LbLtMQ/HyB6rXSIX7SqvBLQmssRBGN9I0PNvPmAmCWSOY6SOvrjhI/F7/bGAv9RtnsPA03g==", "dev": true, "dependencies": { "@jest/test-result": "^29.7.0", "@jest/types": "^29.6.3", "@types/node": "*", "ansi-escapes": "^4.2.1", "chalk": "^4.0.0", "emittery": "^0.13.1", "jest-util": "^29.7.0", "string-length": "^4.0.1" }, "engines": { "node": "^14.15.0 || ^16.10.0 || >=18.0.0" } }, "node_modules/jest-worker": { "version": "29.7.0", "resolved": "https: "integrity": "sha512-eIz2msL/EzL9UFTFFx7jBTkeZfku0yUAyZZZmJ93H2TYEiroIx2PQjEXcwYtYl8zXCxb+PAmA2hLIt/6ZEkPHw==", "dev": true, "dependencies": { "@types/node": "*", "jest-util": "^29.7.0", "merge-stream": "^2.0.0", "supports-color": "^8.0.0" }, "engines": { "node": "^14.15.0 || ^16.10.0 || >=18.0.0" } }, "node_modules/jest-worker/node_modules/supports-color": { "version": "8.1.1", "resolved": "https: "integrity": "sha512-MpUEN2OodtUzxvKQl72cUF7RQ5EiHsGvSsVG0ia9c5RbWGL2CI4C7EpPS8UTBIplnlzZiNuV56w+FuNxy3ty2Q==", "dev": true, "dependencies": { "has-flag": "^4.0.0" }, "engines": { "node": ">=10" }, "funding": { "url": "https: } }, "node_modules/js-tokens": { "version": "4.0.0", "resolved": "https: "integrity": "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==", "dev": true }, "node_modules/js-yaml": { "version": "3.14.1", "resolved": "https: "integrity": "sha512-okMH7OXXJ7YrN9Ok3/SXrnu4iX9yOk+25nqX4imS2npuvTYDmo/QEZoqwZkYaIDk3jVvBOTOIEgEhaLOynBS9g==", "dev": true, "dependencies": { "argparse": "^1.0.7", "esprima": "^4.0.0" }, "bin": { "js-yaml": "bin/js-yaml.js" } }, "node_modules/jsdom": { 
"version": "20.0.3", "resolved": "https: "integrity": "sha512-SYhBvTh89tTfCD/CRdSOm13mOBa42iTaTyfyEWBdKcGdPxPtLFBXuHR8XHb33YNYaP+lLbmSvBTsnoesCNJEsQ==", "dev": true, "dependencies": { "abab": "^2.0.6", "acorn": "^8.8.1", "acorn-globals": "^7.0.0", "cssom": "^0.5.0", "cssstyle": "^2.3.0", "data-urls": "^3.0.2", "decimal.js": "^10.4.2", "domexception": "^4.0.0", "escodegen": "^2.0.0", "form-data": "^4.0.0", "html-encoding-sniffer": "^3.0.0", "http-proxy-agent": "^5.0.0", "https-proxy-agent": "^5.0.1", "is-potential-custom-element-name": "^1.0.1", "nwsapi": "^2.2.2", "parse5": "^7.1.1", "saxes": "^6.0.0", "symbol-tree": "^3.2.4", "tough-cookie": "^4.1.2", "w3c-xmlserializer": "^4.0.0", "webidl-conversions": "^7.0.0", "whatwg-encoding": "^2.0.0", "whatwg-mimetype": "^3.0.0", "whatwg-url": "^11.0.0", "ws": "^8.11.0", "xml-name-validator": "^4.0.0" }, "engines": { "node": ">=14" }, "peerDependencies": { "canvas": "^2.5.0" }, "peerDependenciesMeta": { "canvas": { "optional": true } } }, "node_modules/jsesc": { "version": "2.5.2", "resolved": "https: "integrity": "sha512-OYu7XEzjkCQ3C5Ps3QIZsQfNpqoJyZZA99wd9aWd05NCtC5pWOkShK2mkL6HXQR6/Cy2lbNdPlZBpuQHXE63gA==", "dev": true, "bin": { "jsesc": "bin/jsesc" }, "engines": { "node": ">=4" } }, "node_modules/json-parse-even-better-errors": { "version": "2.3.1", "resolved": "https: "integrity": "sha512-xyFwyhro/JEof6Ghe2iz2NcXoj2sloNsWr/XsERDK/oiPCfaNhl5ONfp+jQdAZRQQ0IJWNzH9zIZF7li91kh2w==", "dev": true }, "node_modules/json5": { "version": "2.2.3", "resolved": "https: "integrity": "sha512-XmOWe7eyHYH14cLdVPoyg+GOH3rYX++KpzrylJwSW98t3Nk+U8XOl8FWKOgwtzdb8lXGf6zYwDUzeHMWfxasyg==", "dev": true, "bin": { "json5": "lib/cli.js" }, "engines": { "node": ">=6" } }, "node_modules/kleur": { "version": "3.0.3", "resolved": "https: "integrity": "sha512-eTIzlVOSUR+JxdDFepEYcBMtZ9Qqdef+rnzWdRZuMbOywu5tO2w2N7rqjoANZ5k9vywhL6Br1VRjUIgTQx4E8w==", "dev": true, "engines": { "node": ">=6" } }, "node_modules/leven": { "version": "3.1.0", "resolved": "https: "integrity": "sha512-qsda+H8jTaUaN/x5vzW2rzc+8Rw4TAQ/4KjB46IwK5VH+IlVeeeje/EoZRpiXvIqjFgK84QffqPztGI3VBLG1A==", "dev": true, "engines": { "node": ">=6" } }, "node_modules/lines-and-columns": { "version": "1.2.4", "resolved": "https: "integrity": "sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg==", "dev": true }, "node_modules/locate-path": { "version": "5.0.0", "resolved": "https: "integrity": "sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g==", "dev": true, "dependencies": { "p-locate": "^4.1.0" }, "engines": { "node": ">=8" } }, "node_modules/lodash.debounce": { "version": "4.0.8", "resolved": "https: "integrity": "sha512-FT1yDzDYEoYWhnSGnpE/4Kj1fLZkDFyqRb7fNt6FdYOSxlUWAtp42Eh6Wb0rGIv/m9Bgo7x4GhQbm5Ys4SG5ow==", "dev": true }, "node_modules/lru-cache": { "version": "5.1.1", "resolved": "https: "integrity": "sha512-KpNARQA3Iwv+jTA0utUVVbrh+Jlrr1Fv0e56GGzAFOXN7dk/FviaDW8LHmK52DlcH4WP2n6gI8vN1aesBFgo9w==", "dev": true, "dependencies": { "yallist": "^3.0.2" } }, "node_modules/make-dir": { "version": "4.0.0", "resolved": "https: "integrity": "sha512-hXdUTZYIVOt1Ex "dev": true, "dependencies": { "semver": "^7.5.3" }, "engines": { "node": ">=10" }, "funding": { "url": "https: } }, "node_modules/make-dir/node_modules/lru-cache": { "version": "6.0.0", "resolved": "https: "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", "dev": true, "dependencies": { "yallist": "^4.0.0" }, 
"engines": { "node": ">=10" } }, "node_modules/make-dir/node_modules/semver": { "version": "7.5.4", "resolved": "https: "integrity": "sha512-1bCSESV6Pv+i21Hvpxp3Dx+pSD8lIPt8uVjRrxAUt/nbswYc+tK6Y2btiULjd4+fnq15PX+nqQDC7Oft7WkwcA==", "dev": true, "dependencies": { "lru-cache": "^6.0.0" }, "bin": { "semver": "bin/semver.js" }, "engines": { "node": ">=10" } }, "node_modules/make-dir/node_modules/yallist": { "version": "4.0.0", "resolved": "https: "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", "dev": true }, "node_modules/makeerror": { "version": "1.0.12", "resolved": "https: "integrity": "sha512-JmqCvUhmt43madlpFzG4BQzG2Z3m6tvQDNKdClZnO3VbIudJYmxsT0FNJMeiB2+JTSlTQTSbU8QdesVmwJcmLg==", "dev": true, "dependencies": { "tmpl": "1.0.5" } }, "node_modules/merge-stream": { "version": "2.0.0", "resolved": "https: "integrity": "sha512-abv/qOcuPfk3URPfDzmZU1LKmuw8kT+0nIHvKrKgFrwifol/doWcdA4ZqsWQ8ENrFKkd67Mfpo/LovbIUsbt3w==", "dev": true }, "node_modules/micromatch": { "version": "4.0.5", "resolved": "https: "integrity": "sha512-DMy+ERcEW2q8Z2Po+WNXuw3c5YaUSFjAO5GsJqfEl7UjvtIuFKO6ZrKvcItdy98dwFI2N1tg3zNIdKaQT+aNdA==", "dev": true, "dependencies": { "braces": "^3.0.2", "picomatch": "^2.3.1" }, "engines": { "node": ">=8.6" } }, "node_modules/mime-db": { "version": "1.52.0", "resolved": "https: "integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==", "dev": true, "engines": { "node": ">= 0.6" } }, "node_modules/mime-types": { "version": "2.1.35", "resolved": "https: "integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==", "dev": true, "dependencies": { "mime-db": "1.52.0" }, "engines": { "node": ">= 0.6" } }, "node_modules/mimic-fn": { "version": "2.1.0", "resolved": "https: "integrity": "sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg==", "dev": true, "engines": { "node": ">=6" } }, "node_modules/minimatch": { "version": "3.1.2", "resolved": "https: "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", "dev": true, "dependencies": { "brace-expansion": "^1.1.7" }, "engines": { "node": "*" } }, "node_modules/ms": { "version": "2.1.2", "resolved": "https: "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==", "dev": true }, "node_modules/natural-compare": { "version": "1.4.0", "resolved": "https: "integrity": "sha512-OWND8ei3VtNC9h7V60qff3SVobHr996CTwgxubgyQYEpg290h9J0buyECNNJexkFm5sOajh5G116RYA1c8ZMSw==", "dev": true }, "node_modules/node-int64": { "version": "0.4.0", "resolved": "https: "integrity": "sha512-O5lz91xSOeoXP6DulyHfllpq+Eg00MWitZIbtPfoSEvqIHdl5gfcY6hYzDWnj0qD5tz52PI08u9qUvSVeUBeHw==", "dev": true }, "node_modules/node-releases": { "version": "2.0.13", "resolved": "https: "integrity": "sha512-uYr7J37ae/ORWdZeQ1xxMJe3NtdmqMC/JZK+geofDrkLUApKRHPd18/TxtBOJ4A0/+uUIliorNrfYV6s1b02eQ==", "dev": true }, "node_modules/normalize-path": { "version": "3.0.0", "resolved": "https: "integrity": "sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==", "dev": true, "engines": { "node": ">=0.10.0" } }, "node_modules/npm-run-path": { "version": "4.0.1", "resolved": "https: "integrity": "sha512-S48WzZW777zhNIrn7gxOlISNAqi9ZC/uQFnRdbeIHhZhCA6UqpkOT8T1G7BvfdgP4Er8gF4sUbaS0i7QvIfCWw==", "dev": true, "dependencies": { "path-key": "^3.0.0" }, 
"engines": { "node": ">=8" } }, "node_modules/nwsapi": { "version": "2.2.7", "resolved": "https: "integrity": "sha512-ub5E4+FBPKwAZx0UwIQOjYWGHTEq5sPqHQNRN8Z9e4A7u3Tj1weLJsL59yH9vmvqEtBHaOmT6cYQKIZOxp35FQ==", "dev": true }, "node_modules/once": { "version": "1.4.0", "resolved": "https: "integrity": "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==", "dev": true, "dependencies": { "wrappy": "1" } }, "node_modules/onetime": { "version": "5.1.2", "resolved": "https: "integrity": "sha512-kbpaSSGJTWdAY5KPVeMOKXSrPtr8C8C7wodJbcsd51jRnmD+GZu8Y0VoU6Dm5Z4vWr0Ig/1NKuWRKf7j5aaYSg==", "dev": true, "dependencies": { "mimic-fn": "^2.1.0" }, "engines": { "node": ">=6" }, "funding": { "url": "https: } }, "node_modules/p-limit": { "version": "3.1.0", "resolved": "https: "integrity": "sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==", "dev": true, "dependencies": { "yocto-queue": "^0.1.0" }, "engines": { "node": ">=10" }, "funding": { "url": "https: } }, "node_modules/p-locate": { "version": "4.1.0", "resolved": "https: "integrity": "sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A==", "dev": true, "dependencies": { "p-limit": "^2.2.0" }, "engines": { "node": ">=8" } }, "node_modules/p-locate/node_modules/p-limit": { "version": "2.3.0", "resolved": "https: "integrity": "sha512- "dev": true, "dependencies": { "p-try": "^2.0.0" }, "engines": { "node": ">=6" }, "funding": { "url": "https: } }, "node_modules/p-try": { "version": "2.2.0", "resolved": "https: "integrity": "sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==", "dev": true, "engines": { "node": ">=6" } }, "node_modules/parse-json": { "version": "5.2.0", "resolved": "https: "integrity": "sha512-ayCKvm/phCGxOkYRSCM82iDwct8/EonSEgCSxWxD7ve6jHggsFl4fZVQBPRNgQoKiuV/odhFrGzQXZwbifC8Rg==", "dev": true, "dependencies": { "@babel/code-frame": "^7.0.0", "error-ex": "^1.3.1", "json-parse-even-better-errors": "^2.3.0", "lines-and-columns": "^1.1.6" }, "engines": { "node": ">=8" }, "funding": { "url": "https: } }, "node_modules/parse5": { "version": "7.1.2", "resolved": "https: "integrity": "sha512-Czj1WaSVpaoj0wbhMzLmWD69anp2WH7FXMB9n1Sy8/ZFF9jolSQVMu1Ij5WIyGmcBmhk7EOndpO4mIpihVqAXw==", "dev": true, "dependencies": { "entities": "^4.4.0" }, "funding": { "url": "https: } }, "node_modules/path-exists": { "version": "4.0.0", "resolved": "https: "integrity": "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==", "dev": true, "engines": { "node": ">=8" } }, "node_modules/path-is-absolute": { "version": "1.0.1", "resolved": "https: "integrity": "sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg==", "dev": true, "engines": { "node": ">=0.10.0" } }, "node_modules/path-key": { "version": "3.1.1", "resolved": "https: "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==", "dev": true, "engines": { "node": ">=8" } }, "node_modules/path-parse": { "version": "1.0.7", "resolved": "https: "integrity": "sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==", "dev": true }, "node_modules/picocolors": { "version": "1.0.0", "resolved": "https: "integrity": "sha512-1fygroTLlHu66zi26VoTDv8yRgm0Fccecssto+MhsZ0D/DGW2sm8E8AjW7NU5VVTRt5GxbeZ5qBuJr+HyLYkjQ==", "dev": true }, "node_modules/picomatch": { "version": "2.3.1", 
"resolved": "https: "integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==", "dev": true, "engines": { "node": ">=8.6" }, "funding": { "url": "https: } }, "node_modules/pirates": { "version": "4.0.6", "resolved": "https: "integrity": "sha512-saLsH7WeYYPiD25LDuLRRY/i+6HaPYr6G1OUlN39otzkSTxKnubR9RTxS3/Kk50s1g2JTgFwWQDQyplC5/SHZg==", "dev": true, "engines": { "node": ">= 6" } }, "node_modules/pkg-dir": { "version": "4.2.0", "resolved": "https: "integrity": "sha512-HRDzbaKjC+AOWVXxAU/x54COGeIv9eb+6CkDSQoNTt4XyWoIJvuPsXizxu/Fr23EiekbtZwmh1IcIG/l/a10GQ==", "dev": true, "dependencies": { "find-up": "^4.0.0" }, "engines": { "node": ">=8" } }, "node_modules/pretty-format": { "version": "29.7.0", "resolved": "https: "integrity": "sha512-Pdlw/oPxN+aXdmM9R00JVC9WVFoCLTKJvDVLgmJ+qAffBMxsV85l/Lu7sNx4zSzPyoL2euImuEwHhOXdEgNFZQ==", "dev": true, "dependencies": { "@jest/schemas": "^29.6.3", "ansi-styles": "^5.0.0", "react-is": "^18.0.0" }, "engines": { "node": "^14.15.0 || ^16.10.0 || >=18.0.0" } }, "node_modules/pretty-format/node_modules/ansi-styles": { "version": "5.2.0", "resolved": "https: "integrity": "sha512-Cxwpt2SfTzTtXcfOlzGEee8O+c+MmUgGrNiBcXnuWxuFJHe6a5Hz7qwhwe5OgaSYI0IJvkLqWX1ASG+cJOkEiA==", "dev": true, "engines": { "node": ">=10" }, "funding": { "url": "https: } }, "node_modules/prompts": { "version": "2.4.2", "resolved": "https: "integrity": "sha512-NxNv/kLguCA7p3jE8oL2aEBsrJWgAakBpgmgK6lpPWV+WuOmY6r2/zbAVnP+T8bQlA0nzHXSJSJW0Hq7ylaD2Q==", "dev": true, "dependencies": { "kleur": "^3.0.3", "sisteransi": "^1.0.5" }, "engines": { "node": ">= 6" } }, "node_modules/psl": { "version": "1.9.0", "resolved": "https: "integrity": "sha512-E/ZsdU4HLs/68gYzgGTkMicWTLPdAftJLfJFlLUAAKZGkStNU72sZjT66SnMDVOfOWY/YAoiD7Jxa9iHvngcag==", "dev": true }, "node_modules/punycode": { "version": "2.3.0", "resolved": "https: "integrity": "sha512-rRV+zQD8tVFys26lAGR9WUuS4iUAngJScM+ZRSKtvl5tKeZ2t5bvdNFdNHBW9FWR4guGHlgmsZ1G7BSm2wTbuA==", "dev": true, "engines": { "node": ">=6" } }, "node_modules/pure-rand": { "version": "6.0.4", "resolved": "https: "integrity": "sha512-LA0Y9kxMYv47GIPJy6MI84fqTd2HmYZI83W/kM/SkKfDlajnZYfmXFTxkbY+xSBPkLJxltMa9hIkmdc29eguMA==", "dev": true, "funding": [ { "type": "individual", "url": "https: }, { "type": "opencollective", "url": "https: } ] }, "node_modules/querystringify": { "version": "2.2.0", "resolved": "https: "integrity": "sha512-FIqgj2EUvTa7R50u0rGsyTftzjYmv/a3hO345bZNrqabNqjtgiDMgmo4mkUjd+nzU5oF3dClKqFIPUKybUyqoQ==", "dev": true }, "node_modules/react-is": { "version": "18.2.0", "resolved": "https: "integrity": "sha512-xWGDIW6x921xtzPkhiULtthJHoJvBbF3q26fzloPCK0hsvxtPVelvftw3zjbHWSkR2km9Z+4uxbDDK/6Zw9B8w==", "dev": true }, "node_modules/regenerate": { "version": "1.4.2", "resolved": "https: "integrity": "sha512-zrceR/XhGYU/d/opr2EKO7aRHUeiBI8qjtfHqADTwZd6Szfy16la6kqD0MIUs5z5hx6AaKa+PixpPrR289+I0A==", "dev": true }, "node_modules/regenerate-unicode-properties": { "version": "10.1.1", "resolved": "https: "integrity": "sha512-X007RyZLsCJVVrjgEFVpLUTZwyOZk3oiL75ZcuYjlIWd6rNJtOjkBwQc5AsRrpbKVkxN6sklw/k/9m2jJYOf8Q==", "dev": true, "dependencies": { "regenerate": "^1.4.2" }, "engines": { "node": ">=4" } }, "node_modules/regenerator-runtime": { "version": "0.14.0", "resolved": "https: "integrity": "sha512-srw17NI0TUWHuGa5CFGGmhfNIeja30WMBfbslPNhf6JrqQlLN5gcrvig1oqPxiVaXb0oW0XRKtH6Nngs5lKCIA==", "dev": true }, "node_modules/regenerator-transform": { "version": "0.15.2", "resolved": "https: "integrity": 
"sha512-hfMp2BoF0qOk3uc5V20ALGDS2ddjQaLrdl7xrGXvAIow7qeWRM2VA2HuCHkUKk9slq3VwEwLNK3DFBqDfPGYtg==", "dev": true, "dependencies": { "@babel/runtime": "^7.8.4" } }, "node_modules/regexpu-core": { "version": "5.3.2", "resolved": "https: "integrity": "sha512-RAM5FlZz+Lhmo7db9L298p2vHP5ZywrVXmVXpmAD9GuL5MPH6t9ROw1iA/wfHkQ76Qe7AaPF0nGuim96/IrQMQ==", "dev": true, "dependencies": { "@babel/regjsgen": "^0.8.0", "regenerate": "^1.4.2", "regenerate-unicode-properties": "^10.1.0", "regjsparser": "^0.9.1", "unicode-match-property-ecmascript": "^2.0.0", "unicode-match-property-value-ecmascript": "^2.1.0" }, "engines": { "node": ">=4" } }, "node_modules/regjsparser": { "version": "0.9.1", "resolved": "https: "integrity": "sha512-dQUtn90WanSNl+7mQKcXAgZxvUe7Z0SqXlgzv0za4LwiUhyzBC58yQO3liFoUgu8GiJVInAhJjkj1N0EtQ5nkQ==", "dev": true, "dependencies": { "jsesc": "~0.5.0" }, "bin": { "regjsparser": "bin/parser" } }, "node_modules/regjsparser/node_modules/jsesc": { "version": "0.5.0", "resolved": "https: "integrity": "sha512-uZz5UnB7u4T9LvwmFqXii7pZSouaRPorGs5who1Ip7VO0wxanFvBL7GkM6dTHlgX+jhBApRetaWpnDabOeTcnA==", "dev": true, "bin": { "jsesc": "bin/jsesc" } }, "node_modules/require-directory": { "version": "2.1.1", "resolved": "https: "integrity": "sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q==", "dev": true, "engines": { "node": ">=0.10.0" } }, "node_modules/requires-port": { "version": "1.0.0", "resolved": "https: "integrity": "sha512-KigOCHcocU3XODJxsu8i/j8T9tzT4adHiecwORRQ0ZZFcp7ahwXuRU1m+yuO90C5ZUyGeGfocHDI14M3L3yDAQ==", "dev": true }, "node_modules/resolve": { "version": "1.22.6", "resolved": "https: "integrity": "sha512-njhxM7mV12JfufShqGy3Rz8j11RPdLy4xi15UurGJeoHLfJpVXKdh3ueuOqbYUcDZnffr6X739JBo5LzyahEsw==", "dev": true, "dependencies": { "is-core-module": "^2.13.0", "path-parse": "^1.0.7", "supports-preserve-symlinks-flag": "^1.0.0" }, "bin": { "resolve": "bin/resolve" }, "funding": { "url": "https: } }, "node_modules/resolve-cwd": { "version": "3.0.0", "resolved": "https: "integrity": "sha512-OrZaX2Mb+rJCpH/6CpSqt9xFVpN++x01XnN2ie9g6P5/3xelLAkXWVADpdz1IHD/KFfEXyE6V0U01OQ3UO2rEg==", "dev": true, "dependencies": { "resolve-from": "^5.0.0" }, "engines": { "node": ">=8" } }, "node_modules/resolve-from": { "version": "5.0.0", "resolved": "https: "integrity": "sha512-qYg9KP24dD5qka9J47d0aVky0N+b4fTU89LN9iDnjB5waksiC49rvMB0PrUJQGoTmH50XPiqOvAjDfaijGxYZw==", "dev": true, "engines": { "node": ">=8" } }, "node_modules/resolve.exports": { "version": "2.0.2", "resolved": "https: "integrity": "sha512-X2UW6Nw3n/aMgDVy+0rSqgHlv39WZAlZrXCdnbyEiKm17DSqHX4MmQMaST3FbeWR5FTuRcUwYAziZajji0Y7mg==", "dev": true, "engines": { "node": ">=10" } }, "node_modules/safer-buffer": { "version": "2.1.2", "resolved": "https: "integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==", "dev": true }, "node_modules/saxes": { "version": "6.0.0", "resolved": "https: "integrity": "sha512-xAg7SOnEhrm5zI3puOOKyy1OMcMlIJZYNJY7xLBwSze0UjhPLnWfj2GF2EpT0jmzaJKIWKHLsaSSajf35bcYnA==", "dev": true, "dependencies": { "xmlchars": "^2.2.0" }, "engines": { "node": ">=v12.22.7" } }, "node_modules/semver": { "version": "6.3.1", "resolved": "https: "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", "dev": true, "bin": { "semver": "bin/semver.js" } }, "node_modules/shebang-command": { "version": "2.0.0", "resolved": "https: "integrity": 
"sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", "dev": true, "dependencies": { "shebang-regex": "^3.0.0" }, "engines": { "node": ">=8" } }, "node_modules/shebang-regex": { "version": "3.0.0", "resolved": "https: "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==", "dev": true, "engines": { "node": ">=8" } }, "node_modules/signal-exit": { "version": "3.0.7", "resolved": "https: "integrity": "sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==", "dev": true }, "node_modules/sisteransi": { "version": "1.0.5", "resolved": "https: "integrity": "sha512-bLGGlR1QxBcynn2d5YmDX4MGjlZvy2MRBDRNHLJ8VI6l6+9FUiyTFNJ0IveOSP0bcXgVDPRcfGqA0pjaqUpfVg==", "dev": true }, "node_modules/slash": { "version": "3.0.0", "resolved": "https: "integrity": "sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==", "dev": true, "engines": { "node": ">=8" } }, "node_modules/source-map": { "version": "0.6.1", "resolved": "https: "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", "dev": true, "engines": { "node": ">=0.10.0" } }, "node_modules/source-map-support": { "version": "0.5.13", "resolved": "https: "integrity": "sha512-SHSKFHadjVA5oR4PPqhtAVdcBWwRYVd6g6cAXnIbRiIwc2EhPrTuKUBdSLvlEKyIP3GCf89fltvcZiP9MMFA1w==", "dev": true, "dependencies": { "buffer-from": "^1.0.0", "source-map": "^0.6.0" } }, "node_modules/sprintf-js": { "version": "1.0.3", "resolved": "https: "integrity": "sha512-D9cPgkvLlV3t3IzL0D0YLvGA9Ahk4PcvVwUbN0dSGr1aP0Nrt4AEnTUbuGvquEC0mA64Gqt1fzirlRs5ibXx8g==", "dev": true }, "node_modules/stack-utils": { "version": "2.0.6", "resolved": "https: "integrity": "sha512-XlkWvfIm6RmsWtNJx+uqtKLS8eqFbxUg0ZzLXqY0caEy9l7hruX8IpiDnjsLavoBgqCCR71TqWO8MaXYheJ3RQ==", "dev": true, "dependencies": { "escape-string-regexp": "^2.0.0" }, "engines": { "node": ">=10" } }, "node_modules/string-length": { "version": "4.0.2", "resolved": "https: "integrity": "sha512-+l6rNN5fYHNhZZy41RXsYptCjA2Igmq4EG7kZAYFQI1E1VTXarr6ZPXBg6eq7Y6eK4FEhY6AJlyuFIb/v/S0VQ==", "dev": true, "dependencies": { "char-regex": "^1.0.2", "strip-ansi": "^6.0.0" }, "engines": { "node": ">=10" } }, "node_modules/string-width": { "version": "4.2.3", "resolved": "https: "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", "dev": true, "dependencies": { "emoji-regex": "^8.0.0", "is-fullwidth-code-point": "^3.0.0", "strip-ansi": "^6.0.1" }, "engines": { "node": ">=8" } }, "node_modules/strip-ansi": { "version": "6.0.1", "resolved": "https: "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", "dev": true, "dependencies": { "ansi-regex": "^5.0.1" }, "engines": { "node": ">=8" } }, "node_modules/strip-bom": { "version": "4.0.0", "resolved": "https: "integrity": "sha512-3xurFv5tEgii33Zi8Jtp55wEIILR9eh34FAW00PZf+JnSsTmV/ioewSgQl97JHvgjoRGwPShsWm+IdrxB35d0w==", "dev": true, "engines": { "node": ">=8" } }, "node_modules/strip-final-newline": { "version": "2.0.0", "resolved": "https: "integrity": "sha512-BrpvfNAE3dcvq7ll3xVumzjKjZQ5tI1sEUIKr3Uoks0XUl45St3FlatVqef9prk4jRDzhW6WZg+3bk93y6pLjA==", "dev": true, "engines": { "node": ">=6" } }, "node_modules/strip-json-comments": { "version": "3.1.1", "resolved": "https: "integrity": 
"sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==", "dev": true, "engines": { "node": ">=8" }, "funding": { "url": "https: } }, "node_modules/supports-color": { "version": "7.2.0", "resolved": "https: "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", "dev": true, "dependencies": { "has-flag": "^4.0.0" }, "engines": { "node": ">=8" } }, "node_modules/supports-preserve-symlinks-flag": { "version": "1.0.0", "resolved": "https: "integrity": "sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==", "dev": true, "engines": { "node": ">= 0.4" }, "funding": { "url": "https: } }, "node_modules/symbol-tree": { "version": "3.2.4", "resolved": "https: "integrity": "sha512-9QNk5KwDF+Bvz+PyObkmSYjI5ksVUYtjW7AU22r2NKcfLJcXp96hkDWU3+XndOsUb+AQ9QhfzfCT2O+CNWT5Tw==", "dev": true }, "node_modules/test-exclude": { "version": "6.0.0", "resolved": "https: "integrity": "sha512-cAGWPIyOHU6zlmg88jwm7VRyXnMN7iV68OGAbYDk/Mh/xC/pzVPlQtY6ngoIH/5/tciuhGfvESU8GrHrcxD56w==", "dev": true, "dependencies": { "@istanbuljs/schema": "^0.1.2", "glob": "^7.1.4", "minimatch": "^3.0.4" }, "engines": { "node": ">=8" } }, "node_modules/tmpl": { "version": "1.0.5", "resolved": "https: "integrity": "sha512-3f0uOEAQwIqGuWW2MVzYg8fV/QNnc/IpuJNG837rLuczAaLVHslWHZQj4IGiEl5Hs3kkbhwL9Ab7Hrsmuj+Smw==", "dev": true }, "node_modules/to-fast-properties": { "version": "2.0.0", "resolved": "https: "integrity": "sha512-/OaKK0xYrs3DmxRYqL/yDc+FxFUVYhDlXMhRmv3z915w2HF1tnN1omB354j8VUGO/hbRzyD6Y3sA7v7GS/ceog==", "dev": true, "engines": { "node": ">=4" } }, "node_modules/to-regex-range": { "version": "5.0.1", "resolved": "https: "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", "dev": true, "dependencies": { "is-number": "^7.0.0" }, "engines": { "node": ">=8.0" } }, "node_modules/tough-cookie": { "version": "4.1.3", "resolved": "https: "integrity": "sha512-aX/y5pVRkfRnfmuX+OdbSdXvPe6ieKX/G2s7e98f4poJHnqH3281gDPm/metm6E/WRamfx7WC4HUqkWHfQHprw==", "dev": true, "dependencies": { "psl": "^1.1.33", "punycode": "^2.1.1", "universalify": "^0.2.0", "url-parse": "^1.5.3" }, "engines": { "node": ">=6" } }, "node_modules/tr46": { "version": "3.0.0", "resolved": "https: "integrity": "sha512-l7FvfAHlcmulp8kr+flpQZmVwtu7nfRV7NZujtN0OqES8EL4O4e0qqzL0DC5gAvx/ZC/9lk6rhcUwYvkBnBnYA==", "dev": true, "dependencies": { "punycode": "^2.1.1" }, "engines": { "node": ">=12" } }, "node_modules/tslib": { "version": "2.6.2", "resolved": "https: "integrity": "sha512-AEYxH93jGFPn/a2iVAwW87VuUIkR1FVUKB77NwMF7nBTDkDrrT/Hpt/IrCJ0QXhW27jTBDcf5ZY7w6RiqTMw2Q==", "dev": true }, "node_modules/type-detect": { "version": "4.0.8", "resolved": "https: "integrity": "sha512-0fr/mIH1dlO+x7TlcMy+bIDqKPsw/70tVyeHW787goQjhmqaZe10uwLujubK9q9Lg6Fiho1KUKDYz0Z7k7g5/g==", "dev": true, "engines": { "node": ">=4" } }, "node_modules/type-fest": { "version": "0.21.3", "resolved": "https: "integrity": "sha512-t0rzBq87m3fVcduHDUFhKmyyX+9eo6WQjZvf51Ea/M0Q7+T374Jp1aUiyUl0GKxp8M/OETVHSDvmkyPgvX+X2w==", "dev": true, "engines": { "node": ">=10" }, "funding": { "url": "https: } }, "node_modules/unicode-canonical-property-names-ecmascript": { "version": "2.0.0", "resolved": "https: "integrity": "sha512-yY5PpDlfVIU5+y/BSCxAJRBIS1Zc2dDG3Ujq+sR0U+JjUevW2JhocOF+soROYDSaAezOzOKuyyixhD6mBknSmQ==", "dev": true, "engines": { "node": ">=4" } }, "node_modules/unicode-match-property-ecmascript": { "version": "2.0.0", 
"resolved": "https: "integrity": "sha512-5kaZCrbp5mmbz5ulBkDkbY0SsPOjKqVS35VpL9ulMPfSl0J0Xsm+9Evphv9CoIZFwre7aJoa94AY6seMKGVN5Q==", "dev": true, "dependencies": { "unicode-canonical-property-names-ecmascript": "^2.0.0", "unicode-property-aliases-ecmascript": "^2.0.0" }, "engines": { "node": ">=4" } }, "node_modules/unicode-match-property-value-ecmascript": { "version": "2.1.0", "resolved": "https: "integrity": "sha512-qxkjQt6qjg/mYscYMC0XKRn3Rh0wFPlfxB0xkt9CfyTvpX1Ra0+rAmdX2QyAobptSEvuy4RtpPRui6XkV+8wjA==", "dev": true, "engines": { "node": ">=4" } }, "node_modules/unicode-property-aliases-ecmascript": { "version": "2.1.0", "resolved": "https: "integrity": "sha512-6t3foTQI9qne+OZoVQB/8x8rk2k1eVy1gRXhV3oFQ5T6R1dqQ1xtin3XqSlx3+ATBkliTaR/hHyJBm+LVPNM8w==", "dev": true, "engines": { "node": ">=4" } }, "node_modules/universalify": { "version": "0.2.0", "resolved": "https: "integrity": "sha512-CJ1QgKmNg3CwvAv/kOFmtnEN05f0D/cn9QntgNOQlQF9dgvVTHj3t+8JPdjqawCHk7V/KA+fbUqzZ9XWhcqPUg==", "dev": true, "engines": { "node": ">= 4.0.0" } }, "node_modules/update-browserslist-db": { "version": "1.0.13", "resolved": "https: "integrity": "sha512-xebP81SNcPuNpPP3uzeW1NYXxI3rxyJzF3pD6sH4jE7o/IX+WtSpwnVU+qIsDPyk0d3hmFQ7mjqc6AtV604hbg==", "dev": true, "funding": [ { "type": "opencollective", "url": "https: }, { "type": "tidelift", "url": "https: }, { "type": "github", "url": "https: } ], "dependencies": { "escalade": "^3.1.1", "picocolors": "^1.0.0" }, "bin": { "update-browserslist-db": "cli.js" }, "peerDependencies": { "browserslist": ">= 4.21.0" } }, "node_modules/url-parse": { "version": "1.5.10", "resolved": "https: "integrity": "sha512-WypcfiRhfeUP9vvF0j6rw0J3hrWrw6iZv3+22h6iRMJ/8z1Tj6XfLP4DsUix5MhMPnXpiHDoKyoZ/bdCkwBCiQ==", "dev": true, "dependencies": { "querystringify": "^2.1.1", "requires-port": "^1.0.0" } }, "node_modules/v8-to-istanbul": { "version": "9.1.3", "resolved": "https: "integrity": "sha512-9lDD+EVI2fjFsMWXc6dy5JJzBsVTcQ2fVkfBvncZ6xJWG9wtBhOldG+mHkSL0+V1K/xgZz0JDO5UT5hFwHUghg==", "dev": true, "dependencies": { "@jridgewell/trace-mapping": "^0.3.12", "@types/istanbul-lib-coverage": "^2.0.1", "convert-source-map": "^2.0.0" }, "engines": { "node": ">=10.12.0" } }, "node_modules/w3c-xmlserializer": { "version": "4.0.0", "resolved": "https: "integrity": "sha512-d+BFHzbiCx6zGfz0HyQ6Rg69w9k19nviJspaj4yNscGjrHu94sVP+aRm75yEbCh+r2/yR+7q6hux9LVtbuTGBw==", "dev": true, "dependencies": { "xml-name-validator": "^4.0.0" }, "engines": { "node": ">=14" } }, "node_modules/walker": { "version": "1.0.8", "resolved": "https: "integrity": "sha512-ts/8E8l5b7kY0vlWLewOkDXMmPdLcVV4GmOQLyxuSswIJsweeFZtAsMF7k1Nszz+TYBQrlYRmzOnr398y1JemQ==", "dev": true, "dependencies": { "makeerror": "1.0.12" } }, "node_modules/webidl-conversions": { "version": "7.0.0", "resolved": "https: "integrity": "sha512-VwddBukDzu71offAQR975unBIGqfKZpM+8ZX6ySk8nYhVoo5CYaZyzt3YBvYtRtO+aoGlqxPg/B87NGVZ/fu6g==", "dev": true, "engines": { "node": ">=12" } }, "node_modules/whatwg-encoding": { "version": "2.0.0", "resolved": "https: "integrity": "sha512-p41ogyeMUrw3jWclHWTQg1k05DSVXPLcVxRTYsXUk+ZooOCZLcoYgPZ/HL/D/N+uQPOtcp1me1WhBEaX02mhWg==", "dev": true, "dependencies": { "iconv-lite": "0.6.3" }, "engines": { "node": ">=12" } }, "node_modules/whatwg-mimetype": { "version": "3.0.0", "resolved": "https: "integrity": "sha512-nt+N2dzIutVRxARx1nghPKGv1xHikU7HKdfafKkLNLindmPU/ch3U31NOCGGA/dmPcmb1VlofO0vnKAcsm0o/Q==", "dev": true, "engines": { "node": ">=12" } }, "node_modules/whatwg-url": { "version": "11.0.0", "resolved": "https: "integrity": 
"sha512-RKT8HExMpoYx4igMiVMY83lN6UeITKJlBQ+vR/8ZJ8OCdSiN3RwCq+9gH0+Xzj0+5IrM6i4j/6LuvzbZIQgEcQ==", "dev": true, "dependencies": { "tr46": "^3.0.0", "webidl-conversions": "^7.0.0" }, "engines": { "node": ">=12" } }, "node_modules/which": { "version": "2.0.2", "resolved": "https: "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", "dev": true, "dependencies": { "isexe": "^2.0.0" }, "bin": { "node-which": "bin/node-which" }, "engines": { "node": ">= 8" } }, "node_modules/wrap-ansi": { "version": "7.0.0", "resolved": "https: "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", "dev": true, "dependencies": { "ansi-styles": "^4.0.0", "string-width": "^4.1.0", "strip-ansi": "^6.0.0" }, "engines": { "node": ">=10" }, "funding": { "url": "https: } }, "node_modules/wrappy": { "version": "1.0.2", "resolved": "https: "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==", "dev": true }, "node_modules/write-file-atomic": { "version": "4.0.2", "resolved": "https: "integrity": "sha512-7KxauUdBmSdWnmpaGFg+ppNjKF8uNLry8LyzjauQDOVONfFLNKrKvQOxZ/VuTIcS/gge/YNahf5RIIQWTSarlg==", "dev": true, "dependencies": { "imurmurhash": "^0.1.4", "signal-exit": "^3.0.7" }, "engines": { "node": "^12.13.0 || ^14.15.0 || >=16.0.0" } }, "node_modules/ws": { "version": "8.14.2", "resolved": "https: "integrity": "sha512-wEBG1ftX4jcglPxgFCMJmZ2PLtSbJ2Peg6TmpJFTbe9GZYOQCDPdMYu/Tm0/bGZkw8paZnJY45J4K2PZrLYq8g==", "dev": true, "engines": { "node": ">=10.0.0" }, "peerDependencies": { "bufferutil": "^4.0.1", "utf-8-validate": ">=5.0.2" }, "peerDependenciesMeta": { "bufferutil": { "optional": true }, "utf-8-validate": { "optional": true } } }, "node_modules/xml-name-validator": { "version": "4.0.0", "resolved": "https: "integrity": "sha512-ICP2e+jsHvAj2E2lIHxa5tjXRlKDJo4IdvPvCXbXQGdzSfmSpNVyIKMvoZHjDY9DP0zV17iI85o90vRFXNccRw==", "dev": true, "engines": { "node": ">=12" } }, "node_modules/xmlchars": { "version": "2.2.0", "resolved": "https: "integrity": "sha512-JZnDKK8B0RCDw84FNdDAIpZK+JuJw+s7Lz8nksI7SIuU3UXJJslUthsi+uWBUYOwPFwW7W7PRLRfUKpxjtjFCw==", "dev": true }, "node_modules/y18n": { "version": "5.0.8", "resolved": "https: "integrity": "sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA==", "dev": true, "engines": { "node": ">=10" } }, "node_modules/yallist": { "version": "3.1.1", "resolved": "https: "integrity": "sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g==", "dev": true }, "node_modules/yargs": { "version": "17.7.2", "resolved": "https: "integrity": "sha512-7dSzzRQ++CKnNI/krKnYRV7JKKPUXMEh61soaHKg9mrWEhzFWhFnxPxGl+69cD1Ou63C13NUPCnmIcrvqCuM6w==", "dev": true, "dependencies": { "cliui": "^8.0.1", "escalade": "^3.1.1", "get-caller-file": "^2.0.5", "require-directory": "^2.1.1", "string-width": "^4.2.3", "y18n": "^5.0.5", "yargs-parser": "^21.1.1" }, "engines": { "node": ">=12" } }, "node_modules/yargs-parser": { "version": "21.1.1", "resolved": "https: "integrity": "sha512-tVpsJW7DdjecAiFpbIB1e3qxIQsE6NoPc5/eTdrbbIC4h0LVsWhnoa3g+m2HclBIujHzsxZ4VJVA+GUuc2/LBw==", "dev": true, "engines": { "node": ">=12" } }, "node_modules/yocto-queue": { "version": "0.1.0", "resolved": "https: "integrity": "sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==", "dev": true, "engines": { "node": ">=10" }, "funding": { "url": "https: } } } }
{ "name": "comfui-tests", "version": "1.0.0", "description": "UI tests", "main": "index.js", "scripts": { "test": "jest", "test:generate": "node setup.js" }, "repository": { "type": "git", "url": "git+https: }, "keywords": [ "comfyui", "test" ], "author": "comfyanonymous", "license": "GPL-3.0", "bugs": { "url": "https: }, "homepage": "https: "devDependencies": { "@babel/preset-env": "^7.22.20", "@types/jest": "^29.5.5", "babel-plugin-transform-import-meta": "^2.2.1", "jest": "^29.7.0", "jest-environment-jsdom": "^29.7.0" } }
{ "compilerOptions": { "baseUrl": ".", "paths": { "/*": ["./*"] }, "lib": ["DOM", "ES2022"] }, "include": ["."] }