repo
stringlengths
6
65
file_url
stringlengths
81
311
file_path
stringlengths
6
227
content
stringlengths
0
32.8k
language
stringclasses
1 value
license
stringclasses
7 values
commit_sha
stringlengths
40
40
retrieved_at
stringdate
2026-01-04 15:31:58
2026-01-04 20:25:31
truncated
bool
2 classes
sharkdp/fd
https://github.com/sharkdp/fd/blob/5f95a781212e3efbc6d91ae50e0ca0ce0693da50/src/config.rs
src/config.rs
use std::{path::PathBuf, sync::Arc, time::Duration}; use lscolors::LsColors; use regex::bytes::RegexSet; use crate::exec::CommandSet; use crate::filetypes::FileTypes; #[cfg(unix)] use crate::filter::OwnerFilter; use crate::filter::{SizeFilter, TimeFilter}; use crate::fmt::FormatTemplate; /// Configuration options for *fd*. pub struct Config { /// Whether the search is case-sensitive or case-insensitive. pub case_sensitive: bool, /// Whether to search within the full file path or just the base name (filename or directory /// name). pub search_full_path: bool, /// Whether to ignore hidden files and directories (or not). pub ignore_hidden: bool, /// Whether to respect `.fdignore` files or not. pub read_fdignore: bool, /// Whether to respect ignore files in parent directories or not. pub read_parent_ignore: bool, /// Whether to respect VCS ignore files (`.gitignore`, ..) or not. pub read_vcsignore: bool, /// Whether to require a `.git` directory to respect gitignore files. pub require_git_to_read_vcsignore: bool, /// Whether to respect the global ignore file or not. pub read_global_ignore: bool, /// Whether to follow symlinks or not. pub follow_links: bool, /// Whether to limit the search to starting file system or not. pub one_file_system: bool, /// Whether elements of output should be separated by a null character pub null_separator: bool, /// The maximum search depth, or `None` if no maximum search depth should be set. /// /// A depth of `1` includes all files under the current directory, a depth of `2` also includes /// all files under subdirectories of the current directory, etc. pub max_depth: Option<usize>, /// The minimum depth for reported entries, or `None`. pub min_depth: Option<usize>, /// Whether to stop traversing into matching directories. pub prune: bool, /// The number of threads to use. pub threads: usize, /// If true, the program doesn't print anything and will instead return an exit code of 0 /// if there's at least one match. 
Otherwise, the exit code will be 1. pub quiet: bool, /// Time to buffer results internally before streaming to the console. This is useful to /// provide a sorted output, in case the total execution time is shorter than /// `max_buffer_time`. pub max_buffer_time: Option<Duration>, /// `None` if the output should not be colorized. Otherwise, a `LsColors` instance that defines /// how to style different filetypes. pub ls_colors: Option<LsColors>, /// Whether or not we are writing to an interactive terminal #[cfg_attr(not(unix), allow(unused))] pub interactive_terminal: bool, /// The type of file to search for. If set to `None`, all file types are displayed. If /// set to `Some(..)`, only the types that are specified are shown. pub file_types: Option<FileTypes>, /// The extension to search for. Only entries matching the extension will be included. /// /// The value (if present) will be a lowercase string without leading dots. pub extensions: Option<RegexSet>, /// A format string to use to format results, similarly to exec pub format: Option<FormatTemplate>, /// If a value is supplied, each item found will be used to generate and execute commands. pub command: Option<Arc<CommandSet>>, /// Maximum number of search results to pass to each `command`. If zero, the number is /// unlimited. pub batch_size: usize, /// A list of glob patterns that should be excluded from the search. pub exclude_patterns: Vec<String>, /// A list of custom ignore files. pub ignore_files: Vec<PathBuf>, /// The given constraints on the size of returned files pub size_constraints: Vec<SizeFilter>, /// Constraints on last modification time of files pub time_constraints: Vec<TimeFilter>, #[cfg(unix)] /// User/group ownership constraint pub owner_constraint: Option<OwnerFilter>, /// Whether or not to display filesystem errors pub show_filesystem_errors: bool, /// The separator used to print file paths. 
pub path_separator: Option<String>, /// The actual separator, either the system default separator or `path_separator` pub actual_path_separator: String, /// The maximum number of search results pub max_results: Option<usize>, /// Whether or not to strip the './' prefix for search results pub strip_cwd_prefix: bool, /// Whether or not to use hyperlinks on paths pub hyperlink: bool, } impl Config { /// Check whether results are being printed. pub fn is_printing(&self) -> bool { self.command.is_none() } }
rust
Apache-2.0
5f95a781212e3efbc6d91ae50e0ca0ce0693da50
2026-01-04T15:31:59.463143Z
false
sharkdp/fd
https://github.com/sharkdp/fd/blob/5f95a781212e3efbc6d91ae50e0ca0ce0693da50/src/hyperlink.rs
src/hyperlink.rs
use crate::filesystem::absolute_path; use std::fmt::{self, Formatter, Write}; use std::path::{Path, PathBuf}; pub(crate) struct PathUrl(PathBuf); impl PathUrl { pub(crate) fn new(path: &Path) -> Option<PathUrl> { Some(PathUrl(absolute_path(path).ok()?)) } } impl fmt::Display for PathUrl { fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { write!(f, "file://{}", host())?; let bytes = self.0.as_os_str().as_encoded_bytes(); for &byte in bytes.iter() { encode(f, byte)?; } Ok(()) } } fn encode(f: &mut Formatter, byte: u8) -> fmt::Result { // NOTE: // Most terminals can handle non-ascii unicode characters in a file url fine. But on some OSes (notably // windows), the encoded bytes of the path may not be valid UTF-8. Since we don't know if a // byte >= 128 is part of a valid UTF-8 encoding or not, we just percent encode any non-ascii // byte. // Percent encoding these bytes is probably safer anyway. match byte { b'0'..=b'9' | b'A'..=b'Z' | b'a'..=b'z' | b'/' | b':' | b'-' | b'.' | b'_' | b'~' => { f.write_char(byte.into()) } #[cfg(windows)] b'\\' => f.write_char('/'), _ => { write!(f, "%{byte:02X}") } } } #[cfg(unix)] fn host() -> &'static str { use std::sync::OnceLock; static HOSTNAME: OnceLock<String> = OnceLock::new(); HOSTNAME .get_or_init(|| { nix::unistd::gethostname() .ok() .and_then(|h| h.into_string().ok()) .unwrap_or_default() }) .as_ref() } #[cfg(not(unix))] const fn host() -> &'static str { "" } #[cfg(test)] mod test { use super::*; // This allows us to test the encoding without having to worry about the host, or absolute path struct Encoded(&'static str); impl fmt::Display for Encoded { fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { for byte in self.0.bytes() { encode(f, byte)?; } Ok(()) } } #[test] fn test_unicode_encoding() { assert_eq!( Encoded("$*\x1bßé/∫😃\x07").to_string(), "%24%2A%1B%C3%9F%C3%A9/%E2%88%AB%F0%9F%98%83%07", ); } }
rust
Apache-2.0
5f95a781212e3efbc6d91ae50e0ca0ce0693da50
2026-01-04T15:31:59.463143Z
false
sharkdp/fd
https://github.com/sharkdp/fd/blob/5f95a781212e3efbc6d91ae50e0ca0ce0693da50/src/filesystem.rs
src/filesystem.rs
use std::borrow::Cow; use std::env; use std::ffi::OsStr; use std::fs; use std::io; #[cfg(any(unix, target_os = "redox"))] use std::os::unix::fs::FileTypeExt; use std::path::{Path, PathBuf}; use normpath::PathExt; use crate::dir_entry; pub fn path_absolute_form(path: &Path) -> io::Result<PathBuf> { if path.is_absolute() { return Ok(path.to_path_buf()); } let path = path.strip_prefix(".").unwrap_or(path); env::current_dir().map(|path_buf| path_buf.join(path)) } pub fn absolute_path(path: &Path) -> io::Result<PathBuf> { let path_buf = path_absolute_form(path)?; #[cfg(windows)] let path_buf = Path::new( path_buf .as_path() .to_string_lossy() .trim_start_matches(r"\\?\"), ) .to_path_buf(); Ok(path_buf) } pub fn is_existing_directory(path: &Path) -> bool { // Note: we do not use `.exists()` here, as `.` always exists, even if // the CWD has been deleted. path.is_dir() && (path.file_name().is_some() || path.normalize().is_ok()) } pub fn is_empty(entry: &dir_entry::DirEntry) -> bool { if let Some(file_type) = entry.file_type() { if file_type.is_dir() { if let Ok(mut entries) = fs::read_dir(entry.path()) { entries.next().is_none() } else { false } } else if file_type.is_file() { entry.metadata().map(|m| m.len() == 0).unwrap_or(false) } else { false } } else { false } } #[cfg(any(unix, target_os = "redox"))] pub fn is_block_device(ft: fs::FileType) -> bool { ft.is_block_device() } #[cfg(windows)] pub fn is_block_device(_: fs::FileType) -> bool { false } #[cfg(any(unix, target_os = "redox"))] pub fn is_char_device(ft: fs::FileType) -> bool { ft.is_char_device() } #[cfg(windows)] pub fn is_char_device(_: fs::FileType) -> bool { false } #[cfg(any(unix, target_os = "redox"))] pub fn is_socket(ft: fs::FileType) -> bool { ft.is_socket() } #[cfg(windows)] pub fn is_socket(_: fs::FileType) -> bool { false } #[cfg(any(unix, target_os = "redox"))] pub fn is_pipe(ft: fs::FileType) -> bool { ft.is_fifo() } #[cfg(windows)] pub fn is_pipe(_: fs::FileType) -> bool { false } #[cfg(any(unix, 
target_os = "redox"))] pub fn osstr_to_bytes(input: &OsStr) -> Cow<'_, [u8]> { use std::os::unix::ffi::OsStrExt; Cow::Borrowed(input.as_bytes()) } #[cfg(windows)] pub fn osstr_to_bytes(input: &OsStr) -> Cow<'_, [u8]> { let string = input.to_string_lossy(); match string { Cow::Owned(string) => Cow::Owned(string.into_bytes()), Cow::Borrowed(string) => Cow::Borrowed(string.as_bytes()), } } /// Remove the `./` prefix from a path. pub fn strip_current_dir(path: &Path) -> &Path { path.strip_prefix(".").unwrap_or(path) } /// Default value for the path_separator, mainly for MSYS/MSYS2, which set the MSYSTEM /// environment variable, and we set fd's path separator to '/' rather than Rust's default of '\'. /// /// Returns Some to use a nonstandard path separator, or None to use rust's default on the target /// platform. pub fn default_path_separator() -> Option<String> { if cfg!(windows) { let msystem = env::var("MSYSTEM").ok()?; if !msystem.is_empty() { return Some("/".to_owned()); } } None } #[cfg(test)] mod tests { use super::strip_current_dir; use std::path::Path; #[test] fn strip_current_dir_basic() { assert_eq!(strip_current_dir(Path::new("./foo")), Path::new("foo")); assert_eq!(strip_current_dir(Path::new("foo")), Path::new("foo")); assert_eq!( strip_current_dir(Path::new("./foo/bar/baz")), Path::new("foo/bar/baz") ); assert_eq!( strip_current_dir(Path::new("foo/bar/baz")), Path::new("foo/bar/baz") ); } }
rust
Apache-2.0
5f95a781212e3efbc6d91ae50e0ca0ce0693da50
2026-01-04T15:31:59.463143Z
false
sharkdp/fd
https://github.com/sharkdp/fd/blob/5f95a781212e3efbc6d91ae50e0ca0ce0693da50/src/cli.rs
src/cli.rs
use std::num::NonZeroUsize;
use std::path::{Path, PathBuf};
use std::time::Duration;

use anyhow::anyhow;
use clap::{
    Arg, ArgAction, ArgGroup, ArgMatches, Command, Parser, ValueEnum, error::ErrorKind,
    value_parser,
};
#[cfg(feature = "completions")]
use clap_complete::Shell;
use normpath::PathExt;

use crate::error::print_error;
use crate::exec::CommandSet;
use crate::filesystem;
#[cfg(unix)]
use crate::filter::OwnerFilter;
use crate::filter::SizeFilter;

// NOTE(review): the `///` doc comments on the fields below double as clap's
// generated --help / long_help text at runtime; editing their wording changes
// the program's output.
#[derive(Parser)]
#[command(
    name = "fd",
    version,
    about = "A program to find entries in your filesystem",
    after_long_help = "Bugs can be reported on GitHub: https://github.com/sharkdp/fd/issues",
    max_term_width = 98,
    args_override_self = true,
    group(ArgGroup::new("execs").args(&["exec", "exec_batch", "list_details"]).conflicts_with_all(&[
        "max_results", "quiet", "max_one_result"])),
)]
pub struct Opts {
    /// Include hidden directories and files in the search results (default:
    /// hidden files and directories are skipped). Files and directories are
    /// considered to be hidden if their name starts with a `.` sign (dot).
    /// Any files or directories that are ignored due to the rules described by
    /// --no-ignore are still ignored unless otherwise specified.
    /// The flag can be overridden with --no-hidden.
    #[arg(
        long,
        short = 'H',
        help = "Search hidden files and directories",
        long_help
    )]
    pub hidden: bool,

    /// Overrides --hidden
    #[arg(long, overrides_with = "hidden", hide = true, action = ArgAction::SetTrue)]
    no_hidden: (),

    /// Show search results from files and directories that would otherwise be
    /// ignored by '.gitignore', '.ignore', '.fdignore', or the global ignore file,
    /// The flag can be overridden with --ignore.
    #[arg(
        long,
        short = 'I',
        help = "Do not respect .(git|fd)ignore files",
        long_help
    )]
    pub no_ignore: bool,

    /// Overrides --no-ignore
    #[arg(long, overrides_with = "no_ignore", hide = true, action = ArgAction::SetTrue)]
    ignore: (),

    ///Show search results from files and directories that
    ///would otherwise be ignored by '.gitignore' files.
    ///The flag can be overridden with --ignore-vcs.
    #[arg(
        long,
        hide_short_help = true,
        help = "Do not respect .gitignore files",
        long_help
    )]
    pub no_ignore_vcs: bool,

    /// Overrides --no-ignore-vcs
    #[arg(long, overrides_with = "no_ignore_vcs", hide = true, action = ArgAction::SetTrue)]
    ignore_vcs: (),

    /// Do not require a git repository to respect gitignores.
    /// By default, fd will only respect global gitignore rules, .gitignore rules,
    /// and local exclude rules if fd detects that you are searching inside a
    /// git repository. This flag allows you to relax this restriction such that
    /// fd will respect all git related ignore rules regardless of whether you're
    /// searching in a git repository or not.
    ///
    ///
    /// This flag can be disabled with --require-git.
    #[arg(
        long,
        overrides_with = "require_git",
        hide_short_help = true,
        // same description as ripgrep's flag: ripgrep/crates/core/app.rs
        long_help
    )]
    pub no_require_git: bool,

    /// Overrides --no-require-git
    #[arg(long, overrides_with = "no_require_git", hide = true, action = ArgAction::SetTrue)]
    require_git: (),

    /// Show search results from files and directories that would otherwise be
    /// ignored by '.gitignore', '.ignore', or '.fdignore' files in parent directories.
    #[arg(
        long,
        hide_short_help = true,
        help = "Do not respect .(git|fd)ignore files in parent directories",
        long_help
    )]
    pub no_ignore_parent: bool,

    /// Do not respect the global ignore file
    #[arg(long, hide = true)]
    pub no_global_ignore_file: bool,

    /// Perform an unrestricted search, including ignored and hidden files. This is
    /// an alias for '--no-ignore --hidden'.
    #[arg(long = "unrestricted", short = 'u', overrides_with_all(&["ignore", "no_hidden"]),
        action(ArgAction::Count),
        hide_short_help = true,
        help = "Unrestricted search, alias for '--no-ignore --hidden'",
        long_help,
    )]
    rg_alias_hidden_ignore: u8,

    /// Case-sensitive search (default: smart case)
    #[arg(
        long,
        short = 's',
        overrides_with("ignore_case"),
        long_help = "Perform a case-sensitive search. By default, fd uses case-insensitive \
                     searches, unless the pattern contains an uppercase character (smart \
                     case)."
    )]
    pub case_sensitive: bool,

    /// Perform a case-insensitive search. By default, fd uses case-insensitive
    /// searches, unless the pattern contains an uppercase character (smart
    /// case).
    #[arg(
        long,
        short = 'i',
        overrides_with("case_sensitive"),
        help = "Case-insensitive search (default: smart case)",
        long_help
    )]
    pub ignore_case: bool,

    /// Perform a glob-based search instead of a regular expression search.
    #[arg(
        long,
        short = 'g',
        conflicts_with("fixed_strings"),
        help = "Glob-based search (default: regular expression)",
        long_help
    )]
    pub glob: bool,

    /// Perform a regular-expression based search (default). This can be used to
    /// override --glob.
    #[arg(
        long,
        overrides_with("glob"),
        hide_short_help = true,
        help = "Regular-expression based search (default)",
        long_help
    )]
    pub regex: bool,

    /// Treat the pattern as a literal string instead of a regular expression. Note
    /// that this also performs substring comparison. If you want to match on an
    /// exact filename, consider using '--glob'.
    #[arg(
        long,
        short = 'F',
        alias = "literal",
        hide_short_help = true,
        help = "Treat pattern as literal string stead of regex",
        long_help
    )]
    pub fixed_strings: bool,

    /// Add additional required search patterns, all of which must be matched. Multiple
    /// additional patterns can be specified. The patterns are regular
    /// expressions, unless '--glob' or '--fixed-strings' is used.
    #[arg(
        long = "and",
        value_name = "pattern",
        help = "Additional search patterns that need to be matched",
        long_help,
        hide_short_help = true,
        allow_hyphen_values = true
    )]
    pub exprs: Option<Vec<String>>,

    /// Shows the full path starting from the root as opposed to relative paths.
    /// The flag can be overridden with --relative-path.
    #[arg(
        long,
        short = 'a',
        help = "Show absolute instead of relative paths",
        long_help
    )]
    pub absolute_path: bool,

    /// Overrides --absolute-path
    #[arg(long, overrides_with = "absolute_path", hide = true, action = ArgAction::SetTrue)]
    relative_path: (),

    /// Use a detailed listing format like 'ls -l'. This is basically an alias
    /// for '--exec-batch ls -l' with some additional 'ls' options. This can be
    /// used to see more metadata, to show symlink targets and to achieve a
    /// deterministic sort order.
    #[arg(
        long,
        short = 'l',
        conflicts_with("absolute_path"),
        help = "Use a long listing format with file metadata",
        long_help
    )]
    pub list_details: bool,

    /// Follow symbolic links
    #[arg(
        long,
        short = 'L',
        alias = "dereference",
        long_help = "By default, fd does not descend into symlinked directories. Using this \
                     flag, symbolic links are also traversed. \
                     Flag can be overridden with --no-follow."
    )]
    pub follow: bool,

    /// Overrides --follow
    #[arg(long, overrides_with = "follow", hide = true, action = ArgAction::SetTrue)]
    no_follow: (),

    /// By default, the search pattern is only matched against the filename (or directory name). Using this flag, the pattern is matched against the full (absolute) path. Example:
    /// fd --glob -p '**/.git/config'
    #[arg(
        long,
        short = 'p',
        help = "Search full abs. path (default: filename only)",
        long_help,
        verbatim_doc_comment
    )]
    pub full_path: bool,

    /// Separate search results by the null character (instead of newlines).
    /// Useful for piping results to 'xargs'.
    #[arg(
        long = "print0",
        short = '0',
        conflicts_with("list_details"),
        hide_short_help = true,
        help = "Separate search results by the null character",
        long_help
    )]
    pub null_separator: bool,

    /// Limit the directory traversal to a given depth. By default, there is no
    /// limit on the search depth.
    #[arg(
        long,
        short = 'd',
        value_name = "depth",
        alias("maxdepth"),
        help = "Set maximum search depth (default: none)",
        long_help
    )]
    max_depth: Option<usize>,

    /// Only show search results starting at the given depth.
    /// See also: '--max-depth' and '--exact-depth'
    #[arg(
        long,
        value_name = "depth",
        hide_short_help = true,
        alias("mindepth"),
        help = "Only show search results starting at the given depth.",
        long_help
    )]
    min_depth: Option<usize>,

    /// Only show search results at the exact given depth. This is an alias for
    /// '--min-depth <depth> --max-depth <depth>'.
    #[arg(long, value_name = "depth", hide_short_help = true, conflicts_with_all(&["max_depth", "min_depth"]),
        help = "Only show search results at the exact given depth",
        long_help,
    )]
    exact_depth: Option<usize>,

    /// Exclude files/directories that match the given glob pattern. This
    /// overrides any other ignore logic. Multiple exclude patterns can be
    /// specified.
    ///
    /// Examples:
    /// {n} --exclude '*.pyc'
    /// {n} --exclude node_modules
    #[arg(
        long,
        short = 'E',
        value_name = "pattern",
        help = "Exclude entries that match the given glob pattern",
        long_help
    )]
    pub exclude: Vec<String>,

    /// Do not traverse into directories that match the search criteria. If
    /// you want to exclude specific directories, use the '--exclude=…' option.
    #[arg(long, hide_short_help = true, conflicts_with_all(&["size", "exact_depth"]),
        long_help,
    )]
    pub prune: bool,

    /// Filter the search by type:
    /// {n} 'f' or 'file': regular files
    /// {n} 'd' or 'dir' or 'directory': directories
    /// {n} 'l' or 'symlink': symbolic links
    /// {n} 's' or 'socket': socket
    /// {n} 'p' or 'pipe': named pipe (FIFO)
    /// {n} 'b' or 'block-device': block device
    /// {n} 'c' or 'char-device': character device
    /// {n}{n} 'x' or 'executable': executables
    /// {n} 'e' or 'empty': empty files or directories
    ///
    /// This option can be specified more than once to include multiple file types.
    /// Searching for '--type file --type symlink' will show both regular files as
    /// well as symlinks. Note that the 'executable' and 'empty' filters work differently:
    /// '--type executable' implies '--type file' by default. And '--type empty' searches
    /// for empty files and directories, unless either '--type file' or '--type directory'
    /// is specified in addition.
    ///
    /// Examples:
    /// {n} - Only search for files:
    /// {n} fd --type file …
    /// {n} fd -tf …
    /// {n} - Find both files and symlinks
    /// {n} fd --type file --type symlink …
    /// {n} fd -tf -tl …
    /// {n} - Find executable files:
    /// {n} fd --type executable
    /// {n} fd -tx
    /// {n} - Find empty files:
    /// {n} fd --type empty --type file
    /// {n} fd -te -tf
    /// {n} - Find empty directories:
    /// {n} fd --type empty --type directory
    /// {n} fd -te -td
    #[arg(
        long = "type",
        short = 't',
        value_name = "filetype",
        hide_possible_values = true,
        value_enum,
        help = "Filter by type: file (f), directory (d/dir), symlink (l), \
                executable (x), empty (e), socket (s), pipe (p), \
                char-device (c), block-device (b)",
        long_help
    )]
    pub filetype: Option<Vec<FileType>>,

    /// (Additionally) filter search results by their file extension. Multiple
    /// allowable file extensions can be specified.
    ///
    /// If you want to search for files without extension,
    /// you can use the regex '^[^.]+$' as a normal search pattern.
    #[arg(
        long = "extension",
        short = 'e',
        value_name = "ext",
        help = "Filter by file extension",
        long_help
    )]
    pub extensions: Option<Vec<String>>,

    /// Limit results based on the size of files using the format <+-><NUM><UNIT>.
    /// '+': file size must be greater than or equal to this
    /// '-': file size must be less than or equal to this
    ///
    /// If neither '+' nor '-' is specified, file size must be exactly equal to this.
    /// 'NUM': The numeric size (e.g. 500)
    /// 'UNIT': The units for NUM. They are not case-sensitive.
    /// Allowed unit values:
    /// 'b': bytes
    /// 'k': kilobytes (base ten, 10^3 = 1000 bytes)
    /// 'm': megabytes
    /// 'g': gigabytes
    /// 't': terabytes
    /// 'ki': kibibytes (base two, 2^10 = 1024 bytes)
    /// 'mi': mebibytes
    /// 'gi': gibibytes
    /// 'ti': tebibytes
    #[arg(long, short = 'S', value_parser = SizeFilter::from_string, allow_hyphen_values = true,
        verbatim_doc_comment,
        value_name = "size",
        help = "Limit results based on the size of files",
        long_help,
        verbatim_doc_comment,
    )]
    pub size: Vec<SizeFilter>,

    /// Filter results based on the file modification time. Files with modification times
    /// greater than the argument are returned. The argument can be provided
    /// as a specific point in time (YYYY-MM-DD HH:MM:SS or @timestamp) or as a duration (10h, 1d, 35min).
    /// If the time is not specified, it defaults to 00:00:00.
    /// '--change-newer-than', '--newer', or '--changed-after' can be used as aliases.
    ///
    /// Examples:
    /// {n} --changed-within 2weeks
    /// {n} --change-newer-than '2018-10-27 10:00:00'
    /// {n} --newer 2018-10-27
    /// {n} --changed-after 1day
    #[arg(
        long,
        alias("change-newer-than"),
        alias("newer"),
        alias("changed-after"),
        value_name = "date|dur",
        help = "Filter by file modification time (newer than)",
        long_help
    )]
    pub changed_within: Option<String>,

    /// Filter results based on the file modification time. Files with modification times
    /// less than the argument are returned. The argument can be provided
    /// as a specific point in time (YYYY-MM-DD HH:MM:SS or @timestamp) or as a duration (10h, 1d, 35min).
    /// '--change-older-than' or '--older' can be used as aliases.
    ///
    /// Examples:
    /// {n} --changed-before '2018-10-27 10:00:00'
    /// {n} --change-older-than 2weeks
    /// {n} --older 2018-10-27
    #[arg(
        long,
        alias("change-older-than"),
        alias("older"),
        value_name = "date|dur",
        help = "Filter by file modification time (older than)",
        long_help
    )]
    pub changed_before: Option<String>,

    /// Filter files by their user and/or group.
    /// Format: [(user|uid)][:(group|gid)]. Either side is optional.
    /// Precede either side with a '!' to exclude files instead.
    ///
    /// Examples:
    /// {n} --owner john
    /// {n} --owner :students
    /// {n} --owner '!john:students'
    #[cfg(unix)]
    #[arg(long, short = 'o', value_parser = OwnerFilter::from_string, value_name = "user:group",
        help = "Filter by owning user and/or group",
        long_help,
    )]
    pub owner: Option<OwnerFilter>,

    /// Instead of printing the file normally, print the format string with the following placeholders replaced:
    /// '{}': path (of the current search result)
    /// '{/}': basename
    /// '{//}': parent directory
    /// '{.}': path without file extension
    /// '{/.}': basename without file extension
    #[arg(
        long,
        value_name = "fmt",
        help = "Print results according to template",
        conflicts_with = "list_details"
    )]
    pub format: Option<String>,

    #[command(flatten)]
    pub exec: Exec,

    /// Maximum number of arguments to pass to the command given with -X.
    /// If the number of results is greater than the given size,
    /// the command given with -X is run again with remaining arguments.
    /// A batch size of zero means there is no limit (default), but note
    /// that batching might still happen due to OS restrictions on the
    /// maximum length of command lines.
    #[arg(
        long,
        value_name = "size",
        hide_short_help = true,
        requires("exec_batch"),
        value_parser = value_parser!(usize),
        default_value_t,
        help = "Max number of arguments to run as a batch size with -X",
        long_help,
    )]
    pub batch_size: usize,

    /// Add a custom ignore-file in '.gitignore' format. These files have a low precedence.
    #[arg(
        long,
        value_name = "path",
        hide_short_help = true,
        help = "Add a custom ignore-file in '.gitignore' format",
        long_help
    )]
    pub ignore_file: Vec<PathBuf>,

    /// Declare when to use color for the pattern match output
    #[arg(
        long,
        short = 'c',
        value_enum,
        default_value_t = ColorWhen::Auto,
        value_name = "when",
        help = "When to use colors",
        long_help,
    )]
    pub color: ColorWhen,

    /// Add a terminal hyperlink to a file:// url for each path in the output.
    ///
    /// Auto mode is used if no argument is given to this option.
    ///
    /// This doesn't do anything for --exec and --exec-batch.
    #[arg(
        long,
        alias = "hyper",
        value_name = "when",
        require_equals = true,
        value_enum,
        default_value_t = HyperlinkWhen::Never,
        default_missing_value = "auto",
        num_args = 0..=1,
        help = "Add hyperlinks to output paths"
    )]
    pub hyperlink: HyperlinkWhen,

    /// Set number of threads to use for searching & executing (default: number
    /// of available CPU cores)
    #[arg(long, short = 'j', value_name = "num", hide_short_help = true, value_parser = str::parse::<NonZeroUsize>)]
    pub threads: Option<NonZeroUsize>,

    /// Milliseconds to buffer before streaming search results to console
    ///
    /// Amount of time in milliseconds to buffer, before streaming the search
    /// results to the console.
    #[arg(long, hide = true, value_parser = parse_millis)]
    pub max_buffer_time: Option<Duration>,

    ///Limit the number of search results to 'count' and quit immediately.
    #[arg(
        long,
        value_name = "count",
        hide_short_help = true,
        overrides_with("max_one_result"),
        help = "Limit the number of search results",
        long_help
    )]
    max_results: Option<usize>,

    /// Limit the search to a single result and quit immediately.
    /// This is an alias for '--max-results=1'.
    #[arg(
        short = '1',
        hide_short_help = true,
        overrides_with("max_results"),
        help = "Limit search to a single result",
        long_help
    )]
    max_one_result: bool,

    /// When the flag is present, the program does not print anything and will
    /// return with an exit code of 0 if there is at least one match. Otherwise, the
    /// exit code will be 1.
    /// '--has-results' can be used as an alias.
    #[arg(
        long,
        short = 'q',
        alias = "has-results",
        hide_short_help = true,
        conflicts_with("max_results"),
        help = "Print nothing, exit code 0 if match found, 1 otherwise",
        long_help
    )]
    pub quiet: bool,

    /// Enable the display of filesystem errors for situations such as
    /// insufficient permissions or dead symlinks.
    #[arg(
        long,
        hide_short_help = true,
        help = "Show filesystem errors",
        long_help
    )]
    pub show_errors: bool,

    /// Change the current working directory of fd to the provided path. This
    /// means that search results will be shown with respect to the given base
    /// path. Note that relative paths which are passed to fd via the positional
    /// <path> argument or the '--search-path' option will also be resolved
    /// relative to this directory.
    #[arg(
        long,
        short = 'C',
        value_name = "path",
        hide_short_help = true,
        help = "Change current working directory",
        long_help
    )]
    pub base_directory: Option<PathBuf>,

    /// the search pattern which is either a regular expression (default) or a glob
    /// pattern (if --glob is used). If no pattern has been specified, every entry
    /// is considered a match. If your pattern starts with a dash (-), make sure to
    /// pass '--' first, or it will be considered as a flag (fd -- '-foo').
    #[arg(
        default_value = "",
        hide_default_value = true,
        value_name = "pattern",
        help = "the search pattern (a regular expression, unless '--glob' is used; optional)",
        long_help
    )]
    pub pattern: String,

    /// Set the path separator to use when printing file paths. The default is
    /// the OS-specific separator ('/' on Unix, '\' on Windows).
    #[arg(
        long,
        value_name = "separator",
        hide_short_help = true,
        help = "Set path separator when printing file paths",
        long_help
    )]
    pub path_separator: Option<String>,

    /// The directory where the filesystem search is rooted (optional). If
    /// omitted, search the current working directory.
    #[arg(action = ArgAction::Append,
        value_name = "path",
        help = "the root directories for the filesystem search (optional)",
        long_help,
    )]
    path: Vec<PathBuf>,

    /// Provide paths to search as an alternative to the positional <path>
    /// argument. Changes the usage to `fd [OPTIONS] --search-path <path>
    /// --search-path <path2> [<pattern>]`
    #[arg(
        long,
        conflicts_with("path"),
        value_name = "search-path",
        hide_short_help = true,
        help = "Provides paths to search as an alternative to the positional <path> argument",
        long_help
    )]
    search_path: Vec<PathBuf>,

    /// By default, relative paths are prefixed with './' when -x/--exec,
    /// -X/--exec-batch, or -0/--print0 are given, to reduce the risk of a
    /// path starting with '-' being treated as a command line option. Use
    /// this flag to change this behavior. If this flag is used without a value,
    /// it is equivalent to passing "always".
    #[arg(long, conflicts_with_all(&["path", "search_path"]), value_name = "when",
        hide_short_help = true,
        require_equals = true,
        long_help)]
    strip_cwd_prefix: Option<Option<StripCwdWhen>>,

    /// By default, fd will traverse the file system tree as far as other options
    /// dictate. With this flag, fd ensures that it does not descend into a
    /// different file system than the one it started in. Comparable to the -mount
    /// or -xdev filters of find(1).
    #[cfg(any(unix, windows))]
    #[arg(long, aliases(&["mount", "xdev"]), hide_short_help = true, long_help)]
    pub one_file_system: bool,

    #[cfg(feature = "completions")]
    #[arg(long, hide = true, exclusive = true)]
    gen_completions: Option<Option<Shell>>,
}

impl Opts {
    // Resolve the root directories for the search: the positional <path>
    // arguments take precedence over --search-path, and the current working
    // directory is used when neither is given.
    pub fn search_paths(&self) -> anyhow::Result<Vec<PathBuf>> {
        // would it make sense to concatenate these?
        let paths = if !self.path.is_empty() {
            &self.path
        } else if !self.search_path.is_empty() {
            &self.search_path
        } else {
            let current_directory = Path::new("./");
            ensure_current_directory_exists(current_directory)?;
            return Ok(vec![self.normalize_path(current_directory)]);
        };
        // Non-directory paths are reported via print_error and skipped
        // rather than aborting the whole run.
        Ok(paths
            .iter()
            .filter_map(|path| {
                if filesystem::is_existing_directory(path) {
                    Some(self.normalize_path(path))
                } else {
                    print_error(format!(
                        "Search path '{}' is not a directory.",
                        path.to_string_lossy()
                    ));
                    None
                }
            })
            .collect())
    }

    // NOTE(review): the unwrap()s below panic if normalization or
    // absolutization fails — presumably the path was already validated by
    // is_existing_directory; confirm before relying on this elsewhere.
    fn normalize_path(&self, path: &Path) -> PathBuf {
        if self.absolute_path {
            filesystem::absolute_path(path.normalize().unwrap().as_path()).unwrap()
        } else if path == Path::new(".") {
            // Change "." to "./" as a workaround for https://github.com/BurntSushi/ripgrep/pull/2711
            PathBuf::from("./")
        } else {
            path.to_path_buf()
        }
    }

    pub fn no_search_paths(&self) -> bool {
        self.path.is_empty() && self.search_path.is_empty()
    }

    #[inline]
    pub fn rg_alias_ignore(&self) -> bool {
        self.rg_alias_hidden_ignore > 0
    }

    // --exact-depth acts as both the maximum and the minimum depth.
    pub fn max_depth(&self) -> Option<usize> {
        self.max_depth.or(self.exact_depth)
    }

    pub fn min_depth(&self) -> Option<usize> {
        self.min_depth.or(self.exact_depth)
    }

    pub fn threads(&self) -> NonZeroUsize {
        self.threads.unwrap_or_else(default_num_threads)
    }

    // A --max-results value of 0 is treated as "no limit"; -1 maps to
    // --max-results=1.
    pub fn max_results(&self) -> Option<usize> {
        self.max_results
            .filter(|&m| m > 0)
            .or_else(|| self.max_one_result.then_some(1))
    }

    pub fn strip_cwd_prefix<P: FnOnce() -> bool>(&self, auto_pred: P) -> bool {
        use self::StripCwdWhen::*;
        self.no_search_paths()
            && match self.strip_cwd_prefix.map_or(Auto, |o| o.unwrap_or(Always)) {
                Auto => auto_pred(),
                Always => true,
                Never => false,
            }
    }

    #[cfg(feature = "completions")]
    pub fn gen_completions(&self) -> anyhow::Result<Option<Shell>> {
        self.gen_completions
            .map(|maybe_shell| match maybe_shell {
                Some(sh) => Ok(sh),
                None => {
                    Shell::from_env().ok_or_else(|| anyhow!("Unable to get shell from environment"))
                }
            })
            .transpose()
    }
}

/// Get the default number of threads to use, if not
explicitly specified. fn default_num_threads() -> NonZeroUsize { // If we can't get the amount of parallelism for some reason, then // default to a single thread, because that is safe. let fallback = NonZeroUsize::MIN; // To limit startup overhead on massively parallel machines, don't use more // than 64 threads. let limit = NonZeroUsize::new(64).unwrap(); std::thread::available_parallelism() .unwrap_or(fallback) .min(limit) } #[derive(Copy, Clone, PartialEq, Eq, ValueEnum)] pub enum FileType { #[value(alias = "f")] File, #[value(alias = "d", alias = "dir")] Directory, #[value(alias = "l")] Symlink, #[value(alias = "b")] BlockDevice, #[value(alias = "c")] CharDevice, /// A file which is executable by the current effective user #[value(alias = "x")] Executable, #[value(alias = "e")] Empty, #[value(alias = "s")] Socket, #[value(alias = "p")] Pipe, } #[derive(Copy, Clone, PartialEq, Eq, Debug, ValueEnum)] pub enum ColorWhen { /// show colors if the output goes to an interactive console (default) Auto, /// always use colorized output Always, /// do not use colorized output Never, } #[derive(Copy, Clone, PartialEq, Eq, Debug, ValueEnum)] pub enum StripCwdWhen { /// Use the default behavior Auto, /// Always strip the ./ at the beginning of paths Always, /// Never strip the ./ Never, } #[derive(Copy, Clone, PartialEq, Eq, Debug, ValueEnum)] pub enum HyperlinkWhen { /// Use hyperlinks only if color is enabled Auto, /// Always use hyperlinks when printing file paths Always, /// Never use hyperlinks Never, } // there isn't a derive api for getting grouped values yet, // so we have to use hand-rolled parsing for exec and exec-batch pub struct Exec { pub command: Option<CommandSet>, } impl clap::FromArgMatches for Exec { fn from_arg_matches(matches: &ArgMatches) -> clap::error::Result<Self> { let command = matches .get_occurrences::<String>("exec") .map(CommandSet::new) .or_else(|| { matches .get_occurrences::<String>("exec_batch") .map(CommandSet::new_batch) }) .transpose() 
.map_err(|e| clap::Error::raw(ErrorKind::InvalidValue, e))?; Ok(Exec { command }) } fn update_from_arg_matches(&mut self, matches: &ArgMatches) -> clap::error::Result<()> { *self = Self::from_arg_matches(matches)?; Ok(()) } } impl clap::Args for Exec { fn augment_args(cmd: Command) -> Command { cmd.arg(Arg::new("exec") .action(ArgAction::Append) .long("exec") .short('x') .num_args(1..) .allow_hyphen_values(true) .value_terminator(";") .value_name("cmd") .conflicts_with("list_details") .help("Execute a command for each search result") .long_help( "Execute a command for each search result in parallel (use --threads=1 for sequential command execution). \ There is no guarantee of the order commands are executed in, and the order should not be depended upon. \ All positional arguments following --exec are considered to be arguments to the command - not to fd. \ It is therefore recommended to place the '-x'/'--exec' option last.\n\ The following placeholders are substituted before the command is executed:\n \ '{}': path (of the current search result)\n \ '{/}': basename\n \ '{//}': parent directory\n \ '{.}': path without file extension\n \ '{/.}': basename without file extension\n \ '{{': literal '{' (for escaping)\n \ '}}': literal '}' (for escaping)\n\n\ If no placeholder is present, an implicit \"{}\" at the end is assumed.\n\n\ Examples:\n\n \ - find all *.zip files and unzip them:\n\n \ fd -e zip -x unzip\n\n \ - find *.h and *.cpp files and run \"clang-format -i ..\" for each of them:\n\n \ fd -e h -e cpp -x clang-format -i\n\n \ - Convert all *.jpg files to *.png files:\n\n \ fd -e jpg -x convert {} {.}.png\ ", ), ) .arg( Arg::new("exec_batch") .action(ArgAction::Append) .long("exec-batch") .short('X') .num_args(1..) 
.allow_hyphen_values(true) .value_terminator(";") .value_name("cmd") .conflicts_with_all(["exec", "list_details"]) .help("Execute a command with all search results at once") .long_help( "Execute the given command once, with all search results as arguments.\n\ The order of the arguments is non-deterministic, and should not be relied upon.\n\ One of the following placeholders is substituted before the command is executed:\n \ '{}': path (of all search results)\n \ '{/}': basename\n \ '{//}': parent directory\n \ '{.}': path without file extension\n \
rust
Apache-2.0
5f95a781212e3efbc6d91ae50e0ca0ce0693da50
2026-01-04T15:31:59.463143Z
true
sharkdp/fd
https://github.com/sharkdp/fd/blob/5f95a781212e3efbc6d91ae50e0ca0ce0693da50/src/error.rs
src/error.rs
/// Print an error message to stderr, prefixed with the fd error tag.
///
/// Accepts anything convertible into a `String` (`&str`, `String`, ...),
/// so call sites can pass literals or `format!` results alike.
pub fn print_error(msg: impl Into<String>) {
    let text = msg.into();
    eprintln!("[fd error]: {}", text);
}
rust
Apache-2.0
5f95a781212e3efbc6d91ae50e0ca0ce0693da50
2026-01-04T15:31:59.463143Z
false
sharkdp/fd
https://github.com/sharkdp/fd/blob/5f95a781212e3efbc6d91ae50e0ca0ce0693da50/src/walk.rs
src/walk.rs
//! Parallel directory traversal: worker threads produce results which a
//! single receiver thread buffers, sorts (if fast enough), and prints.

use std::borrow::Cow;
use std::ffi::OsStr;
use std::io::{self, Write};
use std::mem;
use std::path::PathBuf;
use std::sync::atomic::{AtomicBool, Ordering};
use std::sync::{Arc, Mutex, MutexGuard};
use std::thread;
use std::time::{Duration, Instant};

use anyhow::{Result, anyhow};
use crossbeam_channel::{Receiver, RecvTimeoutError, SendError, Sender, bounded};
use etcetera::BaseStrategy;
use ignore::overrides::{Override, OverrideBuilder};
use ignore::{WalkBuilder, WalkParallel, WalkState};
use regex::bytes::Regex;

use crate::config::Config;
use crate::dir_entry::DirEntry;
use crate::error::print_error;
use crate::exec;
use crate::exit_codes::{ExitCode, merge_exitcodes};
use crate::filesystem;
use crate::output;

/// The receiver thread can either be buffering results or directly streaming to the console.
#[derive(PartialEq)]
enum ReceiverMode {
    /// Receiver is still buffering in order to sort the results, if the search finishes fast
    /// enough.
    Buffering,
    /// Receiver is directly printing results to the output.
    Streaming,
}

/// The Worker threads can result in a valid entry having PathBuf or an error.
#[allow(clippy::large_enum_variant)]
#[derive(Debug)]
pub enum WorkerResult {
    // Errors should be rare, so it's probably better to allow large_enum_variant than
    // to box the Entry variant
    Entry(DirEntry),
    Error(ignore::Error),
}

/// A batch of WorkerResults to send over a channel.
///
/// Cloning a `Batch` is cheap: all clones share the same `Arc`'d item list.
/// The inner `Option` lets the receiver take ownership of the items
/// (leaving `None`) while senders can detect that and start a fresh batch.
#[derive(Clone)]
struct Batch {
    items: Arc<Mutex<Option<Vec<WorkerResult>>>>,
}

impl Batch {
    /// Create an empty batch ready to accept items.
    fn new() -> Self {
        Self {
            items: Arc::new(Mutex::new(Some(vec![]))),
        }
    }

    /// Lock the item list. `unwrap` is acceptable here: a poisoned mutex
    /// means a worker panicked, which is already a bug.
    fn lock(&self) -> MutexGuard<'_, Option<Vec<WorkerResult>>> {
        self.items.lock().unwrap()
    }
}

impl IntoIterator for Batch {
    type Item = WorkerResult;
    type IntoIter = std::vec::IntoIter<WorkerResult>;

    // Consumes the batch's contents; `take().unwrap()` assumes the items
    // have not already been taken by another consumer.
    fn into_iter(self) -> Self::IntoIter {
        self.lock().take().unwrap().into_iter()
    }
}

/// Wrapper that sends batches of items at once over a channel.
struct BatchSender {
    // The batch currently being filled; shared (via Arc) with the receiver
    // once it has been sent.
    batch: Batch,
    tx: Sender<Batch>,
    // Maximum number of items per batch before a new batch is started.
    limit: usize,
}

impl BatchSender {
    fn new(tx: Sender<Batch>, limit: usize) -> Self {
        Self {
            batch: Batch::new(),
            tx,
            limit,
        }
    }

    /// Check if we need to flush a batch.
    fn needs_flush(&self, batch: Option<&Vec<WorkerResult>>) -> bool {
        match batch {
            // Limit the batch size to provide some backpressure
            Some(vec) => vec.len() >= self.limit,
            // Batch was already taken by the receiver, so make a new one
            None => true,
        }
    }

    /// Add an item to a batch.
    ///
    /// The batch itself is sent over the channel the moment its first item
    /// arrives (it is a shared handle, so the receiver sees later pushes too).
    fn send(&mut self, item: WorkerResult) -> Result<(), SendError<()>> {
        let mut batch = self.batch.lock();

        if self.needs_flush(batch.as_ref()) {
            // Drop the guard before replacing the batch to avoid deadlocking
            // on the fresh batch's lock.
            drop(batch);
            self.batch = Batch::new();
            batch = self.batch.lock();
        }

        let items = batch.as_mut().unwrap();
        items.push(item);

        if items.len() == 1 {
            // New batch, send it over the channel
            self.tx
                .send(self.batch.clone())
                .map_err(|_| SendError(()))?;
        }

        Ok(())
    }
}

/// Maximum size of the output buffer before flushing results to the console
const MAX_BUFFER_LENGTH: usize = 1000;

/// Default duration until output buffering switches to streaming.
const DEFAULT_MAX_BUFFER_TIME: Duration = Duration::from_millis(100);

/// Wrapper for the receiver thread's buffering behavior.
struct ReceiverBuffer<'a, W> {
    /// The configuration.
    config: &'a Config,
    /// For shutting down the senders.
    quit_flag: &'a AtomicBool,
    /// The ^C notifier.
    interrupt_flag: &'a AtomicBool,
    /// Receiver for worker results.
    rx: Receiver<Batch>,
    /// Standard output.
    stdout: W,
    /// The current buffer mode.
    mode: ReceiverMode,
    /// The deadline to switch to streaming mode.
    deadline: Instant,
    /// The buffer of quickly received paths.
    buffer: Vec<DirEntry>,
    /// Result count.
    num_results: usize,
}

impl<'a, W: Write> ReceiverBuffer<'a, W> {
    /// Create a new receiver buffer.
    fn new(state: &'a WorkerState, rx: Receiver<Batch>, stdout: W) -> Self {
        let config = &state.config;
        let quit_flag = state.quit_flag.as_ref();
        let interrupt_flag = state.interrupt_flag.as_ref();
        let max_buffer_time = config.max_buffer_time.unwrap_or(DEFAULT_MAX_BUFFER_TIME);
        // After this instant, buffered output switches to direct streaming.
        let deadline = Instant::now() + max_buffer_time;

        Self {
            config,
            quit_flag,
            interrupt_flag,
            rx,
            stdout,
            mode: ReceiverMode::Buffering,
            deadline,
            buffer: Vec::with_capacity(MAX_BUFFER_LENGTH),
            num_results: 0,
        }
    }

    /// Process results until finished.
    ///
    /// Loops until `poll` signals termination via `Err`; the error carries
    /// the process exit code. Also raises the quit flag so senders stop.
    fn process(&mut self) -> ExitCode {
        loop {
            if let Err(ec) = self.poll() {
                self.quit_flag.store(true, Ordering::Relaxed);
                return ec;
            }
        }
    }

    /// Receive the next worker result.
    fn recv(&self) -> Result<Batch, RecvTimeoutError> {
        match self.mode {
            ReceiverMode::Buffering => {
                // Wait at most until we should switch to streaming
                self.rx.recv_deadline(self.deadline)
            }
            ReceiverMode::Streaming => {
                // Wait however long it takes for a result
                Ok(self.rx.recv()?)
            }
        }
    }

    /// Wait for a result or state change.
    ///
    /// Returns `Err(exit_code)` to terminate the loop in `process`; `Ok(())`
    /// means "keep polling".
    fn poll(&mut self) -> Result<(), ExitCode> {
        match self.recv() {
            Ok(batch) => {
                for result in batch {
                    match result {
                        WorkerResult::Entry(dir_entry) => {
                            // In --quiet mode the first match decides the exit code.
                            if self.config.quiet {
                                return Err(ExitCode::HasResults(true));
                            }

                            match self.mode {
                                ReceiverMode::Buffering => {
                                    self.buffer.push(dir_entry);
                                    // Too many buffered results: give up on
                                    // sorting and stream from now on.
                                    if self.buffer.len() > MAX_BUFFER_LENGTH {
                                        self.stream()?;
                                    }
                                }
                                ReceiverMode::Streaming => {
                                    self.print(&dir_entry)?;
                                }
                            }

                            self.num_results += 1;
                            if let Some(max_results) = self.config.max_results
                                && self.num_results >= max_results
                            {
                                return self.stop();
                            }
                        }
                        WorkerResult::Error(err) => {
                            if self.config.show_filesystem_errors {
                                print_error(err.to_string());
                            }
                        }
                    }
                }

                // If we don't have another batch ready, flush before waiting
                if self.mode == ReceiverMode::Streaming && self.rx.is_empty() {
                    self.flush()?;
                }
            }
            Err(RecvTimeoutError::Timeout) => {
                // Buffering deadline passed: switch to streaming.
                self.stream()?;
            }
            Err(RecvTimeoutError::Disconnected) => {
                // All senders are gone; drain the buffer and finish.
                return self.stop();
            }
        }

        Ok(())
    }

    /// Output a path.
    fn print(&mut self, entry: &DirEntry) -> Result<(), ExitCode> {
        // A broken pipe (e.g. piping into `head`) is not reported as an error.
        if let Err(e) = output::print_entry(&mut self.stdout, entry, self.config)
            && e.kind() != ::std::io::ErrorKind::BrokenPipe
        {
            print_error(format!("Could not write to output: {e}"));
            return Err(ExitCode::GeneralError);
        }

        if self.interrupt_flag.load(Ordering::Relaxed) {
            // Ignore any errors on flush, because we're about to exit anyway
            let _ = self.flush();
            return Err(ExitCode::KilledBySigint);
        }

        Ok(())
    }

    /// Switch ourselves into streaming mode.
    fn stream(&mut self) -> Result<(), ExitCode> {
        self.mode = ReceiverMode::Streaming;

        // Print everything buffered so far, then continue streaming.
        let buffer = mem::take(&mut self.buffer);
        for path in buffer {
            self.print(&path)?;
        }

        self.flush()
    }

    /// Stop looping.
    ///
    /// Always returns `Err` so `process` exits its loop; `Err(Success)` is
    /// the normal-completion path, not a failure.
    fn stop(&mut self) -> Result<(), ExitCode> {
        if self.mode == ReceiverMode::Buffering {
            // The search finished within the buffer window: sort the results
            // before printing them.
            self.buffer.sort();
            self.stream()?;
        }

        if self.config.quiet {
            Err(ExitCode::HasResults(self.num_results > 0))
        } else {
            Err(ExitCode::Success)
        }
    }

    /// Flush stdout if necessary.
    fn flush(&mut self) -> Result<(), ExitCode> {
        if self.stdout.flush().is_err() {
            // Probably a broken pipe. Exit gracefully.
            return Err(ExitCode::GeneralError);
        }
        Ok(())
    }
}

/// State shared by the sender and receiver threads.
struct WorkerState {
    /// The search patterns.
    patterns: Vec<Regex>,
    /// The command line configuration.
    config: Config,
    /// Flag for cleanly shutting down the parallel walk
    quit_flag: Arc<AtomicBool>,
    /// Flag specifically for quitting due to ^C
    interrupt_flag: Arc<AtomicBool>,
}

impl WorkerState {
    fn new(patterns: Vec<Regex>, config: Config) -> Self {
        let quit_flag = Arc::new(AtomicBool::new(false));
        let interrupt_flag = Arc::new(AtomicBool::new(false));

        Self {
            patterns,
            config,
            quit_flag,
            interrupt_flag,
        }
    }

    /// Build the exclude-pattern overrides from `--exclude` options.
    ///
    /// NOTE: the overrides are rooted at the *first* search path; assumes
    /// `paths` is non-empty (indexing would panic otherwise).
    fn build_overrides(&self, paths: &[PathBuf]) -> Result<Override> {
        let first_path = &paths[0];
        let config = &self.config;

        let mut builder = OverrideBuilder::new(first_path);

        for pattern in &config.exclude_patterns {
            builder
                .add(pattern)
                .map_err(|e| anyhow!("Malformed exclude pattern: {}", e))?;
        }

        builder
            .build()
            .map_err(|_| anyhow!("Mismatch in exclude patterns"))
    }

    /// Construct the parallel walker from the configuration: ignore-file
    /// handling, symlink following, depth limits and additional search roots.
    fn build_walker(&self, paths: &[PathBuf]) -> Result<WalkParallel> {
        let first_path = &paths[0];
        let config = &self.config;
        let overrides = self.build_overrides(paths)?;
        let mut builder = WalkBuilder::new(first_path);
        builder
            .hidden(config.ignore_hidden)
            .ignore(config.read_fdignore)
            .parents(config.read_parent_ignore && (config.read_fdignore || config.read_vcsignore))
            .git_ignore(config.read_vcsignore)
            .git_global(config.read_vcsignore)
            .git_exclude(config.read_vcsignore)
            .require_git(config.require_git_to_read_vcsignore)
            .overrides(overrides)
            .follow_links(config.follow_links)
            // No need to check for supported platforms, option is unavailable on unsupported ones
            .same_file_system(config.one_file_system)
            .max_depth(config.max_depth);

        if config.read_fdignore {
            builder.add_custom_ignore_filename(".fdignore");
        }

        // Optional global ignore file at <config-dir>/fd/ignore.
        if config.read_global_ignore
            && let Ok(basedirs) = etcetera::choose_base_strategy()
        {
            let global_ignore_file = basedirs.config_dir().join("fd").join("ignore");
            if global_ignore_file.is_file() {
                let result = builder.add_ignore(global_ignore_file);
                match result {
                    // Partial errors mean some patterns still loaded; ignore.
                    Some(ignore::Error::Partial(_)) => (),
                    Some(err) => {
                        print_error(format!("Malformed pattern in global ignore file. {err}."));
                    }
                    None => (),
                }
            }
        }

        for ignore_file in &config.ignore_files {
            let result = builder.add_ignore(ignore_file);
            match result {
                Some(ignore::Error::Partial(_)) => (),
                Some(err) => {
                    print_error(format!("Malformed pattern in custom ignore file. {err}."));
                }
                None => (),
            }
        }

        // Any additional search roots beyond the first.
        for path in &paths[1..] {
            builder.add(path);
        }

        let walker = builder.threads(config.threads).build_parallel();
        Ok(walker)
    }

    /// Run the receiver work, either on this thread or a pool of background
    /// threads (for --exec).
    fn receive(&self, rx: Receiver<Batch>) -> ExitCode {
        let config = &self.config;

        // This will be set to `Some` if the `--exec` argument was supplied.
        if let Some(ref cmd) = config.command {
            if cmd.in_batch_mode() {
                // --exec-batch: one invocation with all results as arguments.
                exec::batch(rx.into_iter().flatten(), cmd, config)
            } else {
                // --exec: one job thread per configured thread, all pulling
                // from the same (cloned) channel receiver.
                thread::scope(|scope| {
                    // Each spawned job will store its thread handle in here.
                    let threads = config.threads;
                    let mut handles = Vec::with_capacity(threads);
                    for _ in 0..threads {
                        let rx = rx.clone();

                        // Spawn a job thread that will listen for and execute inputs.
                        let handle = scope.spawn(|| exec::job(rx.into_iter().flatten(), cmd, config));

                        // Push the handle of the spawned thread into the vector for later joining.
                        handles.push(handle);
                    }
                    let exit_codes = handles.into_iter().map(|handle| handle.join().unwrap());
                    merge_exitcodes(exit_codes)
                })
            }
        } else {
            // Plain printing: lock stdout once and buffer writes.
            let stdout = io::stdout().lock();
            let stdout = io::BufWriter::new(stdout);

            ReceiverBuffer::new(self, rx, stdout).process()
        }
    }

    /// Spawn the sender threads.
    fn spawn_senders(&self, walker: WalkParallel, tx: Sender<Batch>) {
        // `walker.run` invokes this closure once per walker thread; each
        // thread gets its own BatchSender over the shared channel.
        walker.run(|| {
            let patterns = &self.patterns;
            let config = &self.config;
            let quit_flag = self.quit_flag.as_ref();

            let mut limit = 0x100;
            if let Some(cmd) = &config.command
                && !cmd.in_batch_mode()
                && config.threads > 1
            {
                // Evenly distribute work between multiple receivers
                limit = 1;
            }
            let mut tx = BatchSender::new(tx.clone(), limit);

            Box::new(move |entry| {
                if quit_flag.load(Ordering::Relaxed) {
                    return WalkState::Quit;
                }

                let entry = match entry {
                    Ok(ref e) if e.depth() == 0 => {
                        // Skip the root directory entry.
                        return WalkState::Continue;
                    }
                    Ok(e) => DirEntry::normal(e),
                    Err(ignore::Error::WithPath {
                        path,
                        err: inner_err,
                    }) => match inner_err.as_ref() {
                        // A NotFound error on a path that *is* a symlink means
                        // the link target is missing: report a broken symlink
                        // instead of an error.
                        ignore::Error::Io(io_error)
                            if io_error.kind() == io::ErrorKind::NotFound
                                && path
                                    .symlink_metadata()
                                    .ok()
                                    .is_some_and(|m| m.file_type().is_symlink()) =>
                        {
                            DirEntry::broken_symlink(path)
                        }
                        _ => {
                            return match tx.send(WorkerResult::Error(ignore::Error::WithPath {
                                path,
                                err: inner_err,
                            })) {
                                Ok(_) => WalkState::Continue,
                                Err(_) => WalkState::Quit,
                            };
                        }
                    },
                    Err(err) => {
                        return match tx.send(WorkerResult::Error(err)) {
                            Ok(_) => WalkState::Continue,
                            Err(_) => WalkState::Quit,
                        };
                    }
                };

                if let Some(min_depth) = config.min_depth
                    && entry.depth().is_none_or(|d| d < min_depth)
                {
                    return WalkState::Continue;
                }

                // Check the name first, since it doesn't require metadata
                let entry_path = entry.path();

                let search_str: Cow<OsStr> = if config.search_full_path {
                    let path_abs_buf = filesystem::path_absolute_form(entry_path)
                        .expect("Retrieving absolute path succeeds");
                    Cow::Owned(path_abs_buf.as_os_str().to_os_string())
                } else {
                    match entry_path.file_name() {
                        Some(filename) => Cow::Borrowed(filename),
                        None => unreachable!(
                            "Encountered file system entry without a file name. This should only \
                             happen for paths like 'foo/bar/..' or '/' which are not supposed to \
                             appear in a file system traversal."
                        ),
                    }
                };

                // All patterns must match (fd supports multiple patterns).
                if !patterns
                    .iter()
                    .all(|pat| pat.is_match(&filesystem::osstr_to_bytes(search_str.as_ref())))
                {
                    return WalkState::Continue;
                }

                // Filter out unwanted extensions.
                if let Some(ref exts_regex) = config.extensions {
                    if let Some(path_str) = entry_path.file_name() {
                        if !exts_regex.is_match(&filesystem::osstr_to_bytes(path_str)) {
                            return WalkState::Continue;
                        }
                    } else {
                        return WalkState::Continue;
                    }
                }

                // Filter out unwanted file types.
                if let Some(ref file_types) = config.file_types
                    && file_types.should_ignore(&entry)
                {
                    return WalkState::Continue;
                }

                #[cfg(unix)]
                {
                    if let Some(ref owner_constraint) = config.owner_constraint {
                        if let Some(metadata) = entry.metadata() {
                            if !owner_constraint.matches(metadata) {
                                return WalkState::Continue;
                            }
                        } else {
                            // Metadata unavailable: treat as non-matching.
                            return WalkState::Continue;
                        }
                    }
                }

                // Filter out unwanted sizes if it is a file and we have been given size constraints.
                if !config.size_constraints.is_empty() {
                    if entry_path.is_file() {
                        if let Some(metadata) = entry.metadata() {
                            let file_size = metadata.len();
                            if config
                                .size_constraints
                                .iter()
                                .any(|sc| !sc.is_within(file_size))
                            {
                                return WalkState::Continue;
                            }
                        } else {
                            return WalkState::Continue;
                        }
                    } else {
                        // Size constraints only ever match regular files.
                        return WalkState::Continue;
                    }
                }

                // Filter out unwanted modification times
                if !config.time_constraints.is_empty() {
                    let mut matched = false;
                    if let Some(metadata) = entry.metadata()
                        && let Ok(modified) = metadata.modified()
                    {
                        matched = config
                            .time_constraints
                            .iter()
                            .all(|tf| tf.applies_to(&modified));
                    }
                    if !matched {
                        return WalkState::Continue;
                    }
                }

                if config.is_printing()
                    && let Some(ls_colors) = &config.ls_colors
                {
                    // Compute colors in parallel
                    entry.style(ls_colors);
                }

                let send_result = tx.send(WorkerResult::Entry(entry));

                if send_result.is_err() {
                    // Receiver hung up (quiet mode / max results): stop walking.
                    return WalkState::Quit;
                }

                // Apply pruning.
                if config.prune {
                    return WalkState::Skip;
                }

                WalkState::Continue
            })
        });
    }

    /// Perform the recursive scan.
    fn scan(&self, paths: &[PathBuf]) -> Result<ExitCode> {
        let config = &self.config;
        let walker = self.build_walker(paths)?;

        if config.ls_colors.is_some() && config.is_printing() {
            let quit_flag = Arc::clone(&self.quit_flag);
            let interrupt_flag = Arc::clone(&self.interrupt_flag);

            // Install a ^C handler: first press requests a clean shutdown,
            // second press exits immediately.
            ctrlc::set_handler(move || {
                quit_flag.store(true, Ordering::Relaxed);

                if interrupt_flag.fetch_or(true, Ordering::Relaxed) {
                    // Ctrl-C has been pressed twice, exit NOW
                    ExitCode::KilledBySigint.exit();
                }
            })
            .unwrap();
        }

        // Bounded channel: provides backpressure from receiver to senders.
        let (tx, rx) = bounded(2 * config.threads);

        let exit_code = thread::scope(|scope| {
            // Spawn the receiver thread(s)
            let receiver = scope.spawn(|| self.receive(rx));

            // Spawn the sender threads.
            self.spawn_senders(walker, tx);

            receiver.join().unwrap()
        });

        if self.interrupt_flag.load(Ordering::Relaxed) {
            Ok(ExitCode::KilledBySigint)
        } else {
            Ok(exit_code)
        }
    }
}

/// Recursively scan the given search path for files / pathnames matching the patterns.
///
/// If the `--exec` argument was supplied, this will create a thread pool for executing
/// jobs in parallel from a given command line and the discovered paths. Otherwise, each
/// path will simply be written to standard output.
pub fn scan(paths: &[PathBuf], patterns: Vec<Regex>, config: Config) -> Result<ExitCode> {
    WorkerState::new(patterns, config).scan(paths)
}
rust
Apache-2.0
5f95a781212e3efbc6d91ae50e0ca0ce0693da50
2026-01-04T15:31:59.463143Z
false
sharkdp/fd
https://github.com/sharkdp/fd/blob/5f95a781212e3efbc6d91ae50e0ca0ce0693da50/src/dir_entry.rs
src/dir_entry.rs
//! A search-result entry: either a normal directory entry from the `ignore`
//! walker, or a broken symlink reported separately. Metadata and color style
//! are computed lazily and cached.

use std::cell::OnceCell;
use std::ffi::OsString;
use std::fs::{FileType, Metadata};
use std::path::{Path, PathBuf};

use lscolors::{Colorable, LsColors, Style};

use crate::config::Config;
use crate::filesystem::strip_current_dir;

#[derive(Debug)]
enum DirEntryInner {
    // An entry produced by the `ignore` walker.
    Normal(ignore::DirEntry),
    // A symlink whose target does not exist; only its path is known.
    BrokenSymlink(PathBuf),
}

#[derive(Debug)]
pub struct DirEntry {
    inner: DirEntryInner,
    // Lazily-computed metadata; `None` inside the cell means the lookup failed.
    metadata: OnceCell<Option<Metadata>>,
    // Lazily-computed ls-colors style for this entry.
    style: OnceCell<Option<Style>>,
}

impl DirEntry {
    /// Wrap a regular walker entry.
    #[inline]
    pub fn normal(e: ignore::DirEntry) -> Self {
        Self {
            inner: DirEntryInner::Normal(e),
            metadata: OnceCell::new(),
            style: OnceCell::new(),
        }
    }

    /// Wrap the path of a broken symlink.
    pub fn broken_symlink(path: PathBuf) -> Self {
        Self {
            inner: DirEntryInner::BrokenSymlink(path),
            metadata: OnceCell::new(),
            style: OnceCell::new(),
        }
    }

    /// The entry's path, borrowed.
    pub fn path(&self) -> &Path {
        match &self.inner {
            DirEntryInner::Normal(e) => e.path(),
            DirEntryInner::BrokenSymlink(pathbuf) => pathbuf.as_path(),
        }
    }

    /// The entry's path, consuming the entry (avoids a copy for Normal entries).
    pub fn into_path(self) -> PathBuf {
        match self.inner {
            DirEntryInner::Normal(e) => e.into_path(),
            DirEntryInner::BrokenSymlink(p) => p,
        }
    }

    /// Returns the path as it should be presented to the user.
    pub fn stripped_path(&self, config: &Config) -> &Path {
        if config.strip_cwd_prefix {
            strip_current_dir(self.path())
        } else {
            self.path()
        }
    }

    /// Returns the path as it should be presented to the user.
    pub fn into_stripped_path(self, config: &Config) -> PathBuf {
        if config.strip_cwd_prefix {
            self.stripped_path(config).to_path_buf()
        } else {
            self.into_path()
        }
    }

    /// File type, if determinable. For broken symlinks this falls back to
    /// (cached) symlink metadata.
    pub fn file_type(&self) -> Option<FileType> {
        match &self.inner {
            DirEntryInner::Normal(e) => e.file_type(),
            DirEntryInner::BrokenSymlink(_) => self.metadata().map(|m| m.file_type()),
        }
    }

    /// Metadata for the entry, computed once and cached. Returns `None` if
    /// the metadata lookup failed.
    pub fn metadata(&self) -> Option<&Metadata> {
        self.metadata
            .get_or_init(|| match &self.inner {
                DirEntryInner::Normal(e) => e.metadata().ok(),
                // For a broken symlink, stat the link itself, not its target.
                DirEntryInner::BrokenSymlink(path) => path.symlink_metadata().ok(),
            })
            .as_ref()
    }

    /// Depth below the search root; unknown for broken symlinks.
    pub fn depth(&self) -> Option<usize> {
        match &self.inner {
            DirEntryInner::Normal(e) => Some(e.depth()),
            DirEntryInner::BrokenSymlink(_) => None,
        }
    }

    /// The ls-colors style for this entry, computed once and cached.
    pub fn style(&self, ls_colors: &LsColors) -> Option<&Style> {
        self.style
            .get_or_init(|| ls_colors.style_for(self).cloned())
            .as_ref()
    }
}

// Equality and ordering are by path only; cached metadata/style are ignored.
impl PartialEq for DirEntry {
    #[inline]
    fn eq(&self, other: &Self) -> bool {
        self.path() == other.path()
    }
}
impl Eq for DirEntry {}

impl PartialOrd for DirEntry {
    #[inline]
    fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
        Some(self.cmp(other))
    }
}

impl Ord for DirEntry {
    #[inline]
    fn cmp(&self, other: &Self) -> std::cmp::Ordering {
        self.path().cmp(other.path())
    }
}

// Allows lscolors to pick a style for this entry.
impl Colorable for DirEntry {
    fn path(&self) -> PathBuf {
        self.path().to_owned()
    }

    fn file_name(&self) -> OsString {
        let name = match &self.inner {
            DirEntryInner::Normal(e) => e.file_name(),
            DirEntryInner::BrokenSymlink(path) => {
                // Path::file_name() only works if the last component is Normal,
                // but we want it for all component types, so we open code it.
                // Copied from LsColors::style_for_path_with_metadata().
                path.components()
                    .next_back()
                    .map(|c| c.as_os_str())
                    .unwrap_or_else(|| path.as_os_str())
            }
        };
        name.to_owned()
    }

    fn file_type(&self) -> Option<FileType> {
        self.file_type()
    }

    fn metadata(&self) -> Option<Metadata> {
        self.metadata().cloned()
    }
}
rust
Apache-2.0
5f95a781212e3efbc6d91ae50e0ca0ce0693da50
2026-01-04T15:31:59.463143Z
false
sharkdp/fd
https://github.com/sharkdp/fd/blob/5f95a781212e3efbc6d91ae50e0ca0ce0693da50/src/exit_codes.rs
src/exit_codes.rs
//! Process exit codes used by fd, and helpers to merge and apply them.

use std::process;

#[cfg(unix)]
use nix::sys::signal::{SigHandler, Signal, raise, signal};

#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum ExitCode {
    Success,
    // Used for --quiet: the bool records whether any match was found.
    HasResults(bool),
    GeneralError,
    KilledBySigint,
}

impl From<ExitCode> for i32 {
    fn from(code: ExitCode) -> Self {
        match code {
            ExitCode::Success => 0,
            // true -> 0 (match found), false -> 1 (no match), like grep -q.
            ExitCode::HasResults(has_results) => !has_results as i32,
            ExitCode::GeneralError => 1,
            // Conventional 128 + SIGINT(2).
            ExitCode::KilledBySigint => 130,
        }
    }
}

impl ExitCode {
    fn is_error(self) -> bool {
        i32::from(self) != 0
    }

    /// Exit the process with the appropriate code.
    pub fn exit(self) -> ! {
        #[cfg(unix)]
        if self == ExitCode::KilledBySigint {
            // Get rid of the SIGINT handler, if present, and raise SIGINT
            // so the shell sees the process as signal-terminated rather than
            // merely exiting with status 130.
            unsafe {
                if signal(Signal::SIGINT, SigHandler::SigDfl).is_ok() {
                    let _ = raise(Signal::SIGINT);
                }
            }
        }

        process::exit(self.into())
    }
}

/// Collapse a set of exit codes into one: any error yields GeneralError
/// (signal-specific codes are not preserved), otherwise Success.
pub fn merge_exitcodes(results: impl IntoIterator<Item = ExitCode>) -> ExitCode {
    if results.into_iter().any(ExitCode::is_error) {
        return ExitCode::GeneralError;
    }
    ExitCode::Success
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn success_when_no_results() {
        assert_eq!(merge_exitcodes([]), ExitCode::Success);
    }

    #[test]
    fn general_error_if_at_least_one_error() {
        assert_eq!(
            merge_exitcodes([ExitCode::GeneralError]),
            ExitCode::GeneralError
        );
        assert_eq!(
            merge_exitcodes([ExitCode::KilledBySigint]),
            ExitCode::GeneralError
        );
        assert_eq!(
            merge_exitcodes([ExitCode::KilledBySigint, ExitCode::Success]),
            ExitCode::GeneralError
        );
        assert_eq!(
            merge_exitcodes([ExitCode::Success, ExitCode::GeneralError]),
            ExitCode::GeneralError
        );
        assert_eq!(
            merge_exitcodes([ExitCode::GeneralError, ExitCode::KilledBySigint]),
            ExitCode::GeneralError
        );
    }

    #[test]
    fn success_if_no_error() {
        assert_eq!(merge_exitcodes([ExitCode::Success]), ExitCode::Success);
        assert_eq!(
            merge_exitcodes([ExitCode::Success, ExitCode::Success]),
            ExitCode::Success
        );
    }
}
rust
Apache-2.0
5f95a781212e3efbc6d91ae50e0ca0ce0693da50
2026-01-04T15:31:59.463143Z
false
sharkdp/fd
https://github.com/sharkdp/fd/blob/5f95a781212e3efbc6d91ae50e0ca0ce0693da50/src/main.rs
src/main.rs
mod cli; mod config; mod dir_entry; mod error; mod exec; mod exit_codes; mod filesystem; mod filetypes; mod filter; mod fmt; mod hyperlink; mod output; mod regex_helper; mod walk; use std::env; use std::io::IsTerminal; use std::path::Path; use std::sync::Arc; use anyhow::{Context, Result, anyhow, bail}; use clap::{CommandFactory, Parser}; use globset::GlobBuilder; use lscolors::LsColors; use regex::bytes::{Regex, RegexBuilder, RegexSetBuilder}; use crate::cli::{ColorWhen, HyperlinkWhen, Opts}; use crate::config::Config; use crate::exec::CommandSet; use crate::exit_codes::ExitCode; use crate::filetypes::FileTypes; #[cfg(unix)] use crate::filter::OwnerFilter; use crate::filter::TimeFilter; use crate::regex_helper::{pattern_has_uppercase_char, pattern_matches_strings_with_leading_dot}; // We use jemalloc for performance reasons, see https://github.com/sharkdp/fd/pull/481 // FIXME: re-enable jemalloc on macOS, see comment in Cargo.toml file for more infos // This has to be kept in sync with the Cargo.toml file section that declares a // dependency on tikv-jemallocator. 
#[cfg(all( not(windows), not(target_os = "android"), not(target_os = "macos"), not(target_os = "freebsd"), not(target_os = "openbsd"), not(target_os = "illumos"), not(all(target_env = "musl", target_pointer_width = "32")), not(target_arch = "riscv64"), feature = "use-jemalloc" ))] #[global_allocator] static ALLOC: tikv_jemallocator::Jemalloc = tikv_jemallocator::Jemalloc; // vivid --color-mode 8-bit generate molokai const DEFAULT_LS_COLORS: &str = " ow=0:or=0;38;5;16;48;5;203:no=0:ex=1;38;5;203:cd=0;38;5;203;48;5;236:mi=0;38;5;16;48;5;203:*~=0;38;5;243:st=0:pi=0;38;5;16;48;5;81:fi=0:di=0;38;5;81:so=0;38;5;16;48;5;203:bd=0;38;5;81;48;5;236:tw=0:ln=0;38;5;203:*.m=0;38;5;48:*.o=0;38;5;243:*.z=4;38;5;203:*.a=1;38;5;203:*.r=0;38;5;48:*.c=0;38;5;48:*.d=0;38;5;48:*.t=0;38;5;48:*.h=0;38;5;48:*.p=0;38;5;48:*.cc=0;38;5;48:*.ll=0;38;5;48:*.jl=0;38;5;48:*css=0;38;5;48:*.md=0;38;5;185:*.gz=4;38;5;203:*.nb=0;38;5;48:*.mn=0;38;5;48:*.go=0;38;5;48:*.xz=4;38;5;203:*.so=1;38;5;203:*.rb=0;38;5;48:*.pm=0;38;5;48:*.bc=0;38;5;243:*.py=0;38;5;48:*.as=0;38;5;48:*.pl=0;38;5;48:*.rs=0;38;5;48:*.sh=0;38;5;48:*.7z=4;38;5;203:*.ps=0;38;5;186:*.cs=0;38;5;48:*.el=0;38;5;48:*.rm=0;38;5;208:*.hs=0;38;5;48:*.td=0;38;5;48:*.ui=0;38;5;149:*.ex=0;38;5;48:*.js=0;38;5;48:*.cp=0;38;5;48:*.cr=0;38;5;48:*.la=0;38;5;243:*.kt=0;38;5;48:*.ml=0;38;5;48:*.vb=0;38;5;48:*.gv=0;38;5;48:*.lo=0;38;5;243:*.hi=0;38;5;243:*.ts=0;38;5;48:*.ko=1;38;5;203:*.hh=0;38;5;48:*.pp=0;38;5;48:*.di=0;38;5;48:*.bz=4;38;5;203:*.fs=0;38;5;48:*.png=0;38;5;208:*.zsh=0;38;5;48:*.mpg=0;38;5;208:*.pid=0;38;5;243:*.xmp=0;38;5;149:*.iso=4;38;5;203:*.m4v=0;38;5;208:*.dot=0;38;5;48:*.ods=0;38;5;186:*.inc=0;38;5;48:*.sxw=0;38;5;186:*.aif=0;38;5;208:*.git=0;38;5;243:*.gvy=0;38;5;48:*.tbz=4;38;5;203:*.log=0;38;5;243:*.txt=0;38;5;185:*.ico=0;38;5;208:*.csx=0;38;5;48:*.vob=0;38;5;208:*.pgm=0;38;5;208:*.pps=0;38;5;186:*.ics=0;38;5;186:*.img=4;38;5;203:*.fon=0;38;5;208:*.hpp=0;38;5;48:*.bsh=0;38;5;48:*.sql=0;38;5;48:*TODO=1:*.php=0;38;5;48:*.pkg=4;38
;5;203:*.ps1=0;38;5;48:*.csv=0;38;5;185:*.ilg=0;38;5;243:*.ini=0;38;5;149:*.pyc=0;38;5;243:*.psd=0;38;5;208:*.htc=0;38;5;48:*.swp=0;38;5;243:*.mli=0;38;5;48:*hgrc=0;38;5;149:*.bst=0;38;5;149:*.ipp=0;38;5;48:*.fsi=0;38;5;48:*.tcl=0;38;5;48:*.exs=0;38;5;48:*.out=0;38;5;243:*.jar=4;38;5;203:*.xls=0;38;5;186:*.ppm=0;38;5;208:*.apk=4;38;5;203:*.aux=0;38;5;243:*.rpm=4;38;5;203:*.dll=1;38;5;203:*.eps=0;38;5;208:*.exe=1;38;5;203:*.doc=0;38;5;186:*.wma=0;38;5;208:*.deb=4;38;5;203:*.pod=0;38;5;48:*.ind=0;38;5;243:*.nix=0;38;5;149:*.lua=0;38;5;48:*.epp=0;38;5;48:*.dpr=0;38;5;48:*.htm=0;38;5;185:*.ogg=0;38;5;208:*.bin=4;38;5;203:*.otf=0;38;5;208:*.yml=0;38;5;149:*.pro=0;38;5;149:*.cxx=0;38;5;48:*.tex=0;38;5;48:*.fnt=0;38;5;208:*.erl=0;38;5;48:*.sty=0;38;5;243:*.bag=4;38;5;203:*.rst=0;38;5;185:*.pdf=0;38;5;186:*.pbm=0;38;5;208:*.xcf=0;38;5;208:*.clj=0;38;5;48:*.gif=0;38;5;208:*.rar=4;38;5;203:*.elm=0;38;5;48:*.bib=0;38;5;149:*.tsx=0;38;5;48:*.dmg=4;38;5;203:*.tmp=0;38;5;243:*.bcf=0;38;5;243:*.mkv=0;38;5;208:*.svg=0;38;5;208:*.cpp=0;38;5;48:*.vim=0;38;5;48:*.bmp=0;38;5;208:*.ltx=0;38;5;48:*.fls=0;38;5;243:*.flv=0;38;5;208:*.wav=0;38;5;208:*.m4a=0;38;5;208:*.mid=0;38;5;208:*.hxx=0;38;5;48:*.pas=0;38;5;48:*.wmv=0;38;5;208:*.tif=0;38;5;208:*.kex=0;38;5;186:*.mp4=0;38;5;208:*.bak=0;38;5;243:*.xlr=0;38;5;186:*.dox=0;38;5;149:*.swf=0;38;5;208:*.tar=4;38;5;203:*.tgz=4;38;5;203:*.cfg=0;38;5;149:*.xml=0; 
38;5;185:*.jpg=0;38;5;208:*.mir=0;38;5;48:*.sxi=0;38;5;186:*.bz2=4;38;5;203:*.odt=0;38;5;186:*.mov=0;38;5;208:*.toc=0;38;5;243:*.bat=1;38;5;203:*.asa=0;38;5;48:*.awk=0;38;5;48:*.sbt=0;38;5;48:*.vcd=4;38;5;203:*.kts=0;38;5;48:*.arj=4;38;5;203:*.blg=0;38;5;243:*.c++=0;38;5;48:*.odp=0;38;5;186:*.bbl=0;38;5;243:*.idx=0;38;5;243:*.com=1;38;5;203:*.mp3=0;38;5;208:*.avi=0;38;5;208:*.def=0;38;5;48:*.cgi=0;38;5;48:*.zip=4;38;5;203:*.ttf=0;38;5;208:*.ppt=0;38;5;186:*.tml=0;38;5;149:*.fsx=0;38;5;48:*.h++=0;38;5;48:*.rtf=0;38;5;186:*.inl=0;38;5;48:*.yaml=0;38;5;149:*.html=0;38;5;185:*.mpeg=0;38;5;208:*.java=0;38;5;48:*.hgrc=0;38;5;149:*.orig=0;38;5;243:*.conf=0;38;5;149:*.dart=0;38;5;48:*.psm1=0;38;5;48:*.rlib=0;38;5;243:*.fish=0;38;5;48:*.bash=0;38;5;48:*.make=0;38;5;149:*.docx=0;38;5;186:*.json=0;38;5;149:*.psd1=0;38;5;48:*.lisp=0;38;5;48:*.tbz2=4;38;5;203:*.diff=0;38;5;48:*.epub=0;38;5;186:*.xlsx=0;38;5;186:*.pptx=0;38;5;186:*.toml=0;38;5;149:*.h264=0;38;5;208:*.purs=0;38;5;48:*.flac=0;38;5;208:*.tiff=0;38;5;208:*.jpeg=0;38;5;208:*.lock=0;38;5;243:*.less=0;38;5;48:*.dyn_o=0;38;5;243:*.scala=0;38;5;48:*.mdown=0;38;5;185:*.shtml=0;38;5;185:*.class=0;38;5;243:*.cache=0;38;5;243:*.cmake=0;38;5;149:*passwd=0;38;5;149:*.swift=0;38;5;48:*shadow=0;38;5;149:*.xhtml=0;38;5;185:*.patch=0;38;5;48:*.cabal=0;38;5;48:*README=0;38;5;16;48;5;186:*.toast=4;38;5;203:*.ipynb=0;38;5;48:*COPYING=0;38;5;249:*.gradle=0;38;5;48:*.matlab=0;38;5;48:*.config=0;38;5;149:*LICENSE=0;38;5;249:*.dyn_hi=0;38;5;243:*.flake8=0;38;5;149:*.groovy=0;38;5;48:*INSTALL=0;38;5;16;48;5;186:*TODO.md=1:*.ignore=0;38;5;149:*Doxyfile=0;38;5;149:*TODO.txt=1:*setup.py=0;38;5;149:*Makefile=0;38;5;149:*.gemspec=0;38;5;149:*.desktop=0;38;5;149:*.rgignore=0;38;5;149:*.markdown=0;38;5;185:*COPYRIGHT=0;38;5;249:*configure=0;38;5;149:*.DS_Store=0;38;5;243:*.kdevelop=0;38;5;149:*.fdignore=0;38;5;149:*README.md=0;38;5;16;48;5;186:*.cmake.in=0;38;5;149:*SConscript=0;38;5;149:*CODEOWNERS=0;38;5;149:*.localized=0;38;5;243:*.gitignore=0
;38;5;149:*Dockerfile=0;38;5;149:*.gitconfig=0;38;5;149:*INSTALL.md=0;38;5;16;48;5;186:*README.txt=0;38;5;16;48;5;186:*SConstruct=0;38;5;149:*.scons_opt=0;38;5;243:*.travis.yml=0;38;5;186:*.gitmodules=0;38;5;149:*.synctex.gz=0;38;5;243:*LICENSE-MIT=0;38;5;249:*MANIFEST.in=0;38;5;149:*Makefile.in=0;38;5;243:*Makefile.am=0;38;5;149:*INSTALL.txt=0;38;5;16;48;5;186:*configure.ac=0;38;5;149:*.applescript=0;38;5;48:*appveyor.yml=0;38;5;186:*.fdb_latexmk=0;38;5;243:*CONTRIBUTORS=0;38;5;16;48;5;186:*.clang-format=0;38;5;149:*LICENSE-APACHE=0;38;5;249:*CMakeLists.txt=0;38;5;149:*CMakeCache.txt=0;38;5;243:*.gitattributes=0;38;5;149:*CONTRIBUTORS.md=0;38;5;16;48;5;186:*.sconsign.dblite=0;38;5;243:*requirements.txt=0;38;5;149:*CONTRIBUTORS.txt=0;38;5;16;48;5;186:*package-lock.json=0;38;5;243:*.CFUserTextEncoding=0;38;5;243 "; fn main() { let result = run(); match result { Ok(exit_code) => { exit_code.exit(); } Err(err) => { eprintln!("[fd error]: {err:#}"); ExitCode::GeneralError.exit(); } } } fn run() -> Result<ExitCode> { let opts = Opts::parse(); #[cfg(feature = "completions")] if let Some(shell) = opts.gen_completions()? 
{ return print_completions(shell); } set_working_dir(&opts)?; let search_paths = opts.search_paths()?; if search_paths.is_empty() { bail!("No valid search paths given."); } ensure_search_pattern_is_not_a_path(&opts)?; let pattern = &opts.pattern; let exprs = &opts.exprs; let empty = Vec::new(); let pattern_regexps = exprs .as_ref() .unwrap_or(&empty) .iter() .chain([pattern]) .map(|pat| build_pattern_regex(pat, &opts)) .collect::<Result<Vec<String>>>()?; let config = construct_config(opts, &pattern_regexps)?; ensure_use_hidden_option_for_leading_dot_pattern(&config, &pattern_regexps)?; let regexps = pattern_regexps .into_iter() .map(|pat| build_regex(pat, &config)) .collect::<Result<Vec<Regex>>>()?; walk::scan(&search_paths, regexps, config) } #[cfg(feature = "completions")] #[cold] fn print_completions(shell: clap_complete::Shell) -> Result<ExitCode> { // The program name is the first argument. let first_arg = env::args().next(); let program_name = first_arg .as_ref() .map(Path::new) .and_then(|path| path.file_stem()) .and_then(|file| file.to_str()) .unwrap_or("fd"); let mut cmd = Opts::command(); cmd.build(); clap_complete::generate(shell, &mut cmd, program_name, &mut std::io::stdout()); Ok(ExitCode::Success) } fn set_working_dir(opts: &Opts) -> Result<()> { if let Some(ref base_directory) = opts.base_directory { if !filesystem::is_existing_directory(base_directory) { return Err(anyhow!( "The '--base-directory' path '{}' is not a directory.", base_directory.to_string_lossy() )); } env::set_current_dir(base_directory).with_context(|| { format!( "Could not set '{}' as the current working directory", base_directory.to_string_lossy() ) })?; } Ok(()) } /// Detect if the user accidentally supplied a path instead of a search pattern fn ensure_search_pattern_is_not_a_path(opts: &Opts) -> Result<()> { if !opts.full_path && opts.pattern.contains(std::path::MAIN_SEPARATOR) && Path::new(&opts.pattern).is_dir() { Err(anyhow!( "The search pattern '{pattern}' contains a 
path-separation character ('{sep}') \ and will not lead to any search results.\n\n\ If you want to search for all files inside the '{pattern}' directory, use a match-all pattern:\n\n \ fd . '{pattern}'\n\n\ Instead, if you want your pattern to match the full file path, use:\n\n \ fd --full-path '{pattern}'", pattern = &opts.pattern, sep = std::path::MAIN_SEPARATOR, )) } else { Ok(()) } } fn build_pattern_regex(pattern: &str, opts: &Opts) -> Result<String> { Ok(if opts.glob && !pattern.is_empty() { let glob = GlobBuilder::new(pattern).literal_separator(true).build()?; glob.regex().to_owned() } else if opts.fixed_strings { // Treat pattern as literal string if '--fixed-strings' is used regex::escape(pattern) } else { String::from(pattern) }) } fn check_path_separator_length(path_separator: Option<&str>) -> Result<()> { match (cfg!(windows), path_separator) { (true, Some(sep)) if sep.len() > 1 => Err(anyhow!( "A path separator must be exactly one byte, but \ the given separator is {} bytes: '{}'.\n\ In some shells on Windows, '/' is automatically \ expanded. Try to use '//' instead.", sep.len(), sep )), _ => Ok(()), } } fn construct_config(mut opts: Opts, pattern_regexps: &[String]) -> Result<Config> { // The search will be case-sensitive if the command line flag is set or // if any of the patterns has an uppercase character (smart case). 
let case_sensitive = !opts.ignore_case && (opts.case_sensitive || pattern_regexps .iter() .any(|pat| pattern_has_uppercase_char(pat))); let path_separator = opts .path_separator .take() .or_else(filesystem::default_path_separator); let actual_path_separator = path_separator .clone() .unwrap_or_else(|| std::path::MAIN_SEPARATOR.to_string()); check_path_separator_length(path_separator.as_deref())?; let size_limits = std::mem::take(&mut opts.size); let time_constraints = extract_time_constraints(&opts)?; #[cfg(unix)] let owner_constraint: Option<OwnerFilter> = opts.owner.and_then(OwnerFilter::filter_ignore); #[cfg(windows)] let ansi_colors_support = nu_ansi_term::enable_ansi_support().is_ok() || std::env::var_os("TERM").is_some(); #[cfg(not(windows))] let ansi_colors_support = true; let interactive_terminal = std::io::stdout().is_terminal(); let colored_output = match opts.color { ColorWhen::Always => true, ColorWhen::Never => false, ColorWhen::Auto => { let no_color = env::var_os("NO_COLOR").is_some_and(|x| !x.is_empty()); ansi_colors_support && !no_color && interactive_terminal } }; let ls_colors = if colored_output { Some(LsColors::from_env().unwrap_or_else(|| LsColors::from_string(DEFAULT_LS_COLORS))) } else { None }; let hyperlink = match opts.hyperlink { HyperlinkWhen::Always => true, HyperlinkWhen::Never => false, HyperlinkWhen::Auto => colored_output, }; let command = extract_command(&mut opts, colored_output)?; let has_command = command.is_some(); Ok(Config { case_sensitive, search_full_path: opts.full_path, ignore_hidden: !(opts.hidden || opts.rg_alias_ignore()), read_fdignore: !(opts.no_ignore || opts.rg_alias_ignore()), read_vcsignore: !(opts.no_ignore || opts.rg_alias_ignore() || opts.no_ignore_vcs), require_git_to_read_vcsignore: !opts.no_require_git, read_parent_ignore: !opts.no_ignore_parent, read_global_ignore: !(opts.no_ignore || opts.rg_alias_ignore() || opts.no_global_ignore_file), follow_links: opts.follow, one_file_system: opts.one_file_system, 
null_separator: opts.null_separator, quiet: opts.quiet, max_depth: opts.max_depth(), min_depth: opts.min_depth(), prune: opts.prune, threads: opts.threads().get(), max_buffer_time: opts.max_buffer_time, ls_colors, hyperlink, interactive_terminal, file_types: opts.filetype.as_ref().map(|values| { use crate::cli::FileType::*; let mut file_types = FileTypes::default(); for value in values { match value { File => file_types.files = true, Directory => file_types.directories = true, Symlink => file_types.symlinks = true, Executable => { file_types.executables_only = true; file_types.files = true; } Empty => file_types.empty_only = true, BlockDevice => file_types.block_devices = true, CharDevice => file_types.char_devices = true, Socket => file_types.sockets = true, Pipe => file_types.pipes = true, } } // If only 'empty' was specified, search for both files and directories: if file_types.empty_only && !(file_types.files || file_types.directories) { file_types.files = true; file_types.directories = true; } file_types }), extensions: opts .extensions .as_ref() .map(|exts| { let patterns = exts .iter() .map(|e| e.trim_start_matches('.')) .map(|e| format!(r".\.{}$", regex::escape(e))); RegexSetBuilder::new(patterns) .case_insensitive(true) .build() }) .transpose()?, format: opts .format .as_deref() .map(crate::fmt::FormatTemplate::parse), command: command.map(Arc::new), batch_size: opts.batch_size, exclude_patterns: opts.exclude.iter().map(|p| String::from("!") + p).collect(), ignore_files: std::mem::take(&mut opts.ignore_file), size_constraints: size_limits, time_constraints, #[cfg(unix)] owner_constraint, show_filesystem_errors: opts.show_errors, path_separator, actual_path_separator, max_results: opts.max_results(), strip_cwd_prefix: opts.strip_cwd_prefix(|| !(opts.null_separator || has_command)), }) } fn extract_command(opts: &mut Opts, colored_output: bool) -> Result<Option<CommandSet>> { opts.exec .command .take() .map(Ok) .or_else(|| { if !opts.list_details { return 
None; } let res = determine_ls_command(colored_output) .map(|cmd| CommandSet::new_batch([cmd]).unwrap()); Some(res) }) .transpose() } fn determine_ls_command(colored_output: bool) -> Result<Vec<&'static str>> { #[allow(unused)] let gnu_ls = |command_name| { let color_arg = if colored_output { "--color=always" } else { "--color=never" }; // Note: we use short options here (instead of --long-options) to support more // platforms (like BusyBox). vec![ command_name, "-l", // long listing format "-h", // human readable file sizes "-d", // list directories themselves, not their contents color_arg, ] }; let cmd: Vec<&str> = if cfg!(unix) { if !cfg!(any( target_os = "macos", target_os = "dragonfly", target_os = "freebsd", target_os = "netbsd", target_os = "openbsd" )) { // Assume ls is GNU ls gnu_ls("ls") } else { // MacOS, DragonFlyBSD, FreeBSD use std::process::{Command, Stdio}; // Use GNU ls, if available (support for --color=auto, better LS_COLORS support) let gnu_ls_exists = Command::new("gls") .arg("--version") .stdout(Stdio::null()) .stderr(Stdio::null()) .status() .is_ok(); if gnu_ls_exists { gnu_ls("gls") } else { let mut cmd = vec![ "ls", // BSD version of ls "-l", // long listing format "-h", // '--human-readable' is not available, '-h' is "-d", // '--directory' is not available, but '-d' is ]; if !cfg!(any(target_os = "netbsd", target_os = "openbsd")) && colored_output { // -G is not available in NetBSD's and OpenBSD's ls cmd.push("-G"); } cmd } } } else if cfg!(windows) { use std::process::{Command, Stdio}; // Use GNU ls, if available let gnu_ls_exists = Command::new("ls") .arg("--version") .stdout(Stdio::null()) .stderr(Stdio::null()) .status() .is_ok(); if gnu_ls_exists { gnu_ls("ls") } else { return Err(anyhow!( "'fd --list-details' is not supported on Windows unless GNU 'ls' is installed." )); } } else { return Err(anyhow!( "'fd --list-details' is not supported on this platform." 
)); }; Ok(cmd) } fn extract_time_constraints(opts: &Opts) -> Result<Vec<TimeFilter>> { let mut time_constraints: Vec<TimeFilter> = Vec::new(); if let Some(ref t) = opts.changed_within { if let Some(f) = TimeFilter::after(t) { time_constraints.push(f); } else { return Err(anyhow!( "'{}' is not a valid date or duration. See 'fd --help'.", t )); } } if let Some(ref t) = opts.changed_before { if let Some(f) = TimeFilter::before(t) { time_constraints.push(f); } else { return Err(anyhow!( "'{}' is not a valid date or duration. See 'fd --help'.", t )); } } Ok(time_constraints) } fn ensure_use_hidden_option_for_leading_dot_pattern( config: &Config, pattern_regexps: &[String], ) -> Result<()> { if cfg!(unix) && config.ignore_hidden && pattern_regexps .iter() .any(|pat| pattern_matches_strings_with_leading_dot(pat)) { Err(anyhow!( "The pattern(s) seems to only match files with a leading dot, but hidden files are \ filtered by default. Consider adding -H/--hidden to search hidden files as well \ or adjust your search pattern(s)." )) } else { Ok(()) } } fn build_regex(pattern_regex: String, config: &Config) -> Result<regex::bytes::Regex> { RegexBuilder::new(&pattern_regex) .case_insensitive(!config.case_sensitive) .dot_matches_new_line(true) .build() .map_err(|e| { anyhow!( "{}\n\nNote: You can use the '--fixed-strings' option to search for a \ literal string instead of a regular expression. Alternatively, you can \ also use the '--glob' option to match on a glob pattern.", e ) }) }
rust
Apache-2.0
5f95a781212e3efbc6d91ae50e0ca0ce0693da50
2026-01-04T15:31:59.463143Z
false
sharkdp/fd
https://github.com/sharkdp/fd/blob/5f95a781212e3efbc6d91ae50e0ca0ce0693da50/src/output.rs
src/output.rs
use std::borrow::Cow; use std::io::{self, Write}; use lscolors::{Indicator, LsColors, Style}; use crate::config::Config; use crate::dir_entry::DirEntry; use crate::fmt::FormatTemplate; use crate::hyperlink::PathUrl; fn replace_path_separator(path: &str, new_path_separator: &str) -> String { path.replace(std::path::MAIN_SEPARATOR, new_path_separator) } // TODO: this function is performance critical and can probably be optimized pub fn print_entry<W: Write>(stdout: &mut W, entry: &DirEntry, config: &Config) -> io::Result<()> { let mut has_hyperlink = false; if config.hyperlink && let Some(url) = PathUrl::new(entry.path()) { write!(stdout, "\x1B]8;;{url}\x1B\\")?; has_hyperlink = true; } if let Some(ref format) = config.format { print_entry_format(stdout, entry, config, format)?; } else if let Some(ref ls_colors) = config.ls_colors { print_entry_colorized(stdout, entry, config, ls_colors)?; } else { print_entry_uncolorized(stdout, entry, config)?; }; if has_hyperlink { write!(stdout, "\x1B]8;;\x1B\\")?; } if config.null_separator { write!(stdout, "\0") } else { writeln!(stdout) } } // Display a trailing slash if the path is a directory and the config option is enabled. // If the path_separator option is set, display that instead. // The trailing slash will not be colored. #[inline] fn print_trailing_slash<W: Write>( stdout: &mut W, entry: &DirEntry, config: &Config, style: Option<&Style>, ) -> io::Result<()> { if entry.file_type().is_some_and(|ft| ft.is_dir()) { write!( stdout, "{}", style .map(Style::to_nu_ansi_term_style) .unwrap_or_default() .paint(&config.actual_path_separator) )?; } Ok(()) } // TODO: this function is performance critical and can probably be optimized fn print_entry_format<W: Write>( stdout: &mut W, entry: &DirEntry, config: &Config, format: &FormatTemplate, ) -> io::Result<()> { let output = format.generate( entry.stripped_path(config), config.path_separator.as_deref(), ); // TODO: support writing raw bytes on unix? 
write!(stdout, "{}", output.to_string_lossy()) } // TODO: this function is performance critical and can probably be optimized fn print_entry_colorized<W: Write>( stdout: &mut W, entry: &DirEntry, config: &Config, ls_colors: &LsColors, ) -> io::Result<()> { // Split the path between the parent and the last component let mut offset = 0; let path = entry.stripped_path(config); let path_str = path.to_string_lossy(); if let Some(parent) = path.parent() { offset = parent.to_string_lossy().len(); for c in path_str[offset..].chars() { if std::path::is_separator(c) { offset += c.len_utf8(); } else { break; } } } if offset > 0 { let mut parent_str = Cow::from(&path_str[..offset]); if let Some(ref separator) = config.path_separator { *parent_str.to_mut() = replace_path_separator(&parent_str, separator); } let style = ls_colors .style_for_indicator(Indicator::Directory) .map(Style::to_nu_ansi_term_style) .unwrap_or_default(); write!(stdout, "{}", style.paint(parent_str))?; } let style = entry .style(ls_colors) .map(Style::to_nu_ansi_term_style) .unwrap_or_default(); write!(stdout, "{}", style.paint(&path_str[offset..]))?; print_trailing_slash( stdout, entry, config, ls_colors.style_for_indicator(Indicator::Directory), )?; Ok(()) } // TODO: this function is performance critical and can probably be optimized fn print_entry_uncolorized_base<W: Write>( stdout: &mut W, entry: &DirEntry, config: &Config, ) -> io::Result<()> { let path = entry.stripped_path(config); let mut path_string = path.to_string_lossy(); if let Some(ref separator) = config.path_separator { *path_string.to_mut() = replace_path_separator(&path_string, separator); } write!(stdout, "{path_string}")?; print_trailing_slash(stdout, entry, config, None) } #[cfg(not(unix))] fn print_entry_uncolorized<W: Write>( stdout: &mut W, entry: &DirEntry, config: &Config, ) -> io::Result<()> { print_entry_uncolorized_base(stdout, entry, config) } #[cfg(unix)] fn print_entry_uncolorized<W: Write>( stdout: &mut W, entry: &DirEntry, 
config: &Config, ) -> io::Result<()> { use std::os::unix::ffi::OsStrExt; if config.interactive_terminal || config.path_separator.is_some() { // Fall back to the base implementation print_entry_uncolorized_base(stdout, entry, config) } else { // Print path as raw bytes, allowing invalid UTF-8 filenames to be passed to other processes stdout.write_all(entry.stripped_path(config).as_os_str().as_bytes())?; print_trailing_slash(stdout, entry, config, None) } }
rust
Apache-2.0
5f95a781212e3efbc6d91ae50e0ca0ce0693da50
2026-01-04T15:31:59.463143Z
false
sharkdp/fd
https://github.com/sharkdp/fd/blob/5f95a781212e3efbc6d91ae50e0ca0ce0693da50/src/filetypes.rs
src/filetypes.rs
use crate::dir_entry; use crate::filesystem; use faccess::PathExt; /// Whether or not to show #[derive(Default)] pub struct FileTypes { pub files: bool, pub directories: bool, pub symlinks: bool, pub block_devices: bool, pub char_devices: bool, pub sockets: bool, pub pipes: bool, pub executables_only: bool, pub empty_only: bool, } impl FileTypes { pub fn should_ignore(&self, entry: &dir_entry::DirEntry) -> bool { if let Some(ref entry_type) = entry.file_type() { (!self.files && entry_type.is_file()) || (!self.directories && entry_type.is_dir()) || (!self.symlinks && entry_type.is_symlink()) || (!self.block_devices && filesystem::is_block_device(*entry_type)) || (!self.char_devices && filesystem::is_char_device(*entry_type)) || (!self.sockets && filesystem::is_socket(*entry_type)) || (!self.pipes && filesystem::is_pipe(*entry_type)) || (self.executables_only && !entry.path().executable()) || (self.empty_only && !filesystem::is_empty(entry)) || !(entry_type.is_file() || entry_type.is_dir() || entry_type.is_symlink() || filesystem::is_block_device(*entry_type) || filesystem::is_char_device(*entry_type) || filesystem::is_socket(*entry_type) || filesystem::is_pipe(*entry_type)) } else { true } } }
rust
Apache-2.0
5f95a781212e3efbc6d91ae50e0ca0ce0693da50
2026-01-04T15:31:59.463143Z
false
sharkdp/fd
https://github.com/sharkdp/fd/blob/5f95a781212e3efbc6d91ae50e0ca0ce0693da50/src/regex_helper.rs
src/regex_helper.rs
use regex_syntax::ParserBuilder; use regex_syntax::hir::Hir; /// Determine if a regex pattern contains a literal uppercase character. pub fn pattern_has_uppercase_char(pattern: &str) -> bool { let mut parser = ParserBuilder::new().utf8(false).build(); parser .parse(pattern) .map(|hir| hir_has_uppercase_char(&hir)) .unwrap_or(false) } /// Determine if a regex expression contains a literal uppercase character. fn hir_has_uppercase_char(hir: &Hir) -> bool { use regex_syntax::hir::*; match hir.kind() { HirKind::Literal(Literal(bytes)) => match std::str::from_utf8(bytes) { Ok(s) => s.chars().any(|c| c.is_uppercase()), Err(_) => bytes.iter().any(|b| char::from(*b).is_uppercase()), }, HirKind::Class(Class::Unicode(ranges)) => ranges .iter() .any(|r| r.start().is_uppercase() || r.end().is_uppercase()), HirKind::Class(Class::Bytes(ranges)) => ranges .iter() .any(|r| char::from(r.start()).is_uppercase() || char::from(r.end()).is_uppercase()), HirKind::Capture(Capture { sub, .. }) | HirKind::Repetition(Repetition { sub, .. }) => { hir_has_uppercase_char(sub) } HirKind::Concat(hirs) | HirKind::Alternation(hirs) => { hirs.iter().any(hir_has_uppercase_char) } _ => false, } } /// Determine if a regex pattern only matches strings starting with a literal dot (hidden files) pub fn pattern_matches_strings_with_leading_dot(pattern: &str) -> bool { let mut parser = ParserBuilder::new().utf8(false).build(); parser .parse(pattern) .map(|hir| hir_matches_strings_with_leading_dot(&hir)) .unwrap_or(false) } /// See above. fn hir_matches_strings_with_leading_dot(hir: &Hir) -> bool { use regex_syntax::hir::*; // Note: this only really detects the simplest case where a regex starts with // "^\\.", i.e. a start text anchor and a literal dot character. There are a lot // of other patterns that ONLY match hidden files, e.g. ^(\\.foo|\\.bar) which are // not (yet) detected by this algorithm. 
match hir.kind() { HirKind::Concat(hirs) => { let mut hirs = hirs.iter(); if let Some(hir) = hirs.next() { if hir.kind() != &HirKind::Look(Look::Start) { return false; } } else { return false; } if let Some(hir) = hirs.next() { match hir.kind() { HirKind::Literal(Literal(bytes)) => bytes.starts_with(b"."), _ => false, } } else { false } } _ => false, } } #[test] fn pattern_has_uppercase_char_simple() { assert!(pattern_has_uppercase_char("A")); assert!(pattern_has_uppercase_char("foo.EXE")); assert!(!pattern_has_uppercase_char("a")); assert!(!pattern_has_uppercase_char("foo.exe123")); } #[test] fn pattern_has_uppercase_char_advanced() { assert!(pattern_has_uppercase_char("foo.[a-zA-Z]")); assert!(!pattern_has_uppercase_char(r"\Acargo")); assert!(!pattern_has_uppercase_char(r"carg\x6F")); } #[test] fn matches_strings_with_leading_dot_simple() { assert!(pattern_matches_strings_with_leading_dot("^\\.gitignore")); assert!(!pattern_matches_strings_with_leading_dot("^.gitignore")); assert!(!pattern_matches_strings_with_leading_dot("\\.gitignore")); assert!(!pattern_matches_strings_with_leading_dot("^gitignore")); }
rust
Apache-2.0
5f95a781212e3efbc6d91ae50e0ca0ce0693da50
2026-01-04T15:31:59.463143Z
false
sharkdp/fd
https://github.com/sharkdp/fd/blob/5f95a781212e3efbc6d91ae50e0ca0ce0693da50/src/filter/time.rs
src/filter/time.rs
use jiff::{Span, Timestamp, Zoned, civil::DateTime, tz::TimeZone}; use std::time::{Duration, SystemTime, UNIX_EPOCH}; /// Filter based on time ranges. #[derive(Debug, PartialEq, Eq)] pub enum TimeFilter { Before(SystemTime), After(SystemTime), } #[cfg(not(test))] fn now() -> Zoned { Zoned::now() } #[cfg(test)] thread_local! { static TESTTIME: std::cell::RefCell<Option<Zoned>> = None.into(); } /// This allows us to set a specific time when running tests #[cfg(test)] fn now() -> Zoned { TESTTIME.with_borrow(|reftime| reftime.as_ref().cloned().unwrap_or_else(Zoned::now)) } impl TimeFilter { fn from_str(s: &str) -> Option<SystemTime> { if let Ok(span) = s.parse::<Span>() { let datetime = now().checked_sub(span).ok()?; Some(datetime.into()) } else if let Ok(timestamp) = s.parse::<Timestamp>() { Some(timestamp.into()) } else if let Ok(datetime) = s.parse::<DateTime>() { Some( TimeZone::system() .to_ambiguous_zoned(datetime) .later() .ok()? .into(), ) } else { let timestamp_secs: u64 = s.strip_prefix('@')?.parse().ok()?; Some(UNIX_EPOCH + Duration::from_secs(timestamp_secs)) } } pub fn before(s: &str) -> Option<TimeFilter> { TimeFilter::from_str(s).map(TimeFilter::Before) } pub fn after(s: &str) -> Option<TimeFilter> { TimeFilter::from_str(s).map(TimeFilter::After) } pub fn applies_to(&self, t: &SystemTime) -> bool { match self { TimeFilter::Before(limit) => t < limit, TimeFilter::After(limit) => t > limit, } } } #[cfg(test)] mod tests { use super::*; use std::time::Duration; struct TestTime(SystemTime); impl TestTime { fn new(time: Zoned) -> Self { TESTTIME.with_borrow_mut(|t| *t = Some(time.clone())); TestTime(time.into()) } fn set(&mut self, time: Zoned) { TESTTIME.with_borrow_mut(|t| *t = Some(time.clone())); self.0 = time.into(); } fn timestamp(&self) -> SystemTime { self.0 } } impl Drop for TestTime { fn drop(&mut self) { // Stop using manually set times TESTTIME.with_borrow_mut(|t| *t = None); } } #[test] fn is_time_filter_applicable() { let local_tz = 
TimeZone::system(); let mut test_time = TestTime::new( local_tz .to_ambiguous_zoned("2010-10-10 10:10:10".parse::<DateTime>().unwrap()) .later() .unwrap(), ); let mut ref_time = test_time.timestamp(); assert!(TimeFilter::after("1min").unwrap().applies_to(&ref_time)); assert!(!TimeFilter::before("1min").unwrap().applies_to(&ref_time)); let t1m_ago = ref_time - Duration::from_secs(60); assert!(!TimeFilter::after("30sec").unwrap().applies_to(&t1m_ago)); assert!(TimeFilter::after("2min").unwrap().applies_to(&t1m_ago)); assert!(TimeFilter::before("30sec").unwrap().applies_to(&t1m_ago)); assert!(!TimeFilter::before("2min").unwrap().applies_to(&t1m_ago)); let t10s_before = "2010-10-10 10:10:00"; assert!( !TimeFilter::before(t10s_before) .unwrap() .applies_to(&ref_time) ); assert!( TimeFilter::before(t10s_before) .unwrap() .applies_to(&t1m_ago) ); assert!( TimeFilter::after(t10s_before) .unwrap() .applies_to(&ref_time) ); assert!(!TimeFilter::after(t10s_before).unwrap().applies_to(&t1m_ago)); let same_day = "2010-10-10"; assert!(!TimeFilter::before(same_day).unwrap().applies_to(&ref_time)); assert!(!TimeFilter::before(same_day).unwrap().applies_to(&t1m_ago)); assert!(TimeFilter::after(same_day).unwrap().applies_to(&ref_time)); assert!(TimeFilter::after(same_day).unwrap().applies_to(&t1m_ago)); test_time.set( "2010-10-10T10:10:10+00:00" .parse::<Timestamp>() .unwrap() .to_zoned(local_tz.clone()), ); ref_time = test_time.timestamp(); let t1m_ago = ref_time - Duration::from_secs(60); let t10s_before = "2010-10-10T10:10:00+00:00"; assert!( !TimeFilter::before(t10s_before) .unwrap() .applies_to(&ref_time) ); assert!( TimeFilter::before(t10s_before) .unwrap() .applies_to(&t1m_ago) ); assert!( TimeFilter::after(t10s_before) .unwrap() .applies_to(&ref_time) ); assert!(!TimeFilter::after(t10s_before).unwrap().applies_to(&t1m_ago)); let ref_timestamp = 1707723412u64; // Mon Feb 12 07:36:52 UTC 2024 test_time.set( "2024-02-12T07:36:52+00:00" .parse::<Timestamp>() .unwrap() 
.to_zoned(local_tz), ); ref_time = test_time.timestamp(); let t1m_ago = ref_time - Duration::from_secs(60); let t1s_later = ref_time + Duration::from_secs(1); // Timestamp only supported via '@' prefix assert!(TimeFilter::before(&ref_timestamp.to_string()).is_none()); assert!( TimeFilter::before(&format!("@{ref_timestamp}")) .unwrap() .applies_to(&t1m_ago) ); assert!( !TimeFilter::before(&format!("@{ref_timestamp}")) .unwrap() .applies_to(&t1s_later) ); assert!( !TimeFilter::after(&format!("@{ref_timestamp}")) .unwrap() .applies_to(&t1m_ago) ); assert!( TimeFilter::after(&format!("@{ref_timestamp}")) .unwrap() .applies_to(&t1s_later) ); } }
rust
Apache-2.0
5f95a781212e3efbc6d91ae50e0ca0ce0693da50
2026-01-04T15:31:59.463143Z
false
sharkdp/fd
https://github.com/sharkdp/fd/blob/5f95a781212e3efbc6d91ae50e0ca0ce0693da50/src/filter/mod.rs
src/filter/mod.rs
pub use self::size::SizeFilter; pub use self::time::TimeFilter; #[cfg(unix)] pub use self::owner::OwnerFilter; mod size; mod time; #[cfg(unix)] mod owner;
rust
Apache-2.0
5f95a781212e3efbc6d91ae50e0ca0ce0693da50
2026-01-04T15:31:59.463143Z
false
sharkdp/fd
https://github.com/sharkdp/fd/blob/5f95a781212e3efbc6d91ae50e0ca0ce0693da50/src/filter/owner.rs
src/filter/owner.rs
use anyhow::{Result, anyhow}; use nix::unistd::{Group, User}; use std::fs; #[derive(Clone, Copy, Debug, PartialEq, Eq)] pub struct OwnerFilter { uid: Check<u32>, gid: Check<u32>, } #[derive(Clone, Copy, Debug, PartialEq, Eq)] enum Check<T> { Equal(T), NotEq(T), Ignore, } impl OwnerFilter { const IGNORE: Self = OwnerFilter { uid: Check::Ignore, gid: Check::Ignore, }; /// Parses an owner constraint /// Returns an error if the string is invalid /// Returns Ok(None) when string is acceptable but a noop (such as "" or ":") pub fn from_string(input: &str) -> Result<Self> { let mut it = input.split(':'); let (fst, snd) = (it.next(), it.next()); if it.next().is_some() { return Err(anyhow!( "more than one ':' present in owner string '{}'. See 'fd --help'.", input )); } let uid = Check::parse(fst, |s| { if let Ok(uid) = s.parse() { Ok(uid) } else { User::from_name(s)? .map(|user| user.uid.as_raw()) .ok_or_else(|| anyhow!("'{}' is not a recognized user name", s)) } })?; let gid = Check::parse(snd, |s| { if let Ok(gid) = s.parse() { Ok(gid) } else { Group::from_name(s)? 
.map(|group| group.gid.as_raw()) .ok_or_else(|| anyhow!("'{}' is not a recognized group name", s)) } })?; Ok(OwnerFilter { uid, gid }) } /// If self is a no-op (ignore both uid and gid) then return `None`, otherwise wrap in a `Some` pub fn filter_ignore(self) -> Option<Self> { if self == Self::IGNORE { None } else { Some(self) } } pub fn matches(&self, md: &fs::Metadata) -> bool { use std::os::unix::fs::MetadataExt; self.uid.check(md.uid()) && self.gid.check(md.gid()) } } impl<T: PartialEq> Check<T> { fn check(&self, v: T) -> bool { match self { Check::Equal(x) => v == *x, Check::NotEq(x) => v != *x, Check::Ignore => true, } } fn parse<F>(s: Option<&str>, f: F) -> Result<Self> where F: Fn(&str) -> Result<T>, { let (s, equality) = match s { Some("") | None => return Ok(Check::Ignore), Some(s) if s.starts_with('!') => (&s[1..], false), Some(s) => (s, true), }; f(s).map(|x| { if equality { Check::Equal(x) } else { Check::NotEq(x) } }) } } #[cfg(test)] mod owner_parsing { use super::OwnerFilter; macro_rules! owner_tests { ($($name:ident: $value:expr => $result:pat,)*) => { $( #[test] fn $name() { let o = OwnerFilter::from_string($value); match o { $result => {}, _ => panic!("{:?} does not match {}", o, stringify!($result)), } } )* }; } use super::Check::*; owner_tests! { empty: "" => Ok(OwnerFilter::IGNORE), uid_only: "5" => Ok(OwnerFilter { uid: Equal(5), gid: Ignore }), uid_gid: "9:3" => Ok(OwnerFilter { uid: Equal(9), gid: Equal(3) }), gid_only: ":8" => Ok(OwnerFilter { uid: Ignore, gid: Equal(8) }), colon_only: ":" => Ok(OwnerFilter::IGNORE), trailing: "5:" => Ok(OwnerFilter { uid: Equal(5), gid: Ignore }), uid_negate: "!5" => Ok(OwnerFilter { uid: NotEq(5), gid: Ignore }), both_negate:"!4:!3" => Ok(OwnerFilter { uid: NotEq(4), gid: NotEq(3) }), uid_not_gid:"6:!8" => Ok(OwnerFilter { uid: Equal(6), gid: NotEq(8) }), more_colons:"3:5:" => Err(_), only_colons:"::" => Err(_), } }
rust
Apache-2.0
5f95a781212e3efbc6d91ae50e0ca0ce0693da50
2026-01-04T15:31:59.463143Z
false
sharkdp/fd
https://github.com/sharkdp/fd/blob/5f95a781212e3efbc6d91ae50e0ca0ce0693da50/src/filter/size.rs
src/filter/size.rs
use std::sync::OnceLock; use anyhow::anyhow; use regex::Regex; static SIZE_CAPTURES: OnceLock<Regex> = OnceLock::new(); #[derive(Clone, Copy, Debug, PartialEq, Eq)] pub enum SizeFilter { Max(u64), Min(u64), Equals(u64), } // SI prefixes (powers of 10) const KILO: u64 = 1000; const MEGA: u64 = KILO * 1000; const GIGA: u64 = MEGA * 1000; const TERA: u64 = GIGA * 1000; // Binary prefixes (powers of 2) const KIBI: u64 = 1024; const MEBI: u64 = KIBI * 1024; const GIBI: u64 = MEBI * 1024; const TEBI: u64 = GIBI * 1024; impl SizeFilter { pub fn from_string(s: &str) -> anyhow::Result<Self> { SizeFilter::parse_opt(s) .ok_or_else(|| anyhow!("'{}' is not a valid size constraint. See 'fd --help'.", s)) } fn parse_opt(s: &str) -> Option<Self> { let pattern = SIZE_CAPTURES.get_or_init(|| Regex::new(r"(?i)^([+-]?)(\d+)(b|[kmgt]i?b?)$").unwrap()); if !pattern.is_match(s) { return None; } let captures = pattern.captures(s)?; let limit_kind = captures.get(1).map_or("+", |m| m.as_str()); let quantity = captures .get(2) .and_then(|v| v.as_str().parse::<u64>().ok())?; let multiplier = match &captures.get(3).map_or("b", |m| m.as_str()).to_lowercase()[..] { v if v.starts_with("ki") => KIBI, v if v.starts_with('k') => KILO, v if v.starts_with("mi") => MEBI, v if v.starts_with('m') => MEGA, v if v.starts_with("gi") => GIBI, v if v.starts_with('g') => GIGA, v if v.starts_with("ti") => TEBI, v if v.starts_with('t') => TERA, "b" => 1, _ => return None, }; let size = quantity * multiplier; match limit_kind { "+" => Some(SizeFilter::Min(size)), "-" => Some(SizeFilter::Max(size)), "" => Some(SizeFilter::Equals(size)), _ => None, } } pub fn is_within(&self, size: u64) -> bool { match *self { SizeFilter::Max(limit) => size <= limit, SizeFilter::Min(limit) => size >= limit, SizeFilter::Equals(limit) => size == limit, } } } #[cfg(test)] mod tests { use super::*; macro_rules! 
gen_size_filter_parse_test { ($($name: ident: $val: expr,)*) => { $( #[test] fn $name() { let (txt, expected) = $val; let actual = SizeFilter::from_string(txt).unwrap(); assert_eq!(actual, expected); } )* }; } // Parsing and size conversion tests data. Ensure that each type gets properly interpreted. // Call with higher base values to ensure expected multiplication (only need a couple) gen_size_filter_parse_test! { byte_plus: ("+1b", SizeFilter::Min(1)), byte_plus_multiplier: ("+10b", SizeFilter::Min(10)), byte_minus: ("-1b", SizeFilter::Max(1)), kilo_plus: ("+1k", SizeFilter::Min(1000)), kilo_plus_suffix: ("+1kb", SizeFilter::Min(1000)), kilo_minus: ("-1k", SizeFilter::Max(1000)), kilo_minus_multiplier: ("-100k", SizeFilter::Max(100_000)), kilo_minus_suffix: ("-1kb", SizeFilter::Max(1000)), kilo_plus_upper: ("+1K", SizeFilter::Min(1000)), kilo_plus_suffix_upper: ("+1KB", SizeFilter::Min(1000)), kilo_minus_upper: ("-1K", SizeFilter::Max(1000)), kilo_minus_suffix_upper: ("-1Kb", SizeFilter::Max(1000)), kibi_plus: ("+1ki", SizeFilter::Min(1024)), kibi_plus_multiplier: ("+10ki", SizeFilter::Min(10_240)), kibi_plus_suffix: ("+1kib", SizeFilter::Min(1024)), kibi_minus: ("-1ki", SizeFilter::Max(1024)), kibi_minus_multiplier: ("-100ki", SizeFilter::Max(102_400)), kibi_minus_suffix: ("-1kib", SizeFilter::Max(1024)), kibi_plus_upper: ("+1KI", SizeFilter::Min(1024)), kibi_plus_suffix_upper: ("+1KiB", SizeFilter::Min(1024)), kibi_minus_upper: ("-1Ki", SizeFilter::Max(1024)), kibi_minus_suffix_upper: ("-1KIB", SizeFilter::Max(1024)), mega_plus: ("+1m", SizeFilter::Min(1_000_000)), mega_plus_suffix: ("+1mb", SizeFilter::Min(1_000_000)), mega_minus: ("-1m", SizeFilter::Max(1_000_000)), mega_minus_suffix: ("-1mb", SizeFilter::Max(1_000_000)), mega_plus_upper: ("+1M", SizeFilter::Min(1_000_000)), mega_plus_suffix_upper: ("+1MB", SizeFilter::Min(1_000_000)), mega_minus_upper: ("-1M", SizeFilter::Max(1_000_000)), mega_minus_suffix_upper: ("-1Mb", SizeFilter::Max(1_000_000)), 
mebi_plus: ("+1mi", SizeFilter::Min(1_048_576)), mebi_plus_suffix: ("+1mib", SizeFilter::Min(1_048_576)), mebi_minus: ("-1mi", SizeFilter::Max(1_048_576)), mebi_minus_suffix: ("-1mib", SizeFilter::Max(1_048_576)), mebi_plus_upper: ("+1MI", SizeFilter::Min(1_048_576)), mebi_plus_suffix_upper: ("+1MiB", SizeFilter::Min(1_048_576)), mebi_minus_upper: ("-1Mi", SizeFilter::Max(1_048_576)), mebi_minus_suffix_upper: ("-1MIB", SizeFilter::Max(1_048_576)), giga_plus: ("+1g", SizeFilter::Min(1_000_000_000)), giga_plus_suffix: ("+1gb", SizeFilter::Min(1_000_000_000)), giga_minus: ("-1g", SizeFilter::Max(1_000_000_000)), giga_minus_suffix: ("-1gb", SizeFilter::Max(1_000_000_000)), giga_plus_upper: ("+1G", SizeFilter::Min(1_000_000_000)), giga_plus_suffix_upper: ("+1GB", SizeFilter::Min(1_000_000_000)), giga_minus_upper: ("-1G", SizeFilter::Max(1_000_000_000)), giga_minus_suffix_upper: ("-1Gb", SizeFilter::Max(1_000_000_000)), gibi_plus: ("+1gi", SizeFilter::Min(1_073_741_824)), gibi_plus_suffix: ("+1gib", SizeFilter::Min(1_073_741_824)), gibi_minus: ("-1gi", SizeFilter::Max(1_073_741_824)), gibi_minus_suffix: ("-1gib", SizeFilter::Max(1_073_741_824)), gibi_plus_upper: ("+1GI", SizeFilter::Min(1_073_741_824)), gibi_plus_suffix_upper: ("+1GiB", SizeFilter::Min(1_073_741_824)), gibi_minus_upper: ("-1Gi", SizeFilter::Max(1_073_741_824)), gibi_minus_suffix_upper: ("-1GIB", SizeFilter::Max(1_073_741_824)), tera_plus: ("+1t", SizeFilter::Min(1_000_000_000_000)), tera_plus_suffix: ("+1tb", SizeFilter::Min(1_000_000_000_000)), tera_minus: ("-1t", SizeFilter::Max(1_000_000_000_000)), tera_minus_suffix: ("-1tb", SizeFilter::Max(1_000_000_000_000)), tera_plus_upper: ("+1T", SizeFilter::Min(1_000_000_000_000)), tera_plus_suffix_upper: ("+1TB", SizeFilter::Min(1_000_000_000_000)), tera_minus_upper: ("-1T", SizeFilter::Max(1_000_000_000_000)), tera_minus_suffix_upper: ("-1Tb", SizeFilter::Max(1_000_000_000_000)), tebi_plus: ("+1ti", SizeFilter::Min(1_099_511_627_776)), tebi_plus_suffix: 
("+1tib", SizeFilter::Min(1_099_511_627_776)), tebi_minus: ("-1ti", SizeFilter::Max(1_099_511_627_776)), tebi_minus_suffix: ("-1tib", SizeFilter::Max(1_099_511_627_776)), tebi_plus_upper: ("+1TI", SizeFilter::Min(1_099_511_627_776)), tebi_plus_suffix_upper: ("+1TiB", SizeFilter::Min(1_099_511_627_776)), tebi_minus_upper: ("-1Ti", SizeFilter::Max(1_099_511_627_776)), tebi_minus_suffix_upper: ("-1TIB", SizeFilter::Max(1_099_511_627_776)), } /// Invalid parse testing macro_rules! gen_size_filter_failure { ($($name:ident: $value:expr,)*) => { $( #[test] fn $name() { let i = SizeFilter::from_string($value); assert!(i.is_err()); } )* }; } // Invalid parse data gen_size_filter_failure! { ensure_missing_number_returns_none: "+g", ensure_missing_unit_returns_none: "+18", ensure_bad_format_returns_none_1: "$10M", ensure_bad_format_returns_none_2: "badval", ensure_bad_format_returns_none_3: "9999", ensure_invalid_unit_returns_none_1: "+50a", ensure_invalid_unit_returns_none_2: "-10v", ensure_invalid_unit_returns_none_3: "+1Mv", ensure_bib_format_returns_none: "+1bib", ensure_bb_format_returns_none: "+1bb", } #[test] fn is_within_less_than() { let f = SizeFilter::from_string("-1k").unwrap(); assert!(f.is_within(999)); } #[test] fn is_within_less_than_equal() { let f = SizeFilter::from_string("-1k").unwrap(); assert!(f.is_within(1000)); } #[test] fn is_within_greater_than() { let f = SizeFilter::from_string("+1k").unwrap(); assert!(f.is_within(1001)); } #[test] fn is_within_greater_than_equal() { let f = SizeFilter::from_string("+1K").unwrap(); assert!(f.is_within(1000)); } }
rust
Apache-2.0
5f95a781212e3efbc6d91ae50e0ca0ce0693da50
2026-01-04T15:31:59.463143Z
false
sharkdp/fd
https://github.com/sharkdp/fd/blob/5f95a781212e3efbc6d91ae50e0ca0ce0693da50/src/exec/command.rs
src/exec/command.rs
use std::io; use std::io::Write; use argmax::Command; use crate::error::print_error; use crate::exit_codes::ExitCode; struct Outputs { stdout: Vec<u8>, stderr: Vec<u8>, } pub struct OutputBuffer { null_separator: bool, outputs: Vec<Outputs>, } impl OutputBuffer { pub fn new(null_separator: bool) -> Self { Self { null_separator, outputs: Vec::new(), } } fn push(&mut self, stdout: Vec<u8>, stderr: Vec<u8>) { self.outputs.push(Outputs { stdout, stderr }); } fn write(self) { // Avoid taking the lock if there is nothing to do. // If null_separator is true, then we still need to write the // null separator, because the output may have been written directly // to stdout if self.outputs.is_empty() && !self.null_separator { return; } let stdout = io::stdout(); let stderr = io::stderr(); // While we hold these locks, only this thread will be able // to write its outputs. let mut stdout = stdout.lock(); let mut stderr = stderr.lock(); for output in self.outputs.iter() { let _ = stdout.write_all(&output.stdout); let _ = stderr.write_all(&output.stderr); } if self.null_separator { // If null_separator is enabled, then we should write a \0 at the end // of the output for this entry let _ = stdout.write_all(b"\0"); } } } /// Executes a command. pub fn execute_commands<I: Iterator<Item = io::Result<Command>>>( cmds: I, mut output_buffer: OutputBuffer, enable_output_buffering: bool, ) -> ExitCode { for result in cmds { let mut cmd = match result { Ok(cmd) => cmd, Err(e) => return handle_cmd_error(None, e), }; // Spawn the supplied command. let output = if enable_output_buffering { cmd.output() } else { // If running on only one thread, don't buffer output // Allows for viewing and interacting with intermediate command output cmd.spawn().and_then(|c| c.wait_with_output()) }; // Then wait for the command to exit, if it was spawned. 
match output { Ok(output) => { if enable_output_buffering { output_buffer.push(output.stdout, output.stderr); } if output.status.code() != Some(0) { output_buffer.write(); return ExitCode::GeneralError; } } Err(why) => { output_buffer.write(); return handle_cmd_error(Some(&cmd), why); } } } output_buffer.write(); ExitCode::Success } pub fn handle_cmd_error(cmd: Option<&Command>, err: io::Error) -> ExitCode { match (cmd, err) { (Some(cmd), err) if err.kind() == io::ErrorKind::NotFound => { print_error(format!( "Command not found: {}", cmd.get_program().to_string_lossy() )); ExitCode::GeneralError } (_, err) => { print_error(format!("Problem while executing command: {err}")); ExitCode::GeneralError } } }
rust
Apache-2.0
5f95a781212e3efbc6d91ae50e0ca0ce0693da50
2026-01-04T15:31:59.463143Z
false
sharkdp/fd
https://github.com/sharkdp/fd/blob/5f95a781212e3efbc6d91ae50e0ca0ce0693da50/src/exec/mod.rs
src/exec/mod.rs
mod command; mod job; use std::ffi::OsString; use std::io; use std::iter; use std::path::{Path, PathBuf}; use std::process::Stdio; use anyhow::{Result, bail}; use argmax::Command; use crate::exec::command::OutputBuffer; use crate::exit_codes::{ExitCode, merge_exitcodes}; use crate::fmt::{FormatTemplate, Token}; use self::command::{execute_commands, handle_cmd_error}; pub use self::job::{batch, job}; /// Execution mode of the command #[derive(Debug, Clone, Copy, PartialEq, Eq)] pub enum ExecutionMode { /// Command is executed for each search result OneByOne, /// Command is run for a batch of results at once Batch, } #[derive(Debug, Clone, PartialEq)] pub struct CommandSet { mode: ExecutionMode, commands: Vec<CommandTemplate>, } impl CommandSet { pub fn new<I, T, S>(input: I) -> Result<CommandSet> where I: IntoIterator<Item = T>, T: IntoIterator<Item = S>, S: AsRef<str>, { Ok(CommandSet { mode: ExecutionMode::OneByOne, commands: input .into_iter() .map(CommandTemplate::new) .collect::<Result<_>>()?, }) } pub fn new_batch<I, T, S>(input: I) -> Result<CommandSet> where I: IntoIterator<Item = T>, T: IntoIterator<Item = S>, S: AsRef<str>, { Ok(CommandSet { mode: ExecutionMode::Batch, commands: input .into_iter() .map(|args| { let cmd = CommandTemplate::new(args)?; if cmd.number_of_tokens() > 1 { bail!("Only one placeholder allowed for batch commands"); } if cmd.args[0].has_tokens() { bail!("First argument of exec-batch is expected to be a fixed executable"); } Ok(cmd) }) .collect::<Result<Vec<_>>>()?, }) } pub fn in_batch_mode(&self) -> bool { self.mode == ExecutionMode::Batch } pub fn execute( &self, input: &Path, path_separator: Option<&str>, null_separator: bool, buffer_output: bool, ) -> ExitCode { let commands = self .commands .iter() .map(|c| c.generate(input, path_separator)); execute_commands(commands, OutputBuffer::new(null_separator), buffer_output) } pub fn execute_batch<I>(&self, paths: I, limit: usize, path_separator: Option<&str>) -> ExitCode where I: 
Iterator<Item = PathBuf>, { let builders: io::Result<Vec<_>> = self .commands .iter() .map(|c| CommandBuilder::new(c, limit)) .collect(); match builders { Ok(mut builders) => { for path in paths { for builder in &mut builders { if let Err(e) = builder.push(&path, path_separator) { return handle_cmd_error(Some(&builder.cmd), e); } } } for builder in &mut builders { if let Err(e) = builder.finish() { return handle_cmd_error(Some(&builder.cmd), e); } } merge_exitcodes(builders.iter().map(|b| b.exit_code())) } Err(e) => handle_cmd_error(None, e), } } } /// Represents a multi-exec command as it is built. #[derive(Debug)] struct CommandBuilder { pre_args: Vec<OsString>, path_arg: FormatTemplate, post_args: Vec<OsString>, cmd: Command, count: usize, limit: usize, exit_code: ExitCode, } impl CommandBuilder { fn new(template: &CommandTemplate, limit: usize) -> io::Result<Self> { let mut pre_args = vec![]; let mut path_arg = None; let mut post_args = vec![]; for arg in &template.args { if arg.has_tokens() { path_arg = Some(arg.clone()); } else if path_arg.is_none() { pre_args.push(arg.generate("", None)); } else { post_args.push(arg.generate("", None)); } } let cmd = Self::new_command(&pre_args)?; Ok(Self { pre_args, path_arg: path_arg.unwrap(), post_args, cmd, count: 0, limit, exit_code: ExitCode::Success, }) } fn new_command(pre_args: &[OsString]) -> io::Result<Command> { let mut cmd = Command::new(&pre_args[0]); cmd.stdin(Stdio::inherit()); cmd.stdout(Stdio::inherit()); cmd.stderr(Stdio::inherit()); cmd.try_args(&pre_args[1..])?; Ok(cmd) } fn push(&mut self, path: &Path, separator: Option<&str>) -> io::Result<()> { if self.limit > 0 && self.count >= self.limit { self.finish()?; } let arg = self.path_arg.generate(path, separator); if !self .cmd .args_would_fit(iter::once(&arg).chain(&self.post_args)) { self.finish()?; } self.cmd.try_arg(arg)?; self.count += 1; Ok(()) } fn finish(&mut self) -> io::Result<()> { if self.count > 0 { self.cmd.try_args(&self.post_args)?; if 
!self.cmd.status()?.success() { self.exit_code = ExitCode::GeneralError; } self.cmd = Self::new_command(&self.pre_args)?; self.count = 0; } Ok(()) } fn exit_code(&self) -> ExitCode { self.exit_code } } /// Represents a template that is utilized to generate command strings. /// /// The template is meant to be coupled with an input in order to generate a command. The /// `generate_and_execute()` method will be used to generate a command and execute it. #[derive(Debug, Clone, PartialEq)] struct CommandTemplate { args: Vec<FormatTemplate>, } impl CommandTemplate { fn new<I, S>(input: I) -> Result<CommandTemplate> where I: IntoIterator<Item = S>, S: AsRef<str>, { let mut args = Vec::new(); let mut has_placeholder = false; for arg in input { let arg = arg.as_ref(); let tmpl = FormatTemplate::parse(arg); has_placeholder |= tmpl.has_tokens(); args.push(tmpl); } // We need to check that we have at least one argument, because if not // it will try to execute each file and directory it finds. // // Sadly, clap can't currently handle this for us, see // https://github.com/clap-rs/clap/issues/3542 if args.is_empty() { bail!("No executable provided for --exec or --exec-batch"); } // If a placeholder token was not supplied, append one at the end of the command. if !has_placeholder { args.push(FormatTemplate::Tokens(vec![Token::Placeholder])); } Ok(CommandTemplate { args }) } fn number_of_tokens(&self) -> usize { self.args.iter().filter(|arg| arg.has_tokens()).count() } /// Generates and executes a command. /// /// Using the internal `args` field, and a supplied `input` variable, a `Command` will be /// build. fn generate(&self, input: &Path, path_separator: Option<&str>) -> io::Result<Command> { let mut cmd = Command::new(self.args[0].generate(input, path_separator)); for arg in &self.args[1..] 
{ cmd.try_arg(arg.generate(input, path_separator))?; } Ok(cmd) } } #[cfg(test)] mod tests { use super::*; fn generate_str(template: &CommandTemplate, input: &str) -> Vec<String> { template .args .iter() .map(|arg| arg.generate(input, None).into_string().unwrap()) .collect() } #[test] fn tokens_with_placeholder() { assert_eq!( CommandSet::new(vec![vec![&"echo", &"${SHELL}:"]]).unwrap(), CommandSet { commands: vec![CommandTemplate { args: vec![ FormatTemplate::Text("echo".into()), FormatTemplate::Text("${SHELL}:".into()), FormatTemplate::Tokens(vec![Token::Placeholder]), ] }], mode: ExecutionMode::OneByOne, } ); } #[test] fn tokens_with_no_extension() { assert_eq!( CommandSet::new(vec![vec!["echo", "{.}"]]).unwrap(), CommandSet { commands: vec![CommandTemplate { args: vec![ FormatTemplate::Text("echo".into()), FormatTemplate::Tokens(vec![Token::NoExt]), ], }], mode: ExecutionMode::OneByOne, } ); } #[test] fn tokens_with_basename() { assert_eq!( CommandSet::new(vec![vec!["echo", "{/}"]]).unwrap(), CommandSet { commands: vec![CommandTemplate { args: vec![ FormatTemplate::Text("echo".into()), FormatTemplate::Tokens(vec![Token::Basename]), ], }], mode: ExecutionMode::OneByOne, } ); } #[test] fn tokens_with_parent() { assert_eq!( CommandSet::new(vec![vec!["echo", "{//}"]]).unwrap(), CommandSet { commands: vec![CommandTemplate { args: vec![ FormatTemplate::Text("echo".into()), FormatTemplate::Tokens(vec![Token::Parent]), ], }], mode: ExecutionMode::OneByOne, } ); } #[test] fn tokens_with_basename_no_extension() { assert_eq!( CommandSet::new(vec![vec!["echo", "{/.}"]]).unwrap(), CommandSet { commands: vec![CommandTemplate { args: vec![ FormatTemplate::Text("echo".into()), FormatTemplate::Tokens(vec![Token::BasenameNoExt]), ], }], mode: ExecutionMode::OneByOne, } ); } #[test] fn tokens_with_literal_braces() { let template = CommandTemplate::new(vec!["{{}}", "{{", "{.}}"]).unwrap(); assert_eq!( generate_str(&template, "foo"), vec!["{}", "{", "{.}", "foo"] ); } #[test] fn 
tokens_with_literal_braces_and_placeholder() { let template = CommandTemplate::new(vec!["{{{},end}"]).unwrap(); assert_eq!(generate_str(&template, "foo"), vec!["{foo,end}"]); } #[test] fn tokens_multiple() { assert_eq!( CommandSet::new(vec![vec!["cp", "{}", "{/.}.ext"]]).unwrap(), CommandSet { commands: vec![CommandTemplate { args: vec![ FormatTemplate::Text("cp".into()), FormatTemplate::Tokens(vec![Token::Placeholder]), FormatTemplate::Tokens(vec![ Token::BasenameNoExt, Token::Text(".ext".into()) ]), ], }], mode: ExecutionMode::OneByOne, } ); } #[test] fn tokens_single_batch() { assert_eq!( CommandSet::new_batch(vec![vec!["echo", "{.}"]]).unwrap(), CommandSet { commands: vec![CommandTemplate { args: vec![ FormatTemplate::Text("echo".into()), FormatTemplate::Tokens(vec![Token::NoExt]), ], }], mode: ExecutionMode::Batch, } ); } #[test] fn tokens_multiple_batch() { assert!(CommandSet::new_batch(vec![vec!["echo", "{.}", "{}"]]).is_err()); } #[test] fn template_no_args() { assert!(CommandTemplate::new::<Vec<_>, &'static str>(vec![]).is_err()); } #[test] fn command_set_no_args() { assert!(CommandSet::new(vec![vec!["echo"], vec![]]).is_err()); } #[test] fn generate_custom_path_separator() { let arg = FormatTemplate::Tokens(vec![Token::Placeholder]); macro_rules! check { ($input:expr, $expected:expr) => { assert_eq!(arg.generate($input, Some("#")), OsString::from($expected)); }; } check!("foo", "foo"); check!("foo/bar", "foo#bar"); check!("/foo/bar/baz", "#foo#bar#baz"); } #[cfg(windows)] #[test] fn generate_custom_path_separator_windows() { let arg = FormatTemplate::Tokens(vec![Token::Placeholder]); macro_rules! check { ($input:expr, $expected:expr) => { assert_eq!(arg.generate($input, Some("#")), OsString::from($expected)); }; } // path starting with a drive letter check!(r"C:\foo\bar", "C:#foo#bar"); // UNC path check!(r"\\server\share\path", "##server#share#path"); // Drive Relative path - no separator after the colon omits the RootDir path component. 
// This is uncommon, but valid check!(r"C:foo\bar", "C:foo#bar"); // forward slashes should get normalized and interpreted as separators check!("C:/foo/bar", "C:#foo#bar"); check!("C:foo/bar", "C:foo#bar"); // Rust does not interpret "//server/share" as a UNC path, but rather as a normal // absolute path that begins with RootDir, and the two slashes get combined together as // a single path separator during normalization. //check!("//server/share/path", "##server#share#path"); } }
rust
Apache-2.0
5f95a781212e3efbc6d91ae50e0ca0ce0693da50
2026-01-04T15:31:59.463143Z
false
sharkdp/fd
https://github.com/sharkdp/fd/blob/5f95a781212e3efbc6d91ae50e0ca0ce0693da50/src/exec/job.rs
src/exec/job.rs
use crate::config::Config; use crate::error::print_error; use crate::exit_codes::{ExitCode, merge_exitcodes}; use crate::walk::WorkerResult; use super::CommandSet; /// An event loop that listens for inputs from the `rx` receiver. Each received input will /// generate a command with the supplied command template. The generated command will then /// be executed, and this process will continue until the receiver's sender has closed. pub fn job( results: impl IntoIterator<Item = WorkerResult>, cmd: &CommandSet, config: &Config, ) -> ExitCode { // Output should be buffered when only running a single thread let buffer_output: bool = config.threads > 1; let mut ret = ExitCode::Success; for result in results { // Obtain the next result from the receiver, else if the channel // has closed, exit from the loop let dir_entry = match result { WorkerResult::Entry(dir_entry) => dir_entry, WorkerResult::Error(err) => { if config.show_filesystem_errors { print_error(err.to_string()); } continue; } }; // Generate a command, execute it and store its exit code. let code = cmd.execute( dir_entry.stripped_path(config), config.path_separator.as_deref(), config.null_separator, buffer_output, ); ret = merge_exitcodes([ret, code]); } // Returns error in case of any error. ret } pub fn batch( results: impl IntoIterator<Item = WorkerResult>, cmd: &CommandSet, config: &Config, ) -> ExitCode { let paths = results .into_iter() .filter_map(|worker_result| match worker_result { WorkerResult::Entry(dir_entry) => Some(dir_entry.into_stripped_path(config)), WorkerResult::Error(err) => { if config.show_filesystem_errors { print_error(err.to_string()); } None } }); cmd.execute_batch(paths, config.batch_size, config.path_separator.as_deref()) }
rust
Apache-2.0
5f95a781212e3efbc6d91ae50e0ca0ce0693da50
2026-01-04T15:31:59.463143Z
false
sharkdp/fd
https://github.com/sharkdp/fd/blob/5f95a781212e3efbc6d91ae50e0ca0ce0693da50/src/fmt/mod.rs
src/fmt/mod.rs
mod input; use std::borrow::Cow; use std::ffi::{OsStr, OsString}; use std::fmt::{self, Display, Formatter}; use std::path::{Component, Path, Prefix}; use std::sync::OnceLock; use aho_corasick::AhoCorasick; use self::input::{basename, dirname, remove_extension}; /// Designates what should be written to a buffer /// /// Each `Token` contains either text, or a placeholder variant, which will be used to generate /// commands after all tokens for a given command template have been collected. #[derive(Clone, Debug, PartialEq, Eq)] pub enum Token { Placeholder, Basename, Parent, NoExt, BasenameNoExt, Text(String), } impl Display for Token { fn fmt(&self, f: &mut Formatter) -> fmt::Result { match *self { Token::Placeholder => f.write_str("{}")?, Token::Basename => f.write_str("{/}")?, Token::Parent => f.write_str("{//}")?, Token::NoExt => f.write_str("{.}")?, Token::BasenameNoExt => f.write_str("{/.}")?, Token::Text(ref string) => f.write_str(string)?, } Ok(()) } } /// A parsed format string /// /// This is either a collection of `Token`s including at least one placeholder variant, /// or a fixed text. #[derive(Clone, Debug, PartialEq)] pub enum FormatTemplate { Tokens(Vec<Token>), Text(String), } static PLACEHOLDERS: OnceLock<AhoCorasick> = OnceLock::new(); impl FormatTemplate { pub fn has_tokens(&self) -> bool { matches!(self, FormatTemplate::Tokens(_)) } pub fn parse(fmt: &str) -> Self { // NOTE: we assume that { and } have the same length const BRACE_LEN: usize = '{'.len_utf8(); let mut tokens = Vec::new(); let mut remaining = fmt; let mut buf = String::new(); let placeholders = PLACEHOLDERS.get_or_init(|| { AhoCorasick::new(["{{", "}}", "{}", "{/}", "{//}", "{.}", "{/.}"]).unwrap() }); while let Some(m) = placeholders.find(remaining) { match m.pattern().as_u32() { 0 | 1 => { // we found an escaped {{ or }}, so add // everything up to the first char to the buffer // then skip the second one. 
buf += &remaining[..m.start() + BRACE_LEN]; remaining = &remaining[m.end()..]; } id if !remaining[m.end()..].starts_with('}') => { buf += &remaining[..m.start()]; if !buf.is_empty() { tokens.push(Token::Text(std::mem::take(&mut buf))); } tokens.push(token_from_pattern_id(id)); remaining = &remaining[m.end()..]; } _ => { // We got a normal pattern, but the final "}" // is escaped, so add up to that to the buffer, then // skip the final } buf += &remaining[..m.end()]; remaining = &remaining[m.end() + BRACE_LEN..]; } } } // Add the rest of the string to the buffer, and add the final buffer to the tokens if !remaining.is_empty() { buf += remaining; } if tokens.is_empty() { // No placeholders were found, so just return the text return FormatTemplate::Text(buf); } // Add final text segment if !buf.is_empty() { tokens.push(Token::Text(buf)); } debug_assert!(!tokens.is_empty()); FormatTemplate::Tokens(tokens) } /// Generate a result string from this template. If path_separator is Some, then it will replace /// the path separator in all placeholder tokens. Fixed text and tokens are not affected by /// path separator substitution. pub fn generate(&self, path: impl AsRef<Path>, path_separator: Option<&str>) -> OsString { use Token::*; let path = path.as_ref(); match *self { Self::Tokens(ref tokens) => { let mut s = OsString::new(); for token in tokens { match token { Basename => s.push(Self::replace_separator(basename(path), path_separator)), BasenameNoExt => s.push(Self::replace_separator( &remove_extension(basename(path).as_ref()), path_separator, )), NoExt => s.push(Self::replace_separator( &remove_extension(path), path_separator, )), Parent => s.push(Self::replace_separator(&dirname(path), path_separator)), Placeholder => { s.push(Self::replace_separator(path.as_ref(), path_separator)) } Text(string) => s.push(string), } } s } Self::Text(ref text) => OsString::from(text), } } /// Replace the path separator in the input with the custom separator string. 
If path_separator /// is None, simply return a borrowed Cow<OsStr> of the input. Otherwise, the input is /// interpreted as a Path and its components are iterated through and re-joined into a new /// OsString. fn replace_separator<'a>(path: &'a OsStr, path_separator: Option<&str>) -> Cow<'a, OsStr> { // fast-path - no replacement necessary if path_separator.is_none() { return Cow::Borrowed(path); } let path_separator = path_separator.unwrap(); let mut out = OsString::with_capacity(path.len()); let mut components = Path::new(path).components().peekable(); while let Some(comp) = components.next() { match comp { // Absolute paths on Windows are tricky. A Prefix component is usually a drive // letter or UNC path, and is usually followed by RootDir. There are also // "verbatim" prefixes beginning with "\\?\" that skip normalization. We choose to // ignore verbatim path prefixes here because they're very rare, might be // impossible to reach here, and there's no good way to deal with them. If users // are doing something advanced involving verbatim windows paths, they can do their // own output filtering with a tool like sed. Component::Prefix(prefix) => { if let Prefix::UNC(server, share) = prefix.kind() { // Prefix::UNC is a parsed version of '\\server\share' out.push(path_separator); out.push(path_separator); out.push(server); out.push(path_separator); out.push(share); } else { // All other Windows prefix types are rendered as-is. This results in e.g. "C:" for // drive letters. DeviceNS and Verbatim* prefixes won't have backslashes converted, // but they're not returned by directories fd can search anyway so we don't worry // about them. out.push(comp.as_os_str()); } } // Root directory is always replaced with the custom separator. 
Component::RootDir => out.push(path_separator), // Everything else is joined normally, with a trailing separator if we're not last _ => { out.push(comp.as_os_str()); if components.peek().is_some() { out.push(path_separator); } } } } Cow::Owned(out) } } // Convert the id from an aho-corasick match to the // appropriate token fn token_from_pattern_id(id: u32) -> Token { use Token::*; match id { 2 => Placeholder, 3 => Basename, 4 => Parent, 5 => NoExt, 6 => BasenameNoExt, _ => unreachable!(), } } #[cfg(test)] mod fmt_tests { use super::*; use std::path::PathBuf; #[test] fn parse_no_placeholders() { let templ = FormatTemplate::parse("This string has no placeholders"); assert_eq!( templ, FormatTemplate::Text("This string has no placeholders".into()) ); } #[test] fn parse_only_brace_escapes() { let templ = FormatTemplate::parse("This string only has escapes like {{ and }}"); assert_eq!( templ, FormatTemplate::Text("This string only has escapes like { and }".into()) ); } #[test] fn all_placeholders() { use Token::*; let templ = FormatTemplate::parse( "{{path={} \ basename={/} \ parent={//} \ noExt={.} \ basenameNoExt={/.} \ }}", ); assert_eq!( templ, FormatTemplate::Tokens(vec![ Text("{path=".into()), Placeholder, Text(" basename=".into()), Basename, Text(" parent=".into()), Parent, Text(" noExt=".into()), NoExt, Text(" basenameNoExt=".into()), BasenameNoExt, Text(" }".into()), ]) ); let mut path = PathBuf::new(); path.push("a"); path.push("folder"); path.push("file.txt"); let expanded = templ.generate(&path, Some("/")).into_string().unwrap(); assert_eq!( expanded, "{path=a/folder/file.txt \ basename=file.txt \ parent=a/folder \ noExt=a/folder/file \ basenameNoExt=file }" ); } }
rust
Apache-2.0
5f95a781212e3efbc6d91ae50e0ca0ce0693da50
2026-01-04T15:31:59.463143Z
false
sharkdp/fd
https://github.com/sharkdp/fd/blob/5f95a781212e3efbc6d91ae50e0ca0ce0693da50/src/fmt/input.rs
src/fmt/input.rs
use std::ffi::{OsStr, OsString}; use std::path::{Path, PathBuf}; use crate::filesystem::strip_current_dir; /// Removes the parent component of the path pub fn basename(path: &Path) -> &OsStr { path.file_name().unwrap_or(path.as_os_str()) } /// Removes the extension from the path pub fn remove_extension(path: &Path) -> OsString { let dirname = dirname(path); let stem = path.file_stem().unwrap_or(path.as_os_str()); let path = PathBuf::from(dirname).join(stem); strip_current_dir(&path).to_owned().into_os_string() } /// Removes the basename from the path. pub fn dirname(path: &Path) -> OsString { path.parent() .map(|p| { if p == OsStr::new("") { OsString::from(".") } else { p.as_os_str().to_owned() } }) .unwrap_or_else(|| path.as_os_str().to_owned()) } #[cfg(test)] mod path_tests { use super::*; use std::path::MAIN_SEPARATOR_STR; fn correct(input: &str) -> String { input.replace('/', MAIN_SEPARATOR_STR) } macro_rules! func_tests { ($($name:ident: $func:ident for $input:expr => $output:expr)+) => { $( #[test] fn $name() { let input_path = PathBuf::from(&correct($input)); let output_string = OsString::from(correct($output)); assert_eq!($func(&input_path), output_string); } )+ } } func_tests! { remove_ext_simple: remove_extension for "foo.txt" => "foo" remove_ext_dir: remove_extension for "dir/foo.txt" => "dir/foo" hidden: remove_extension for ".foo" => ".foo" remove_ext_utf8: remove_extension for "💖.txt" => "💖" remove_ext_empty: remove_extension for "" => "" basename_simple: basename for "foo.txt" => "foo.txt" basename_dir: basename for "dir/foo.txt" => "foo.txt" basename_empty: basename for "" => "" basename_utf8_0: basename for "💖/foo.txt" => "foo.txt" basename_utf8_1: basename for "dir/💖.txt" => "💖.txt" dirname_simple: dirname for "foo.txt" => "." 
dirname_dir: dirname for "dir/foo.txt" => "dir" dirname_utf8_0: dirname for "💖/foo.txt" => "💖" dirname_utf8_1: dirname for "dir/💖.txt" => "dir" } #[test] #[cfg(windows)] fn dirname_root() { assert_eq!(dirname(&PathBuf::from("C:")), OsString::from("C:")); assert_eq!(dirname(&PathBuf::from("\\")), OsString::from("\\")); } #[test] #[cfg(not(windows))] fn dirname_root() { assert_eq!(dirname(&PathBuf::from("/")), OsString::from("/")); } }
rust
Apache-2.0
5f95a781212e3efbc6d91ae50e0ca0ce0693da50
2026-01-04T15:31:59.463143Z
false
sharkdp/fd
https://github.com/sharkdp/fd/blob/5f95a781212e3efbc6d91ae50e0ca0ce0693da50/tests/tests.rs
tests/tests.rs
mod testenv; #[cfg(unix)] use nix::unistd::{Gid, Group, Uid, User}; use std::fs; use std::io::Write; use std::path::Path; use std::time::{Duration, SystemTime}; use test_case::test_case; use jiff::Timestamp; use normpath::PathExt; use regex::escape; use crate::testenv::TestEnv; static DEFAULT_DIRS: &[&str] = &["one/two/three", "one/two/three/directory_foo"]; static DEFAULT_FILES: &[&str] = &[ "a.foo", "one/b.foo", "one/two/c.foo", "one/two/C.Foo2", "one/two/three/d.foo", "fdignored.foo", "gitignored.foo", ".hidden.foo", "e1 e2", ]; #[allow(clippy::let_and_return)] fn get_absolute_root_path(env: &TestEnv) -> String { let path = env .test_root() .normalize() .expect("absolute path") .as_path() .to_str() .expect("string") .to_string(); #[cfg(windows)] let path = path.trim_start_matches(r"\\?\").to_string(); path } #[cfg(test)] fn get_test_env_with_abs_path(dirs: &[&'static str], files: &[&'static str]) -> (TestEnv, String) { let env = TestEnv::new(dirs, files); let root_path = get_absolute_root_path(&env); (env, root_path) } #[cfg(test)] fn create_file_with_size<P: AsRef<Path>>(path: P, size_in_bytes: usize) { let content = "#".repeat(size_in_bytes); let mut f = fs::File::create::<P>(path).unwrap(); f.write_all(content.as_bytes()).unwrap(); } /// Simple test #[test] fn test_simple() { let te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES); te.assert_output(&["a.foo"], "a.foo"); te.assert_output(&["b.foo"], "one/b.foo"); te.assert_output(&["d.foo"], "one/two/three/d.foo"); te.assert_output( &["foo"], "a.foo one/b.foo one/two/c.foo one/two/C.Foo2 one/two/three/d.foo one/two/three/directory_foo/", ); } static AND_EXTRA_FILES: &[&str] = &[ "a.foo", "one/b.foo", "one/two/c.foo", "one/two/C.Foo2", "one/two/three/baz-quux", "one/two/three/Baz-Quux2", "one/two/three/d.foo", "fdignored.foo", "gitignored.foo", ".hidden.foo", "A-B.jpg", "A-C.png", "B-A.png", "B-C.png", "C-A.jpg", "C-B.png", "e1 e2", ]; /// AND test #[test] fn test_and_basic() { let te = TestEnv::new(DEFAULT_DIRS, 
AND_EXTRA_FILES); te.assert_output( &["foo", "--and", "c"], "one/two/C.Foo2 one/two/c.foo one/two/three/directory_foo/", ); te.assert_output( &["f", "--and", "[ad]", "--and", "[_]"], "one/two/three/directory_foo/", ); te.assert_output( &["f", "--and", "[ad]", "--and", "[.]"], "a.foo one/two/three/d.foo", ); te.assert_output(&["Foo", "--and", "C"], "one/two/C.Foo2"); te.assert_output(&["foo", "--and", "asdasdasdsadasd"], ""); } #[test] fn test_and_empty_pattern() { let te = TestEnv::new(DEFAULT_DIRS, AND_EXTRA_FILES); te.assert_output(&["Foo", "--and", "2", "--and", ""], "one/two/C.Foo2"); } #[test] fn test_and_bad_pattern() { let te = TestEnv::new(DEFAULT_DIRS, AND_EXTRA_FILES); te.assert_failure(&["Foo", "--and", "2", "--and", "[", "--and", "C"]); te.assert_failure(&["Foo", "--and", "[", "--and", "2", "--and", "C"]); te.assert_failure(&["Foo", "--and", "2", "--and", "C", "--and", "["]); te.assert_failure(&["[", "--and", "2", "--and", "C", "--and", "Foo"]); } #[test] fn test_and_pattern_starts_with_dash() { let te = TestEnv::new(DEFAULT_DIRS, AND_EXTRA_FILES); te.assert_output( &["baz", "--and", "quux"], "one/two/three/Baz-Quux2 one/two/three/baz-quux", ); te.assert_output( &["baz", "--and", "-"], "one/two/three/Baz-Quux2 one/two/three/baz-quux", ); te.assert_output( &["Quu", "--and", "x", "--and", "-"], "one/two/three/Baz-Quux2", ); } #[test] fn test_and_plus_extension() { let te = TestEnv::new(DEFAULT_DIRS, AND_EXTRA_FILES); te.assert_output( &[ "A", "--and", "B", "--extension", "jpg", "--extension", "png", ], "A-B.jpg B-A.png", ); te.assert_output( &[ "A", "--extension", "jpg", "--and", "B", "--extension", "png", ], "A-B.jpg B-A.png", ); } #[test] fn test_and_plus_type() { let te = TestEnv::new(DEFAULT_DIRS, AND_EXTRA_FILES); te.assert_output( &["c", "--type", "d", "--and", "foo"], "one/two/three/directory_foo/", ); te.assert_output( &["c", "--type", "f", "--and", "foo"], "one/two/C.Foo2 one/two/c.foo", ); } #[test] fn test_and_plus_glob() { let te = 
TestEnv::new(DEFAULT_DIRS, AND_EXTRA_FILES); te.assert_output(&["*foo", "--glob", "--and", "c*"], "one/two/c.foo"); } #[test] fn test_and_plus_fixed_strings() { let te = TestEnv::new(DEFAULT_DIRS, AND_EXTRA_FILES); te.assert_output( &["foo", "--fixed-strings", "--and", "c", "--and", "."], "one/two/c.foo one/two/C.Foo2", ); te.assert_output( &["foo", "--fixed-strings", "--and", "[c]", "--and", "."], "", ); te.assert_output( &["Foo", "--fixed-strings", "--and", "C", "--and", "."], "one/two/C.Foo2", ); } #[test] fn test_and_plus_ignore_case() { let te = TestEnv::new(DEFAULT_DIRS, AND_EXTRA_FILES); te.assert_output( &["Foo", "--ignore-case", "--and", "C", "--and", "[.]"], "one/two/C.Foo2 one/two/c.foo", ); } #[test] fn test_and_plus_case_sensitive() { let te = TestEnv::new(DEFAULT_DIRS, AND_EXTRA_FILES); te.assert_output( &["foo", "--case-sensitive", "--and", "c", "--and", "[.]"], "one/two/c.foo", ); } #[test] fn test_and_plus_full_path() { let te = TestEnv::new(DEFAULT_DIRS, AND_EXTRA_FILES); te.assert_output( &[ "three", "--full-path", "--and", "_foo", "--and", r"[/\\]dir", ], "one/two/three/directory_foo/", ); te.assert_output( &[ "three", "--full-path", "--and", r"[/\\]two", "--and", r"[/\\]dir", ], "one/two/three/directory_foo/", ); } /// Test each pattern type with an empty pattern. 
#[test] fn test_empty_pattern() { let te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES); let expected = "a.foo e1 e2 one/ one/b.foo one/two/ one/two/c.foo one/two/C.Foo2 one/two/three/ one/two/three/d.foo one/two/three/directory_foo/ symlink"; te.assert_output(&["--regex"], expected); te.assert_output(&["--fixed-strings"], expected); te.assert_output(&["--glob"], expected); } /// Test multiple directory searches #[test] fn test_multi_file() { let dirs = &["test1", "test2"]; let files = &["test1/a.foo", "test1/b.foo", "test2/a.foo"]; let te = TestEnv::new(dirs, files); te.assert_output( &["a.foo", "test1", "test2"], "test1/a.foo test2/a.foo", ); te.assert_output( &["", "test1", "test2"], "test1/a.foo test2/a.foo test1/b.foo", ); te.assert_output(&["a.foo", "test1"], "test1/a.foo"); te.assert_output(&["b.foo", "test1", "test2"], "test1/b.foo"); } /// Test search over multiple directory with missing #[test] fn test_multi_file_with_missing() { let dirs = &["real"]; let files = &["real/a.foo", "real/b.foo"]; let te = TestEnv::new(dirs, files); te.assert_output(&["a.foo", "real", "fake"], "real/a.foo"); te.assert_error( &["a.foo", "real", "fake"], "[fd error]: Search path 'fake' is not a directory.", ); te.assert_output( &["", "real", "fake"], "real/a.foo real/b.foo", ); te.assert_output( &["", "real", "fake1", "fake2"], "real/a.foo real/b.foo", ); te.assert_error( &["", "real", "fake1", "fake2"], "[fd error]: Search path 'fake1' is not a directory. [fd error]: Search path 'fake2' is not a directory.", ); te.assert_failure_with_error( &["", "fake1", "fake2"], "[fd error]: Search path 'fake1' is not a directory. [fd error]: Search path 'fake2' is not a directory. 
[fd error]: No valid search paths given.", ); } /// Explicit root path #[test] fn test_explicit_root_path() { let te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES); te.assert_output( &["foo", "one"], "one/b.foo one/two/c.foo one/two/C.Foo2 one/two/three/d.foo one/two/three/directory_foo/", ); te.assert_output( &["foo", "one/two/three"], "one/two/three/d.foo one/two/three/directory_foo/", ); te.assert_output_subdirectory( "one/two/", &["foo", "../../"], "../../a.foo ../../one/b.foo ../../one/two/c.foo ../../one/two/C.Foo2 ../../one/two/three/d.foo ../../one/two/three/directory_foo/", ); te.assert_output_subdirectory( "one/two/three", &["", ".."], "../c.foo ../C.Foo2 ../three/ ../three/d.foo ../three/directory_foo/", ); } /// Regex searches #[test] fn test_regex_searches() { let te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES); te.assert_output( &["[a-c].foo"], "a.foo one/b.foo one/two/c.foo one/two/C.Foo2", ); te.assert_output( &["--case-sensitive", "[a-c].foo"], "a.foo one/b.foo one/two/c.foo", ); } /// Smart case #[test] fn test_smart_case() { let te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES); te.assert_output( &["c.foo"], "one/two/c.foo one/two/C.Foo2", ); te.assert_output(&["C.Foo"], "one/two/C.Foo2"); te.assert_output(&["Foo"], "one/two/C.Foo2"); // Only literal uppercase chars should trigger case sensitivity. 
te.assert_output( &["\\Ac"], "one/two/c.foo one/two/C.Foo2", ); te.assert_output(&["\\AC"], "one/two/C.Foo2"); } /// Case sensitivity (--case-sensitive) #[test] fn test_case_sensitive() { let te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES); te.assert_output(&["--case-sensitive", "c.foo"], "one/two/c.foo"); te.assert_output(&["--case-sensitive", "C.Foo"], "one/two/C.Foo2"); te.assert_output( &["--ignore-case", "--case-sensitive", "C.Foo"], "one/two/C.Foo2", ); } /// Case insensitivity (--ignore-case) #[test] fn test_case_insensitive() { let te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES); te.assert_output( &["--ignore-case", "C.Foo"], "one/two/c.foo one/two/C.Foo2", ); te.assert_output( &["--case-sensitive", "--ignore-case", "C.Foo"], "one/two/c.foo one/two/C.Foo2", ); } /// Glob-based searches (--glob) #[test] fn test_glob_searches() { let te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES); te.assert_output( &["--glob", "*.foo"], "a.foo one/b.foo one/two/c.foo one/two/three/d.foo", ); te.assert_output( &["--glob", "[a-c].foo"], "a.foo one/b.foo one/two/c.foo", ); te.assert_output( &["--glob", "[a-c].foo*"], "a.foo one/b.foo one/two/C.Foo2 one/two/c.foo", ); } /// Glob-based searches (--glob) in combination with full path searches (--full-path) #[cfg(not(windows))] // TODO: make this work on Windows #[test] fn test_full_path_glob_searches() { let te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES); te.assert_output( &["--glob", "--full-path", "**/one/**/*.foo"], "one/b.foo one/two/c.foo one/two/three/d.foo", ); te.assert_output( &["--glob", "--full-path", "**/one/*/*.foo"], " one/two/c.foo", ); te.assert_output( &["--glob", "--full-path", "**/one/*/*/*.foo"], " one/two/three/d.foo", ); } #[test] fn test_smart_case_glob_searches() { let te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES); te.assert_output( &["--glob", "c.foo*"], "one/two/C.Foo2 one/two/c.foo", ); te.assert_output(&["--glob", "C.Foo*"], "one/two/C.Foo2"); } /// Glob-based searches (--glob) in combination with 
--case-sensitive #[test] fn test_case_sensitive_glob_searches() { let te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES); te.assert_output(&["--glob", "--case-sensitive", "c.foo*"], "one/two/c.foo"); } /// Glob-based searches (--glob) in combination with --extension #[test] fn test_glob_searches_with_extension() { let te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES); te.assert_output( &["--glob", "--extension", "foo2", "[a-z].*"], "one/two/C.Foo2", ); } /// Make sure that --regex overrides --glob #[test] fn test_regex_overrides_glob() { let te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES); te.assert_output(&["--glob", "--regex", "Foo2$"], "one/two/C.Foo2"); } /// Full path search (--full-path) #[test] fn test_full_path() { let te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES); let root = te.system_root(); let prefix = escape(&root.to_string_lossy()); te.assert_output( &["--full-path", &format!("^{prefix}.*three.*foo$")], "one/two/three/d.foo one/two/three/directory_foo/", ); } /// Hidden files (--hidden) #[test] fn test_hidden() { let te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES); te.assert_output( &["--hidden", "foo"], ".hidden.foo a.foo one/b.foo one/two/c.foo one/two/C.Foo2 one/two/three/d.foo one/two/three/directory_foo/", ); } /// Hidden file attribute on Windows #[cfg(windows)] #[test] fn test_hidden_file_attribute() { use std::os::windows::fs::OpenOptionsExt; let te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES); // https://docs.microsoft.com/en-us/windows/win32/api/fileapi/nf-fileapi-setfileattributesa const FILE_ATTRIBUTE_HIDDEN: u32 = 2; fs::OpenOptions::new() .create(true) .write(true) .attributes(FILE_ATTRIBUTE_HIDDEN) .open(te.test_root().join("hidden-file.txt")) .unwrap(); te.assert_output(&["--hidden", "hidden-file.txt"], "hidden-file.txt"); te.assert_output(&["hidden-file.txt"], ""); } /// Ignored files (--no-ignore) #[test] fn test_no_ignore() { let te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES); te.assert_output( &["--no-ignore", "foo"], "a.foo fdignored.foo 
gitignored.foo one/b.foo one/two/c.foo one/two/C.Foo2 one/two/three/d.foo one/two/three/directory_foo/", ); te.assert_output( &["--hidden", "--no-ignore", "foo"], ".hidden.foo a.foo fdignored.foo gitignored.foo one/b.foo one/two/c.foo one/two/C.Foo2 one/two/three/d.foo one/two/three/directory_foo/", ); } /// .gitignore and .fdignore #[test] fn test_gitignore_and_fdignore() { let files = &[ "ignored-by-nothing", "ignored-by-fdignore", "ignored-by-gitignore", "ignored-by-both", ]; let te = TestEnv::new(&[], files); fs::File::create(te.test_root().join(".fdignore")) .unwrap() .write_all(b"ignored-by-fdignore\nignored-by-both") .unwrap(); fs::File::create(te.test_root().join(".gitignore")) .unwrap() .write_all(b"ignored-by-gitignore\nignored-by-both") .unwrap(); te.assert_output(&["ignored"], "ignored-by-nothing"); te.assert_output( &["--no-ignore-vcs", "ignored"], "ignored-by-nothing ignored-by-gitignore", ); te.assert_output( &["--no-ignore", "ignored"], "ignored-by-nothing ignored-by-fdignore ignored-by-gitignore ignored-by-both", ); } /// Ignore parent ignore files (--no-ignore-parent) #[test] fn test_no_ignore_parent() { let dirs = &["inner"]; let files = &[ "inner/parent-ignored", "inner/child-ignored", "inner/not-ignored", ]; let te = TestEnv::new(dirs, files); // Ignore 'parent-ignored' in root fs::File::create(te.test_root().join(".gitignore")) .unwrap() .write_all(b"parent-ignored") .unwrap(); // Ignore 'child-ignored' in inner fs::File::create(te.test_root().join("inner/.gitignore")) .unwrap() .write_all(b"child-ignored") .unwrap(); te.assert_output_subdirectory("inner", &[], "not-ignored"); te.assert_output_subdirectory( "inner", &["--no-ignore-parent"], "parent-ignored not-ignored", ); } /// Ignore parent ignore files (--no-ignore-parent) with an inner git repo #[test] fn test_no_ignore_parent_inner_git() { let dirs = &["inner"]; let files = &[ "inner/parent-ignored", "inner/child-ignored", "inner/not-ignored", ]; let te = TestEnv::new(dirs, files); // 
Make the inner folder also appear as a git repo fs::create_dir_all(te.test_root().join("inner/.git")).unwrap(); // Ignore 'parent-ignored' in root fs::File::create(te.test_root().join(".gitignore")) .unwrap() .write_all(b"parent-ignored") .unwrap(); // Ignore 'child-ignored' in inner fs::File::create(te.test_root().join("inner/.gitignore")) .unwrap() .write_all(b"child-ignored") .unwrap(); te.assert_output_subdirectory( "inner", &[], "not-ignored parent-ignored", ); te.assert_output_subdirectory( "inner", &["--no-ignore-parent"], "not-ignored parent-ignored", ); } /// Precedence of .fdignore files #[test] fn test_custom_ignore_precedence() { let dirs = &["inner"]; let files = &["inner/foo"]; let te = TestEnv::new(dirs, files); // Ignore 'foo' via .gitignore fs::File::create(te.test_root().join("inner/.gitignore")) .unwrap() .write_all(b"foo") .unwrap(); // Whitelist 'foo' via .fdignore fs::File::create(te.test_root().join(".fdignore")) .unwrap() .write_all(b"!foo") .unwrap(); te.assert_output(&["foo"], "inner/foo"); te.assert_output(&["--no-ignore-vcs", "foo"], "inner/foo"); te.assert_output(&["--no-ignore", "foo"], "inner/foo"); } /// Don't require git to respect gitignore (--no-require-git) #[test] fn test_respect_ignore_files() { let te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES); // Not in a git repo anymore fs::remove_dir(te.test_root().join(".git")).unwrap(); // don't respect gitignore because we're not in a git repo te.assert_output( &["foo"], "a.foo gitignored.foo one/b.foo one/two/c.foo one/two/C.Foo2 one/two/three/d.foo one/two/three/directory_foo/", ); // respect gitignore because we set `--no-require-git` te.assert_output( &["--no-require-git", "foo"], "a.foo one/b.foo one/two/c.foo one/two/C.Foo2 one/two/three/d.foo one/two/three/directory_foo/", ); // make sure overriding works te.assert_output( &["--no-require-git", "--require-git", "foo"], "a.foo gitignored.foo one/b.foo one/two/c.foo one/two/C.Foo2 one/two/three/d.foo one/two/three/directory_foo/", 
); te.assert_output( &["--no-require-git", "--no-ignore", "foo"], "a.foo gitignored.foo fdignored.foo one/b.foo one/two/c.foo one/two/C.Foo2 one/two/three/d.foo one/two/three/directory_foo/", ); } /// VCS ignored files (--no-ignore-vcs) #[test] fn test_no_ignore_vcs() { let te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES); te.assert_output( &["--no-ignore-vcs", "foo"], "a.foo gitignored.foo one/b.foo one/two/c.foo one/two/C.Foo2 one/two/three/d.foo one/two/three/directory_foo/", ); } /// Test that --no-ignore-vcs still respects .fdignored in parent directory #[test] fn test_no_ignore_vcs_child_dir() { let te = TestEnv::new( &["inner"], &["inner/fdignored.foo", "inner/foo", "inner/gitignored.foo"], ); te.assert_output_subdirectory( "inner", &["--no-ignore-vcs", "foo"], "foo gitignored.foo", ); } /// Custom ignore files (--ignore-file) #[test] fn test_custom_ignore_files() { let te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES); // Ignore 'C.Foo2' and everything in 'three'. fs::File::create(te.test_root().join("custom.ignore")) .unwrap() .write_all(b"C.Foo2\nthree") .unwrap(); te.assert_output( &["--ignore-file", "custom.ignore", "foo"], "a.foo one/b.foo one/two/c.foo", ); } /// Ignored files with ripgrep aliases (-u / -uu) #[test] fn test_no_ignore_aliases() { let te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES); te.assert_output( &["-u", "foo"], ".hidden.foo a.foo fdignored.foo gitignored.foo one/b.foo one/two/c.foo one/two/C.Foo2 one/two/three/d.foo one/two/three/directory_foo/", ); } #[cfg(not(windows))] #[test] fn test_global_ignore() { let te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES).global_ignore_file("one"); te.assert_output( &[], "a.foo e1 e2 symlink", ); } #[cfg(not(windows))] #[test_case("--unrestricted", ".hidden.foo a.foo fdignored.foo gitignored.foo one/b.foo one/two/c.foo one/two/C.Foo2 one/two/three/d.foo one/two/three/directory_foo/"; "unrestricted")] #[test_case("--no-ignore", "a.foo fdignored.foo gitignored.foo one/b.foo one/two/c.foo one/two/C.Foo2 
one/two/three/d.foo one/two/three/directory_foo/"; "no-ignore")] #[test_case("--no-global-ignore-file", "a.foo one/b.foo one/two/c.foo one/two/C.Foo2 one/two/three/d.foo one/two/three/directory_foo/"; "no-global-ignore-file")] fn test_no_global_ignore(flag: &str, expected_output: &str) { let te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES).global_ignore_file("one"); te.assert_output(&[flag, "foo"], expected_output); } /// Symlinks (--follow) #[test] fn test_follow() { let te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES); te.assert_output( &["--follow", "c.foo"], "one/two/c.foo one/two/C.Foo2 symlink/c.foo symlink/C.Foo2", ); } // File system boundaries (--one-file-system) // Limited to Unix because, to the best of my knowledge, there is no easy way to test a use case // file systems mounted into the tree on Windows. // Not limiting depth causes massive delay under Darwin, see BurntSushi/ripgrep#1429 #[test] #[cfg(unix)] fn test_file_system_boundaries() { // Helper function to get the device ID for a given path // Inspired by https://github.com/BurntSushi/ripgrep/blob/8892bf648cfec111e6e7ddd9f30e932b0371db68/ignore/src/walk.rs#L1693 fn device_num(path: impl AsRef<Path>) -> u64 { use std::os::unix::fs::MetadataExt; path.as_ref().metadata().map(|md| md.dev()).unwrap() } // Can't simulate file system boundaries let te = TestEnv::new(&[], &[]); let dev_null = Path::new("/dev/null"); // /dev/null should exist in all sane Unixes. Skip if it doesn't exist for some reason. // Also skip should it be on the same device as the root partition for some reason. 
if !dev_null.is_file() || device_num(dev_null) == device_num("/") { return; } te.assert_output( &["--full-path", "--max-depth", "2", "^/dev/null$", "/"], "/dev/null", ); te.assert_output( &[ "--one-file-system", "--full-path", "--max-depth", "2", "^/dev/null$", "/", ], "", ); } #[test] fn test_follow_broken_symlink() { let mut te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES); te.create_broken_symlink("broken_symlink") .expect("Failed to create broken symlink."); te.assert_output( &["symlink"], "broken_symlink symlink", ); te.assert_output( &["--type", "symlink", "symlink"], "broken_symlink symlink", ); te.assert_output(&["--type", "file", "symlink"], ""); te.assert_output( &["--follow", "--type", "symlink", "symlink"], "broken_symlink", ); te.assert_output(&["--follow", "--type", "file", "symlink"], ""); } /// Null separator (--print0) #[test] fn test_print0() { let te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES); te.assert_output( &["--print0", "foo"], "./a.fooNULL ./one/b.fooNULL ./one/two/C.Foo2NULL ./one/two/c.fooNULL ./one/two/three/d.fooNULL ./one/two/three/directory_foo/NULL", ); } /// Maximum depth (--max-depth) #[test] fn test_max_depth() { let te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES); te.assert_output( &["--max-depth", "3"], "a.foo e1 e2 one/ one/b.foo one/two/ one/two/c.foo one/two/C.Foo2 one/two/three/ symlink", ); te.assert_output( &["--max-depth", "2"], "a.foo e1 e2 one/ one/b.foo one/two/ symlink", ); te.assert_output( &["--max-depth", "1"], "a.foo e1 e2 one/ symlink", ); } /// Minimum depth (--min-depth) #[test] fn test_min_depth() { let te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES); te.assert_output( &["--min-depth", "3"], "one/two/c.foo one/two/C.Foo2 one/two/three/ one/two/three/d.foo one/two/three/directory_foo/", ); te.assert_output( &["--min-depth", "4"], "one/two/three/d.foo one/two/three/directory_foo/", ); } /// Exact depth (--exact-depth) #[test] fn test_exact_depth() { let te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES); 
te.assert_output( &["--exact-depth", "3"], "one/two/c.foo one/two/C.Foo2 one/two/three/", ); } /// Pruning (--prune) #[test] fn test_prune() { let dirs = &["foo/bar", "bar/foo", "baz"]; let files = &[ "foo/foo.file", "foo/bar/foo.file", "bar/foo.file", "bar/foo/foo.file", "baz/foo.file", ]; let te = TestEnv::new(dirs, files); te.assert_output( &["foo"], "foo/ foo/foo.file foo/bar/foo.file bar/foo.file bar/foo/ bar/foo/foo.file baz/foo.file", ); te.assert_output( &["--prune", "foo"], "foo/ bar/foo/ bar/foo.file baz/foo.file", ); } /// Absolute paths (--absolute-path) #[test] fn test_absolute_path() { let (te, abs_path) = get_test_env_with_abs_path(DEFAULT_DIRS, DEFAULT_FILES); te.assert_output( &["--absolute-path"], &format!( "{abs_path}/a.foo {abs_path}/e1 e2 {abs_path}/one/ {abs_path}/one/b.foo {abs_path}/one/two/ {abs_path}/one/two/c.foo {abs_path}/one/two/C.Foo2 {abs_path}/one/two/three/ {abs_path}/one/two/three/d.foo {abs_path}/one/two/three/directory_foo/ {abs_path}/symlink", abs_path = &abs_path ), ); te.assert_output( &["--absolute-path", "foo"], &format!( "{abs_path}/a.foo {abs_path}/one/b.foo {abs_path}/one/two/c.foo {abs_path}/one/two/C.Foo2 {abs_path}/one/two/three/d.foo {abs_path}/one/two/three/directory_foo/", abs_path = &abs_path ), ); } /// Show absolute paths if the path argument is absolute #[test] fn test_implicit_absolute_path() { let (te, abs_path) = get_test_env_with_abs_path(DEFAULT_DIRS, DEFAULT_FILES); te.assert_output( &["foo", &abs_path], &format!( "{abs_path}/a.foo {abs_path}/one/b.foo {abs_path}/one/two/c.foo {abs_path}/one/two/C.Foo2 {abs_path}/one/two/three/d.foo {abs_path}/one/two/three/directory_foo/", abs_path = &abs_path ), ); } /// Absolute paths should be normalized #[test] fn test_normalized_absolute_path() { let (te, abs_path) = get_test_env_with_abs_path(DEFAULT_DIRS, DEFAULT_FILES); te.assert_output_subdirectory( "one", &["--absolute-path", "foo", ".."], &format!( "{abs_path}/a.foo {abs_path}/one/b.foo 
{abs_path}/one/two/c.foo {abs_path}/one/two/C.Foo2 {abs_path}/one/two/three/d.foo {abs_path}/one/two/three/directory_foo/", abs_path = &abs_path ), ); } /// File type filter (--type) #[test] fn test_type() { let te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES); te.assert_output( &["--type", "f"], "a.foo e1 e2 one/b.foo one/two/c.foo one/two/C.Foo2 one/two/three/d.foo", ); te.assert_output(&["--type", "f", "e1"], "e1 e2"); te.assert_output( &["--type", "d"], "one/ one/two/ one/two/three/ one/two/three/directory_foo/", ); te.assert_output( &["--type", "d", "--type", "l"], "one/ one/two/ one/two/three/ one/two/three/directory_foo/ symlink", ); te.assert_output(&["--type", "l"], "symlink"); } /// Test `--type executable` #[cfg(unix)] #[test] fn test_type_executable() { use std::os::unix::fs::OpenOptionsExt; // This test assumes the current user isn't root // (otherwise if the executable bit is set for any level, it is executable for the current // user) if Uid::current().is_root() { return; } let te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES); fs::OpenOptions::new() .create_new(true) .truncate(true) .write(true) .mode(0o777) .open(te.test_root().join("executable-file.sh")) .unwrap(); fs::OpenOptions::new() .create(true) .truncate(true) .write(true) .mode(0o645) .open(te.test_root().join("not-user-executable-file.sh")) .unwrap(); te.assert_output(&["--type", "executable"], "executable-file.sh"); te.assert_output( &["--type", "executable", "--type", "directory"], "executable-file.sh one/ one/two/ one/two/three/ one/two/three/directory_foo/", ); } /// Test `--type empty` #[test] fn test_type_empty() { let te = TestEnv::new(&["dir_empty", "dir_nonempty"], &[]); create_file_with_size(te.test_root().join("0_bytes.foo"), 0); create_file_with_size(te.test_root().join("5_bytes.foo"), 5); create_file_with_size(te.test_root().join("dir_nonempty").join("2_bytes.foo"), 2); te.assert_output( &["--type", "empty"], "0_bytes.foo dir_empty/", ); te.assert_output( &["--type", "empty", 
"--type", "file", "--type", "directory"], "0_bytes.foo dir_empty/", ); te.assert_output(&["--type", "empty", "--type", "file"], "0_bytes.foo"); te.assert_output(&["--type", "empty", "--type", "directory"], "dir_empty/"); } /// File extension (--extension) #[test] fn test_extension() { let te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES); te.assert_output( &["--extension", "foo"], "a.foo one/b.foo one/two/c.foo one/two/three/d.foo", ); te.assert_output( &["--extension", ".foo"], "a.foo one/b.foo one/two/c.foo one/two/three/d.foo", ); te.assert_output( &["--extension", ".foo", "--extension", "foo2"], "a.foo one/b.foo one/two/c.foo one/two/three/d.foo
rust
Apache-2.0
5f95a781212e3efbc6d91ae50e0ca0ce0693da50
2026-01-04T15:31:59.463143Z
true
sharkdp/fd
https://github.com/sharkdp/fd/blob/5f95a781212e3efbc6d91ae50e0ca0ce0693da50/tests/testenv/mod.rs
tests/testenv/mod.rs
use std::env; use std::fs; use std::io::{self, Write}; #[cfg(unix)] use std::os::unix; #[cfg(windows)] use std::os::windows; use std::path::{Path, PathBuf}; use std::process; use tempfile::TempDir; /// Environment for the integration tests. pub struct TestEnv { /// Temporary working directory. temp_dir: TempDir, /// Path to the *fd* executable. fd_exe: PathBuf, /// Normalize each line by sorting the whitespace-separated words normalize_line: bool, /// Temporary directory for storing test config (global ignore file) config_dir: Option<TempDir>, } /// Create the working directory and the test files. fn create_working_directory( directories: &[&'static str], files: &[&'static str], ) -> Result<TempDir, io::Error> { let temp_dir = tempfile::Builder::new().prefix("fd-tests").tempdir()?; { let root = temp_dir.path(); // Pretend that this is a Git repository in order for `.gitignore` files to be respected fs::create_dir_all(root.join(".git"))?; for directory in directories { fs::create_dir_all(root.join(directory))?; } for file in files { fs::File::create(root.join(file))?; } #[cfg(unix)] unix::fs::symlink(root.join("one/two"), root.join("symlink"))?; // Note: creating symlinks on Windows requires the `SeCreateSymbolicLinkPrivilege` which // is by default only granted for administrators. #[cfg(windows)] windows::fs::symlink_dir(root.join("one/two"), root.join("symlink"))?; fs::File::create(root.join(".fdignore"))?.write_all(b"fdignored.foo")?; fs::File::create(root.join(".gitignore"))?.write_all(b"gitignored.foo")?; } Ok(temp_dir) } fn create_config_directory_with_global_ignore(ignore_file_content: &str) -> io::Result<TempDir> { let config_dir = tempfile::Builder::new().prefix("fd-config").tempdir()?; let fd_dir = config_dir.path().join("fd"); fs::create_dir(&fd_dir)?; let mut ignore_file = fs::File::create(fd_dir.join("ignore"))?; ignore_file.write_all(ignore_file_content.as_bytes())?; Ok(config_dir) } /// Find the *fd* executable. 
fn find_fd_exe() -> PathBuf { // Read the location of the fd executable from the environment PathBuf::from(env::var("CARGO_BIN_EXE_fd").unwrap_or(env!("CARGO_BIN_EXE_fd").to_string())) } /// Format an error message for when *fd* did not exit successfully. fn format_exit_error(args: &[&str], output: &process::Output) -> String { format!( "`fd {}` did not exit successfully.\nstdout:\n---\n{}---\nstderr:\n---\n{}---", args.join(" "), String::from_utf8_lossy(&output.stdout), String::from_utf8_lossy(&output.stderr) ) } /// Format an error message for when the output of *fd* did not match the expected output. fn format_output_error(args: &[&str], expected: &str, actual: &str) -> String { // Generate diff text. let diff_text = diff::lines(expected, actual) .into_iter() .map(|diff| match diff { diff::Result::Left(l) => format!("-{l}"), diff::Result::Both(l, _) => format!(" {l}"), diff::Result::Right(r) => format!("+{r}"), }) .collect::<Vec<_>>() .join("\n"); format!( concat!( "`fd {}` did not produce the expected output.\n", "Showing diff between expected and actual:\n{}\n" ), args.join(" "), diff_text ) } /// Normalize the output for comparison. fn normalize_output(s: &str, trim_start: bool, normalize_line: bool) -> String { // Split into lines and normalize separators. let mut lines = s .replace('\0', "NULL\n") .lines() .map(|line| { let line = if trim_start { line.trim_start() } else { line }; let line = line.replace('/', std::path::MAIN_SEPARATOR_STR); if normalize_line { let mut words: Vec<_> = line.split_whitespace().collect(); words.sort_unstable(); return words.join(" "); } line }) .collect::<Vec<_>>(); lines.sort(); lines.join("\n") } /// Trim whitespace from the beginning of each line. 
/// Trim leading whitespace from each line; the result always ends in '\n'
/// (one per input line), or is empty for empty input.
fn trim_lines(s: &str) -> String {
    s.lines()
        .map(|line| line.trim_start())
        .fold(String::new(), |mut str, line| {
            str.push_str(line);
            str.push('\n');
            str
        })
}

impl TestEnv {
    /// Build a fresh environment: create the fixture tree and locate `fd`.
    /// Panics if either step fails — acceptable in test setup.
    pub fn new(directories: &[&'static str], files: &[&'static str]) -> TestEnv {
        let temp_dir = create_working_directory(directories, files).expect("working directory");
        let fd_exe = find_fd_exe();

        TestEnv {
            temp_dir,
            fd_exe,
            normalize_line: false,
            config_dir: None,
        }
    }

    /// Builder-style toggle for per-line word sorting during output comparison.
    pub fn normalize_line(self, normalize: bool) -> TestEnv {
        TestEnv {
            temp_dir: self.temp_dir,
            fd_exe: self.fd_exe,
            normalize_line: normalize,
            config_dir: self.config_dir,
        }
    }

    /// Builder-style setter: provide a global ignore file with `content`;
    /// `run_command` will point XDG_CONFIG_HOME at it.
    pub fn global_ignore_file(self, content: &str) -> TestEnv {
        let config_dir =
            create_config_directory_with_global_ignore(content).expect("config directory");
        TestEnv {
            config_dir: Some(config_dir),
            ..self
        }
    }

    /// Create a broken symlink at the given path in the temp_dir.
    ///
    /// The link target lives in a nested TempDir that is dropped before this
    /// function returns — that is what makes the link dangle.
    pub fn create_broken_symlink<P: AsRef<Path>>(
        &mut self,
        link_path: P,
    ) -> Result<PathBuf, io::Error> {
        let root = self.test_root();
        let broken_symlink_link = root.join(link_path);
        {
            let temp_target_dir = tempfile::Builder::new()
                .prefix("fd-tests-broken-symlink")
                .tempdir()?;
            let broken_symlink_target = temp_target_dir.path().join("broken_symlink_target");
            fs::File::create(&broken_symlink_target)?;
            #[cfg(unix)]
            unix::fs::symlink(&broken_symlink_target, &broken_symlink_link)?;
            #[cfg(windows)]
            windows::fs::symlink_file(&broken_symlink_target, &broken_symlink_link)?;
            // temp_target_dir dropped here -> target removed -> link is broken
        }
        Ok(broken_symlink_link)
    }

    /// Get the root directory for the tests.
    pub fn test_root(&self) -> PathBuf {
        self.temp_dir.path().to_path_buf()
    }

    /// Get the path of the fd executable.
    #[cfg_attr(windows, allow(unused))]
    pub fn test_exe(&self) -> &PathBuf {
        &self.fd_exe
    }

    /// Get the root directory of the file system.
    pub fn system_root(&self) -> PathBuf {
        let mut components = self.temp_dir.path().components();
        PathBuf::from(components.next().expect("root directory").as_os_str())
    }

    /// Assert that calling *fd* in the specified path under the root working directory,
    /// and with the specified arguments produces the expected output.
    pub fn assert_success_and_get_output<P: AsRef<Path>>(
        &self,
        path: P,
        args: &[&str],
    ) -> process::Output {
        // Run *fd*.
        let output = self.run_command(path.as_ref(), args);

        // Check for exit status.
        if !output.status.success() {
            panic!("{}", format_exit_error(args, &output));
        }
        output
    }

    /// Run *fd*, assert success, and return stdout normalized via
    /// `normalize_output` (no leading-whitespace trimming).
    pub fn assert_success_and_get_normalized_output<P: AsRef<Path>>(
        &self,
        path: P,
        args: &[&str],
    ) -> String {
        let output = self.assert_success_and_get_output(path, args);
        normalize_output(
            &String::from_utf8_lossy(&output.stdout),
            false,
            self.normalize_line,
        )
    }

    /// Assert that calling *fd* with the specified arguments produces the expected output.
    pub fn assert_output(&self, args: &[&str], expected: &str) {
        self.assert_output_subdirectory(".", args, expected)
    }

    /// Similar to assert_output, but able to handle non-utf8 output
    #[cfg(all(unix, not(target_os = "macos")))]
    pub fn assert_output_raw(&self, args: &[&str], expected: &[u8]) {
        let output = self.assert_success_and_get_output(".", args);

        assert_eq!(expected, &output.stdout[..]);
    }

    /// Assert that calling *fd* in the specified path under the root working directory,
    /// and with the specified arguments produces the expected output.
    pub fn assert_output_subdirectory<P: AsRef<Path>>(
        &self,
        path: P,
        args: &[&str],
        expected: &str,
    ) {
        // Normalize both expected and actual output.
        let expected = normalize_output(expected, true, self.normalize_line);
        let actual = self.assert_success_and_get_normalized_output(path, args);

        // Compare actual output to expected output.
        if expected != actual {
            panic!("{}", format_output_error(args, &expected, &actual));
        }
    }

    /// Assert that calling *fd* with the specified arguments produces the expected error,
    /// and does not succeed.
    pub fn assert_failure_with_error(&self, args: &[&str], expected: &str) {
        let status = self.assert_error_subdirectory(".", args, Some(expected));
        if status.success() {
            panic!("error '{expected}' did not occur.");
        }
    }

    /// Assert that calling *fd* with the specified arguments does not succeed.
    pub fn assert_failure(&self, args: &[&str]) {
        let status = self.assert_error_subdirectory(".", args, None);
        if status.success() {
            panic!("Failure did not occur as expected.");
        }
    }

    /// Assert that calling *fd* with the specified arguments produces the expected error.
    /// Returns the exit status so callers can make further assertions.
    pub fn assert_error(&self, args: &[&str], expected: &str) -> process::ExitStatus {
        self.assert_error_subdirectory(".", args, Some(expected))
    }

    /// Spawn `fd` in `path` (relative to the temp root) and capture its output.
    fn run_command(&self, path: &Path, args: &[&str]) -> process::Output {
        // Setup *fd* command.
        let mut cmd = process::Command::new(&self.fd_exe);
        cmd.current_dir(self.temp_dir.path().join(path));
        // Either route fd at our synthetic global ignore file, or disable the
        // real one so the host machine's config can't leak into the test.
        if let Some(config_dir) = &self.config_dir {
            cmd.env("XDG_CONFIG_HOME", config_dir.path());
        } else {
            cmd.arg("--no-global-ignore-file");
        }

        // Make sure LS_COLORS is unset to ensure consistent
        // color output
        cmd.env("LS_COLORS", "");
        cmd.args(args);

        // Run *fd*.
        cmd.output().expect("fd output")
    }

    /// Assert that calling *fd* in the specified path under the root working directory,
    /// and with the specified arguments produces an error with the expected message.
    /// Only the *prefix* of stderr is checked, after per-line trimming.
    fn assert_error_subdirectory<P: AsRef<Path>>(
        &self,
        path: P,
        args: &[&str],
        expected: Option<&str>,
    ) -> process::ExitStatus {
        let output = self.run_command(path.as_ref(), args);

        if let Some(expected) = expected {
            // Normalize both expected and actual output.
            let expected_error = trim_lines(expected);
            let actual_err = trim_lines(&String::from_utf8_lossy(&output.stderr));

            // Compare actual output to expected output.
            if !actual_err.trim_start().starts_with(&expected_error) {
                panic!(
                    "{}",
                    format_output_error(args, &expected_error, &actual_err)
                );
            }
        }
        output.status
    }
}
rust
Apache-2.0
5f95a781212e3efbc6d91ae50e0ca0ce0693da50
2026-01-04T15:31:59.463143Z
false
FuelLabs/fuels-rs
https://github.com/FuelLabs/fuels-rs/blob/865e00c295de8b4a0a1ef7ac926c3c8266d5151b/scripts/versions-replacer/src/lib.rs
scripts/versions-replacer/src/lib.rs
//! versions-replacer: collects workspace crate versions and substitutes
//! `{{versions.<name>}}` placeholders in documentation files.

/// Version collection via `cargo metadata`.
pub mod metadata;
/// Placeholder substitution in strings and files.
pub mod replace;
rust
Apache-2.0
865e00c295de8b4a0a1ef7ac926c3c8266d5151b
2026-01-04T15:31:59.450823Z
false
FuelLabs/fuels-rs
https://github.com/FuelLabs/fuels-rs/blob/865e00c295de8b4a0a1ef7ac926c3c8266d5151b/scripts/versions-replacer/src/replace.rs
scripts/versions-replacer/src/replace.rs
//! Substitution of `{{versions.<name>}}` placeholders with concrete versions.

use std::{borrow::Cow, collections::HashMap, fs, path::Path};

use color_eyre::{Result, eyre::Context};
use once_cell::sync::Lazy;
use regex::{Captures, Regex};

/// Matches `{{versions.<name>}}`; capture group 1 is `<name>`
/// (word characters, `_`, `-`).
pub static VERSIONS_REGEX: Lazy<Regex> =
    Lazy::new(|| Regex::new(r"\{\{versions\.([\w_-]+)\}\}").unwrap());

/// Replace all placeholders in the file at `path` using `versions`
/// (package name -> version string) and return how many were replaced.
/// The file is only rewritten when at least one replacement happened.
pub fn replace_versions_in_file(
    path: impl AsRef<Path>,
    versions: &HashMap<String, String>,
) -> Result<usize> {
    let path = path.as_ref();
    let contents =
        fs::read_to_string(path).wrap_err_with(|| format!("failed to read {:?}", path))?;

    let (replaced_contents, replacement_count) = replace_versions_in_string(&contents, versions);

    if replacement_count > 0 {
        fs::write(path, replaced_contents.as_bytes())
            .wrap_err_with(|| format!("failed to write back to {:?}", path))?;
    }

    Ok(replacement_count)
}

/// Replace all placeholders in `s`; placeholders whose name is not present in
/// `versions` are left untouched. Returns the (possibly borrowed) result and
/// the number of successful replacements.
pub fn replace_versions_in_string<'a>(
    s: &'a str,
    versions: &HashMap<String, String>,
) -> (Cow<'a, str>, usize) {
    let mut replacement_count = 0;

    let replaced_s = VERSIONS_REGEX.replace_all(s, |caps: &Captures| {
        if let Some(version) = versions.get(&caps[1]) {
            replacement_count += 1;
            version.clone()
        } else {
            // leave unchanged
            caps[0].to_string()
        }
    });

    (replaced_s, replacement_count)
}

#[cfg(test)]
mod tests {
    use super::*;

    // Fixed name->version fixture shared by the tests below.
    fn test_versions() -> HashMap<String, String> {
        [("fuels", "0.47.0"), ("fuel-types", "0.35.3")]
            .map(|(name, version)| (name.to_string(), version.to_string()))
            .into()
    }

    #[test]
    fn test_valid_replacements() {
        let s = "docs.rs/fuels/{{versions.fuels}}/fuels\ndocs.rs/fuel-types/{{versions.fuel-types}}/fuel-types";
        let versions = test_versions();

        let (replaced, count) = replace_versions_in_string(s, &versions);

        assert_eq!(
            replaced,
            format!(
                "docs.rs/fuels/{}/fuels\ndocs.rs/fuel-types/{}/fuel-types",
                versions["fuels"], versions["fuel-types"]
            )
        );
        assert_eq!(count, 2);
    }

    #[test]
    fn test_invalid_replacement() {
        // mdBook include syntax must not be mistaken for a versions placeholder.
        let s = "```rust,ignore {{#include ../../../examples/contracts/src/lib.rs:deployed_contracts}} ```";
        let versions = test_versions();

        let (replaced, count) = replace_versions_in_string(s, &versions);

        assert_eq!(replaced, s);
        assert_eq!(count, 0);
    }

    #[test]
    fn test_invalid_package_name() {
        // A placeholder with an unknown package name is preserved verbatim.
        let s = "docs.rs/fuels-wrong-name/{{versions.fuels-wrong-name}}/fuels-wrong-name";
        let versions = test_versions();

        let (replaced, count) = replace_versions_in_string(s, &versions);

        assert_eq!(replaced, s);
        assert_eq!(count, 0);
    }
}
rust
Apache-2.0
865e00c295de8b4a0a1ef7ac926c3c8266d5151b
2026-01-04T15:31:59.450823Z
false
FuelLabs/fuels-rs
https://github.com/FuelLabs/fuels-rs/blob/865e00c295de8b4a0a1ef7ac926c3c8266d5151b/scripts/versions-replacer/src/main.rs
scripts/versions-replacer/src/main.rs
use std::path::PathBuf;

use argh::FromArgs;
use color_eyre::{
    Result,
    eyre::{Context, eyre},
};
use regex::Regex;
use versions_replacer::{
    metadata::collect_versions_from_cargo_toml, replace::replace_versions_in_file,
};
use walkdir::WalkDir;

#[derive(FromArgs)]
/// Replace variables like '{{{{versions.fuels}}}}' with correct versions from Cargo.toml.
/// Uses versions from '[workspace.members]' and '[workspace.metadata.versions-replacer.external-versions]'.
struct VersionsReplacer {
    /// path to directory with files containing variables
    #[argh(positional)]
    path: PathBuf,
    /// path to Cargo.toml with versions
    #[argh(option)]
    manifest_path: PathBuf,
    /// regex to filter filenames (example: "\.md$")
    #[argh(option)]
    filename_regex: Option<Regex>,
}

/// Walk `path` recursively, run the placeholder replacement over every file
/// (optionally filtered by `--filename-regex`), and print a summary.
fn main() -> Result<()> {
    let args: VersionsReplacer = argh::from_env();
    let versions = collect_versions_from_cargo_toml(&args.manifest_path)?;

    // One entry per file that actually had at least one replacement.
    let mut total_replacements: Vec<usize> = Vec::new();

    for entry in WalkDir::new(&args.path) {
        let entry = entry.wrap_err("failed to get directory entry")?;

        if entry.path().is_file() {
            if let Some(filename_regex) = &args.filename_regex {
                // The filter applies to the file name only, not the full path.
                let file_name = entry
                    .path()
                    .file_name()
                    .ok_or_else(|| eyre!("{:?} has an invalid file name", entry.path()))?
                    .to_str()
                    .ok_or_else(|| eyre!("filename is not valid UTF-8"))?;
                if !filename_regex.is_match(file_name) {
                    continue;
                }
            }

            let replacement_count = replace_versions_in_file(entry.path(), &versions)
                .wrap_err_with(|| format!("failed to replace versions in {:?}", entry.path()))?;

            if replacement_count > 0 {
                total_replacements.push(replacement_count);
            }
        }
    }

    println!(
        "replaced {} variables across {} files",
        total_replacements.iter().sum::<usize>(),
        total_replacements.len()
    );

    Ok(())
}
rust
Apache-2.0
865e00c295de8b4a0a1ef7ac926c3c8266d5151b
2026-01-04T15:31:59.450823Z
false
FuelLabs/fuels-rs
https://github.com/FuelLabs/fuels-rs/blob/865e00c295de8b4a0a1ef7ac926c3c8266d5151b/scripts/versions-replacer/src/metadata.rs
scripts/versions-replacer/src/metadata.rs
//! Collects crate versions by shelling out to `cargo metadata`.

use std::{collections::HashMap, path::Path};

use cargo_metadata::MetadataCommand;
use color_eyre::{Result, eyre::Context};
use serde::Deserialize;

/// Shape of `[workspace.metadata]` expected in the root Cargo.toml.
// NOTE(review): these two structs model
// `[workspace.metadata.versions-replacer.external-versions]`, but nothing in
// this file reads them — `collect_versions_from_cargo_toml` below only uses
// `metadata.packages`. Confirm whether external versions were meant to be
// merged into the returned map (the binary's help text claims they are used).
#[derive(Deserialize)]
#[serde(rename_all = "kebab-case")]
pub struct WorkspaceMetadata {
    pub versions_replacer: VersionsReplacerMetadata,
}

/// The `versions-replacer` table: extra name -> version entries that are not
/// workspace packages.
#[derive(Deserialize)]
#[serde(rename_all = "kebab-case")]
pub struct VersionsReplacerMetadata {
    pub external_versions: HashMap<String, String>,
}

/// Run `cargo metadata` against `manifest_path` and return a map of
/// package name -> version string for every package in the metadata output.
pub fn collect_versions_from_cargo_toml(
    manifest_path: impl AsRef<Path>,
) -> Result<HashMap<String, String>> {
    let metadata = MetadataCommand::new()
        .manifest_path(manifest_path.as_ref())
        .exec()
        .wrap_err("failed to execute 'cargo metadata'")?;

    let version_map = metadata
        .packages
        .iter()
        .map(|package| (package.name.clone(), package.version.to_string()))
        .collect::<HashMap<_, _>>();

    Ok(version_map)
}
rust
Apache-2.0
865e00c295de8b4a0a1ef7ac926c3c8266d5151b
2026-01-04T15:31:59.450823Z
false
FuelLabs/fuels-rs
https://github.com/FuelLabs/fuels-rs/blob/865e00c295de8b4a0a1ef7ac926c3c8266d5151b/scripts/fuel-core-version/src/main.rs
scripts/fuel-core-version/src/main.rs
use std::{ fs, path::{Path, PathBuf}, }; use clap::{Parser, Subcommand}; use color_eyre::{ Result, eyre::{ContextCompat, OptionExt, bail}, }; use fuels_accounts::provider::SUPPORTED_FUEL_CORE_VERSION; use semver::Version; use toml::Value; fn write_version_to_file(version: Version, version_file_path: impl AsRef<Path>) -> Result<()> { let Version { major, minor, patch, .. } = version; let text = format!("Version::new({major}, {minor}, {patch})"); fs::write(version_file_path, text.as_bytes())?; Ok(()) } fn get_version_file_path( manifest_path: impl AsRef<Path>, ) -> Result<PathBuf, color_eyre::eyre::Error> { Ok(manifest_path .as_ref() .parent() .wrap_err("Invalid manifest path")? .join("scripts/fuel-core-version/version.rs")) } fn verify_version_from_file(version: Version) -> Result<()> { if version != SUPPORTED_FUEL_CORE_VERSION { bail!( "fuel_core version in version.rs ({}) doesn't match one in Cargo.toml ({})", SUPPORTED_FUEL_CORE_VERSION, version ); } println!( "fuel_core versions in versions.rs and Cargo.toml match ({})", version ); Ok(()) } #[derive(Debug, Parser)] struct App { #[clap(subcommand)] command: Command, #[clap(long)] manifest_path: PathBuf, } #[derive(Debug, Subcommand)] enum Command { Write, Verify, } fn main() -> Result<()> { let App { command, manifest_path, } = App::parse(); let version = read_fuel_core_version(&manifest_path)?; let version_file_path = get_version_file_path(&manifest_path)?; match command { Command::Write => write_version_to_file(version, version_file_path)?, Command::Verify => verify_version_from_file(version)?, } Ok(()) } pub fn read_fuel_core_version(path: impl AsRef<Path>) -> color_eyre::Result<Version> { let cargo_toml: Value = fs::read_to_string(path.as_ref())?.parse::<Value>()?; let str_version = find_dependency_version(&cargo_toml).ok_or_eyre("could not find fuel-core version")?; Ok(str_version.parse()?) } fn find_dependency_version(toml: &Value) -> Option<String> { match toml .get("workspace")? .get("dependencies")? 
.get("fuel-core")? { Value::String(version) => Some(version.clone()), Value::Table(table) => table .get("version") .and_then(|v| v.as_str()) .map(String::from), _ => None, } }
rust
Apache-2.0
865e00c295de8b4a0a1ef7ac926c3c8266d5151b
2026-01-04T15:31:59.450823Z
false
FuelLabs/fuels-rs
https://github.com/FuelLabs/fuels-rs/blob/865e00c295de8b4a0a1ef7ac926c3c8266d5151b/scripts/check-docs/src/lib.rs
scripts/check-docs/src/lib.rs
//! Helpers for validating mdBook anchors, includes, and SUMMARY.md coverage.

use std::{
    collections::HashSet,
    path::{Path, PathBuf},
};

use anyhow::{Error, anyhow, bail};
use itertools::{Itertools, chain};
use regex::Regex;

/// Print `errors` to stderr under an "Invalid {error_type} detected!" header;
/// silent when the slice is empty.
pub fn report_errors(error_type: &str, errors: &[Error]) {
    if !errors.is_empty() {
        eprintln!("\nInvalid {error_type} detected!\n");
        for error in errors {
            eprintln!("{error}\n")
        }
    }
}

/// Print `warnings` to stderr under a "Warnings detected!" header;
/// silent when the slice is empty.
pub fn report_warnings(warnings: &[Error]) {
    if !warnings.is_empty() {
        eprintln!("\nWarnings detected!\n");
        for warning in warnings {
            eprintln!("{warning}\n")
        }
    }
}

/// Match each include directive against the valid anchors.
/// Returns (errors for unsatisfied includes, warnings for anchors no include uses).
pub fn validate_includes(
    includes: Vec<Include>,
    valid_anchors: Vec<Anchor>,
) -> (Vec<Error>, Vec<Error>) {
    let (pairs, errors): (Vec<_>, Vec<_>) = includes
        .into_iter()
        // Includes without an anchor name pull in the whole file; nothing to match.
        .filter(|include| !include.anchor_name.is_empty())
        .map(|include| {
            // NOTE(review): the `mut` + `take()` is redundant — `find` already
            // yields the Option being matched on.
            let mut maybe_anchor = valid_anchors.iter().find(|anchor| {
                anchor.file == include.anchor_file && anchor.name == include.anchor_name
            });
            match maybe_anchor.take() {
                Some(anchor) => Ok(anchor.clone()),
                None => Err(anyhow!(
                    "No anchor available to satisfy include {include:?}"
                )),
            }
        })
        .partition_result();

    let additional_warnings = valid_anchors
        .iter()
        .filter(|valid_anchor| {
            let anchor_used_in_a_pair = pairs.iter().any(|anchor| anchor == *valid_anchor);
            !anchor_used_in_a_pair
        })
        .map(|unused_anchor| anyhow!("Anchor unused: {unused_anchor:?}!"))
        .collect::<Vec<_>>();

    (errors, additional_warnings)
}

/// A parsed `{{#include file:anchor}}` directive found in the docs.
#[allow(dead_code)]
#[derive(Debug, Clone)]
pub struct Include {
    pub anchor_name: String,
    pub anchor_file: PathBuf,
    pub include_file: PathBuf,
    pub line_no: usize,
}

/// Parse grep output (`file:line:text`) for include directives.
/// Returns successfully parsed includes and per-line parse errors.
pub fn parse_includes(text_w_includes: String) -> (Vec<Include>, Vec<Error>) {
    let apply_regex = |regex: Regex| {
        let (includes, errors): (Vec<_>, Vec<_>) = text_w_includes
            .lines()
            .filter_map(|line| regex.captures(line))
            .map(|capture| {
                let include_file = PathBuf::from(&capture[1]).canonicalize()?;
                let line_no = capture[2].parse()?;
                let anchor_file = PathBuf::from(&capture[3]);
                // Group 4 (the anchor name) is optional: whole-file includes.
                let anchor_name = capture.get(4).map_or("", |m| m.as_str()).to_string();

                // The anchor path is relative to the including file's directory.
                let the_path = include_file.parent().unwrap().join(anchor_file);
                let anchor_file = the_path.canonicalize().map_err(|err| {
                    anyhow!(
                        "{the_path:?} when canonicalized gives error {err:?}\ninclude_file: {:?}",
                        include_file
                    )
                })?;

                Ok(Include {
                    anchor_name,
                    anchor_file,
                    include_file,
                    line_no,
                })
            })
            .partition_result();
        (includes, errors)
    };

    apply_regex(
        Regex::new(r"^(\S+):(\d+):\s*\{\{\s*#include\s*(\S+?)\s*(?::\s*(\S+)\s*)?\}\}")
            .expect("could not construct regex"),
    )
}

/// Pair each ANCHOR start with exactly one ANCHOR_END (same name, same file).
/// Returns the valid starts and all pairing errors (missing end, duplicate
/// ends, end-before-start, and ends that never matched a start).
pub fn filter_valid_anchors(starts: Vec<Anchor>, ends: Vec<Anchor>) -> (Vec<Anchor>, Vec<Error>) {
    let find_anchor_end_by_name = |anchor_name: &str, file: &Path| {
        ends.iter()
            .filter(|el| el.name == *anchor_name && el.file == file)
            .collect::<Vec<_>>()
    };

    let (pairs, errors): (Vec<_>, Vec<_>) = starts.into_iter().map(|start| {
        let matches_by_name = find_anchor_end_by_name(&start.name, &start.file);

        let (begin, end) = match matches_by_name.as_slice() {
            [single_match] => Ok((start, (*single_match).clone())),
            [] => Err(anyhow!("Couldn't find a matching end anchor for {start:?}")),
            multiple_ends => Err(anyhow!("Found too many matching anchor ends for anchor: {start:?}. The matching ends are: {multiple_ends:?}")),
        }?;

        match check_validity_of_anchor_pair(&begin, &end) {
            None => Ok((begin, end)),
            Some(err) => {
                let err_msg = err.to_string();
                Err(anyhow!("{err_msg}"))
            }
        }
    }).partition_result();

    let additional_errors = filter_unused_ends(&ends, &pairs)
        .into_iter()
        .map(|unused_end| anyhow!("Missing anchor start for {unused_end:?}"))
        .collect::<Vec<_>>();

    let start_only = pairs.into_iter().map(|(begin, _)| begin).collect();

    (start_only, chain!(errors, additional_errors).collect())
}

/// Return the ends that do not appear as the second element of any pair.
pub fn filter_unused_ends<'a>(ends: &'a [Anchor], pairs: &[(Anchor, Anchor)]) -> Vec<&'a Anchor> {
    ends.iter()
        .filter(|end| {
            let end_used_in_pairs = pairs.iter().any(|(_, used_end)| *end == used_end);
            !end_used_in_pairs
        })
        .collect()
}

/// An anchor pair is invalid if the end line precedes the begin line.
/// Returns `Some(error)` for invalid pairs, `None` when the pair is fine.
pub fn check_validity_of_anchor_pair(begin: &Anchor, end: &Anchor) -> Option<anyhow::Error> {
    if begin.line_no > end.line_no {
        Some(anyhow!("The end of the anchor appears before the beginning. End anchor: {end:?}. Begin anchor: {begin:?}"))
    } else {
        None
    }
}

/// An `ANCHOR:`/`ANCHOR_END:` marker located in a source file.
#[derive(Debug, Clone, Hash, Eq, PartialEq)]
pub struct Anchor {
    pub line_no: usize,
    pub name: String,
    pub file: PathBuf,
}

/// Parse grep output for `ANCHOR:` and `ANCHOR_END:` markers (in `//`-style or
/// `/* */`-style comments). Returns (starts, ends).
pub fn extract_starts_and_ends(
    text_w_anchors: &str,
) -> anyhow::Result<(Vec<Anchor>, Vec<Anchor>), Error> {
    let apply_regex = |regex: Regex| {
        text_w_anchors
            .lines()
            .filter_map(|line| regex.captures(line))
            .map(|capture| {
                let file = PathBuf::from(&capture[1]).canonicalize()?;
                let line_no = &capture[2];
                let anchor_name = &capture[3];

                Ok(Anchor {
                    line_no: line_no.parse()?,
                    name: anchor_name.to_string(),
                    file,
                })
            })
            .collect::<Result<Vec<_>, Error>>()
    };

    let begins = apply_regex(Regex::new(
        r"^(.+):(\d+):\s*(?:/{2,}|/\*)\s*ANCHOR\s*:\s*([\w_-]+)\s*(?:\*/)?",
    )?)?;
    let ends = apply_regex(Regex::new(
        r"^(.+):(\d+):\s*(?:/{2,}|/\*)\s*ANCHOR_END\s*:\s*([\w_-]+)\s*(?:\*/)?",
    )?)?;

    Ok((begins, ends))
}

/// Extract all `(...*.md)` link targets from `text_w_files`, resolve them
/// relative to `path`, and return the canonicalized set.
/// Panics if a referenced file cannot be canonicalized (i.e. does not exist).
pub fn parse_md_files(text_w_files: String, path: &str) -> HashSet<PathBuf> {
    let regex = Regex::new(r"\((.*\.md)\)").expect("could not construct regex");

    text_w_files
        .lines()
        .filter_map(|line| regex.captures(line))
        .map(|capture| {
            let path = PathBuf::from(path).join(&capture[1]);
            path.canonicalize()
                .unwrap_or_else(|e| panic!("could not canonicalize md path: {e} {path:?}"))
        })
        .collect()
}

/// One error per md file on disk (`md_files_in_src`, newline-separated) that
/// is not referenced by SUMMARY.md (`md_files_summary`).
pub fn validate_md_files(
    md_files_summary: HashSet<PathBuf>,
    md_files_in_src: String,
) -> Vec<Error> {
    md_files_in_src
        .lines()
        .filter_map(|file| {
            let file = PathBuf::from(file)
                .canonicalize()
                .expect("could not canonicalize md path");
            (!md_files_summary.contains(&file))
                .then(|| anyhow!("file `{}` not in SUMMARY.md", file.to_str().unwrap()))
        })
        .collect()
}

/// Recursively grep `location` for `pattern`; output lines are `file:line:text`.
/// Requires the `grep` binary on PATH.
pub fn search_for_pattern(pattern: &str, location: &str) -> anyhow::Result<String> {
    let grep_project = std::process::Command::new("grep")
        .arg("-H") // print filename
        .arg("-n") // print line-number
        .arg("-r") // search recursively
        .arg("--binary-files=without-match")
        .arg("--exclude-dir=check-docs")
        .arg(pattern)
        .arg(location)
        .output()
        .expect("failed grep command");

    if !grep_project.status.success() {
        bail!("Failed running `grep` command for pattern '{}'", pattern);
    }

    Ok(String::from_utf8(grep_project.stdout)?)
}

/// List files under `location` matching `pattern` but not named `exclude`,
/// one path per line. Requires the `find` binary on PATH.
pub fn find_files(pattern: &str, location: &str, exclude: &str) -> anyhow::Result<String> {
    let find = std::process::Command::new("find")
        .args([
            location, "-type", "f", "-name", pattern, "!", "-name", exclude,
        ])
        .output()
        .expect("Program `find` not in PATH");

    if !find.status.success() {
        bail!("Failed running `find` command for pattern {}", pattern);
    }

    Ok(String::from_utf8(find.stdout)?)
}
rust
Apache-2.0
865e00c295de8b4a0a1ef7ac926c3c8266d5151b
2026-01-04T15:31:59.450823Z
false
FuelLabs/fuels-rs
https://github.com/FuelLabs/fuels-rs/blob/865e00c295de8b4a0a1ef7ac926c3c8266d5151b/scripts/check-docs/src/main.rs
scripts/check-docs/src/main.rs
use anyhow::{Error, bail}; use check_docs::{ extract_starts_and_ends, filter_valid_anchors, find_files, parse_includes, parse_md_files, report_errors, search_for_pattern, validate_includes, validate_md_files, }; fn main() -> anyhow::Result<(), Error> { let text_w_anchors = search_for_pattern("ANCHOR", ".")?; let (starts, ends) = extract_starts_and_ends(&text_w_anchors)?; let (valid_anchors, anchor_errors) = filter_valid_anchors(starts, ends); let text_mentioning_include = search_for_pattern("{{#include", ".")?; let (includes, include_path_errors) = parse_includes(text_mentioning_include); let (include_errors, additional_warnings) = validate_includes(includes, valid_anchors); let text_with_md_files = search_for_pattern(".md", "./docs/src/SUMMARY.md")?; let md_files_in_summary = parse_md_files(text_with_md_files, "./docs/src/"); let md_files_in_src = find_files("*.md", "./docs/src/", "SUMMARY.md")?; let md_files_errors = validate_md_files(md_files_in_summary, md_files_in_src); report_errors("warning", &additional_warnings); report_errors("include paths", &include_path_errors); report_errors("anchors", &anchor_errors); report_errors("includes", &include_errors); report_errors("md files", &md_files_errors); if !anchor_errors.is_empty() || !include_errors.is_empty() || !include_path_errors.is_empty() || !additional_warnings.is_empty() || !md_files_errors.is_empty() { bail!("Finished with errors"); } Ok(()) }
rust
Apache-2.0
865e00c295de8b4a0a1ef7ac926c3c8266d5151b
2026-01-04T15:31:59.450823Z
false
FuelLabs/fuels-rs
https://github.com/FuelLabs/fuels-rs/blob/865e00c295de8b4a0a1ef7ac926c3c8266d5151b/scripts/check-docs/tests/harness.rs
scripts/check-docs/tests/harness.rs
//! Integration tests for check-docs, driven by the fixtures in
//! `tests/test_data/` (which contain deliberately broken anchors/includes).

use anyhow::Error;
use check_docs::{
    Anchor, Include, extract_starts_and_ends, filter_valid_anchors, find_files, parse_includes,
    parse_md_files, search_for_pattern, validate_includes, validate_md_files,
};

/// Wrapper so one helper can search anchors, includes, or error lists alike.
enum TestEnum {
    Anchor(Vec<Anchor>),
    Include(Vec<Include>),
    Errors(Vec<Error>),
}

/// True if any element matches `str`: exact name match for anchors/includes,
/// substring match for error messages.
fn contains_any(vec: &TestEnum, str: &str) -> bool {
    match vec {
        TestEnum::Anchor(anchor_vec) => anchor_vec.iter().any(|anchor| anchor.name == str),
        TestEnum::Include(include_vec) => {
            include_vec.iter().any(|include| include.anchor_name == str)
        }
        TestEnum::Errors(err_vec) => err_vec.iter().any(|err| err.to_string().contains(str)),
    }
}

#[test]
fn test_anchors() -> anyhow::Result<()> {
    // Anchor pairing: valid anchors recognized, broken ones reported.
    // The asserted line numbers refer to tests/test_data/test_anchor_data.rs.
    let test_data = search_for_pattern("ANCHOR", ".")?;
    let (starts, ends) = extract_starts_and_ends(&test_data)?;
    let (valid_anchors, anchor_errors) = filter_valid_anchors(starts, ends);

    let valid_vec = TestEnum::Anchor(valid_anchors.clone());
    let anchor_err_vec = TestEnum::Errors(anchor_errors);

    assert!(contains_any(&valid_vec, "test_anchor_line_comment"));
    assert!(contains_any(&valid_vec, "test_anchor_block_comment"));
    assert!(contains_any(&valid_vec, "test_with_more_forward_slashes"));
    assert!(!contains_any(&valid_vec, "no_anchor_with_this_name"));

    assert!(contains_any(
        &anchor_err_vec,
        "Missing anchor start for Anchor { line_no: 10, name: \"test_no_anchor_beginning\""
    ));
    assert!(contains_any(
        &anchor_err_vec,
        "Couldn't find a matching end anchor for Anchor { line_no: 12, name: \"test_no_anchor_end\""
    ));
    assert!(contains_any(
        &anchor_err_vec,
        "The end of the anchor appears before the beginning. End anchor: Anchor { line_no: 14, name: \"test_end_before_beginning\""
    ));
    assert!(contains_any(
        &anchor_err_vec,
        "Found too many matching anchor ends for anchor: Anchor { line_no: 17, name: \"test_same_name_multiple_time\""
    ));
    assert!(contains_any(
        &anchor_err_vec,
        "Found too many matching anchor ends for anchor: Anchor { line_no: 20, name: \"test_same_name_multiple_time\""
    ));
    // Caused by too many matching anchors
    assert!(contains_any(
        &anchor_err_vec,
        "Missing anchor start for Anchor { line_no: 18, name: \"test_same_name_multiple_time\""
    ));
    assert!(contains_any(
        &anchor_err_vec,
        "Missing anchor start for Anchor { line_no: 21, name: \"test_same_name_multiple_time\""
    ));

    // Include parsing: directives resolved, bad paths reported.
    let text_mentioning_include = search_for_pattern("{{#include", ".")?;
    let (includes, include_path_errors) = parse_includes(text_mentioning_include);

    let includes_vec = TestEnum::Include(includes.clone());
    assert!(contains_any(&includes_vec, "test_anchor_line_comment"));
    assert!(contains_any(&includes_vec, "test_anchor_block_comment"));
    assert!(contains_any(
        &includes_vec,
        "test_with_more_forward_slashes"
    ));
    assert!(contains_any(&includes_vec, "")); //Check the file include without anchor

    let include_path_errors = TestEnum::Errors(include_path_errors);
    assert!(contains_any(
        &include_path_errors,
        "test_anchor_data2.rs\" when canonicalized gives error Os { code: 2, kind: NotFound"
    ));
    assert!(contains_any(
        &include_path_errors,
        "test_anchor_data3.rs\" when canonicalized gives error Os { code: 2, kind: NotFound"
    ));

    // Include validation: includes pointing at missing anchors are errors.
    let (include_errors, _) = validate_includes(includes, valid_anchors);
    let include_err_vec = TestEnum::Errors(include_errors);
    assert!(contains_any(
        &include_err_vec,
        "No anchor available to satisfy include Include { anchor_name: \"no_existing_anchor\""
    ));

    Ok(())
}

#[test]
fn test_unused_md() -> anyhow::Result<()> {
    // An md file on disk but absent from SUMMARY.md must be flagged.
    let text_with_md_files = search_for_pattern(".md", "./tests/test_data/docs/src/SUMMARY.md")?;
    let md_files_in_summary = parse_md_files(text_with_md_files, "./tests/test_data/docs/src/");
    let md_files_in_src = find_files("*.md", "./tests/test_data/docs/src/", "SUMMARY.md")?;

    let md_files_errors = validate_md_files(md_files_in_summary, md_files_in_src);

    let error_msg = md_files_errors.first().unwrap().to_string();
    assert!(error_msg.contains("test-not-there.md` not in SUMMARY.md"));

    Ok(())
}
rust
Apache-2.0
865e00c295de8b4a0a1ef7ac926c3c8266d5151b
2026-01-04T15:31:59.450823Z
false
FuelLabs/fuels-rs
https://github.com/FuelLabs/fuels-rs/blob/865e00c295de8b4a0a1ef7ac926c3c8266d5151b/scripts/check-docs/tests/test_data/test_anchor_data.rs
scripts/check-docs/tests/test_data/test_anchor_data.rs
// ANCHOR: test_anchor_line_comment ///// ANCHOR_END: test_anchor_line_comment /* ANCHOR: test_anchor_block_comment */ /* ANCHOR_END: test_anchor_block_comment */ // ANCHOR: test_with_more_forward_slashes ///// ANCHOR_END: test_with_more_forward_slashes // ANCHOR_END: test_no_anchor_beginning // ANCHOR: test_no_anchor_end // ANCHOR_END: test_end_before_beginning // ANCHOR: test_end_before_beginning // ANCHOR: test_same_name_multiple_time // ANCHOR_END: test_same_name_multiple_time // ANCHOR: test_same_name_multiple_time // ANCHOR_END: test_same_name_multiple_time
rust
Apache-2.0
865e00c295de8b4a0a1ef7ac926c3c8266d5151b
2026-01-04T15:31:59.450823Z
false
FuelLabs/fuels-rs
https://github.com/FuelLabs/fuels-rs/blob/865e00c295de8b4a0a1ef7ac926c3c8266d5151b/scripts/change-log/src/adapters.rs
scripts/change-log/src/adapters.rs
/// GitHub adapter implemented on top of the `octocrab` client.
pub mod octocrab;
rust
Apache-2.0
865e00c295de8b4a0a1ef7ac926c3c8266d5151b
2026-01-04T15:31:59.450823Z
false
FuelLabs/fuels-rs
https://github.com/FuelLabs/fuels-rs/blob/865e00c295de8b4a0a1ef7ac926c3c8266d5151b/scripts/change-log/src/lib.rs
scripts/change-log/src/lib.rs
//! change-log: ports-and-adapters style changelog generator.

/// Concrete implementations of the ports (e.g. octocrab-backed GitHub access).
pub mod adapters;
/// Core changelog models and rendering logic.
pub mod domain;
/// Trait definitions the adapters implement.
pub mod ports;
rust
Apache-2.0
865e00c295de8b4a0a1ef7ac926c3c8266d5151b
2026-01-04T15:31:59.450823Z
false
FuelLabs/fuels-rs
https://github.com/FuelLabs/fuels-rs/blob/865e00c295de8b4a0a1ef7ac926c3c8266d5151b/scripts/change-log/src/ports.rs
scripts/change-log/src/ports.rs
/// Port (trait) describing the GitHub operations the changelog needs.
pub mod github;
rust
Apache-2.0
865e00c295de8b4a0a1ef7ac926c3c8266d5151b
2026-01-04T15:31:59.450823Z
false
FuelLabs/fuels-rs
https://github.com/FuelLabs/fuels-rs/blob/865e00c295de8b4a0a1ef7ac926c3c8266d5151b/scripts/change-log/src/main.rs
scripts/change-log/src/main.rs
use std::env;

use change_log::{
    adapters::octocrab::OctocrabAdapter, domain::changelog::generate_changelog,
    ports::github::GitHubPort,
};
use dialoguer::FuzzySelect;
use dotenv::dotenv;

/// Interactively build a changelog: pick a target branch and a previous
/// release tag, fetch the PRs in between, and print the rendered markdown.
///
/// Requires GITHUB_TOKEN; GITHUB_REPOSITORY_OWNER / GITHUB_REPOSITORY_NAME
/// default to FuelLabs / fuels-rs.
#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    // Load a local .env file if present; ignore errors when there is none.
    dotenv().ok();
    let github_token =
        env::var("GITHUB_TOKEN").expect("GITHUB_TOKEN is not set in the environment");
    let repo_owner = env::var("GITHUB_REPOSITORY_OWNER").unwrap_or_else(|_| "FuelLabs".to_string());
    let repo_name = env::var("GITHUB_REPOSITORY_NAME").unwrap_or_else(|_| "fuels-rs".to_string());

    let github_adapter = OctocrabAdapter::new(&github_token);

    // Offer "master" plus any lts/* branches as changelog targets.
    let branches = {
        let mut branches = vec!["master".to_string()];
        let lts_branches = github_adapter
            .search_branches(&repo_owner, &repo_name, "lts/")
            .await?;
        branches.extend(lts_branches);
        branches
    };

    let branch_selection = FuzzySelect::new()
        .with_prompt("Select the target branch (start typing to filter)")
        .items(&branches)
        .default(0)
        .interact()?;
    let target_branch = branches[branch_selection].clone();

    let releases = github_adapter.get_releases(&repo_owner, &repo_name).await?;
    if releases.is_empty() {
        return Err("No releases found for the repository".into());
    }
    let release_selection = FuzzySelect::new()
        .with_prompt("Select the previous release tag")
        .items(&releases)
        .default(0)
        .interact()?;
    let previous_release_tag = releases[release_selection].clone();

    // Progress goes to stderr so stdout stays clean markdown.
    eprintln!("Using branch: {}", target_branch);
    eprintln!("Using previous release: {}", previous_release_tag);

    let changelog_infos = github_adapter
        .get_changelog_infos(
            &repo_owner,
            &repo_name,
            &previous_release_tag,
            &target_branch,
        )
        .await?;

    let changelog_markdown = generate_changelog(changelog_infos);

    println!("{changelog_markdown}");

    Ok(())
}
rust
Apache-2.0
865e00c295de8b4a0a1ef7ac926c3c8266d5151b
2026-01-04T15:31:59.450823Z
false
FuelLabs/fuels-rs
https://github.com/FuelLabs/fuels-rs/blob/865e00c295de8b4a0a1ef7ac926c3c8266d5151b/scripts/change-log/src/domain.rs
scripts/change-log/src/domain.rs
/// Markdown changelog rendering.
pub mod changelog;
/// Data models (e.g. `ChangelogInfo`).
pub mod models;
rust
Apache-2.0
865e00c295de8b4a0a1ef7ac926c3c8266d5151b
2026-01-04T15:31:59.450823Z
false
FuelLabs/fuels-rs
https://github.com/FuelLabs/fuels-rs/blob/865e00c295de8b4a0a1ef7ac926c3c8266d5151b/scripts/change-log/src/domain/changelog.rs
scripts/change-log/src/domain/changelog.rs
use std::collections::{HashMap, HashSet}; use crate::domain::models::ChangelogInfo; fn category_from_pr_type(pr_type: &str) -> Option<&'static str> { match pr_type.trim_end_matches('!') { "feat" => Some("Features"), "fix" => Some("Fixes"), "chore" => Some("Chores"), _ => None, } } pub fn generate_changelog(changelogs: Vec<ChangelogInfo>) -> String { let mut content = String::new(); let mut non_breaking: HashMap<&str, Vec<String>> = HashMap::new(); let mut breaking: HashMap<&str, Vec<String>> = HashMap::new(); let mut migration_notes: Vec<String> = Vec::new(); let mut summary_set: HashSet<String> = HashSet::new(); for changelog in &changelogs { if !changelog.release_notes.is_empty() { summary_set.insert(changelog.release_notes.clone()); } if let Some(category) = category_from_pr_type(&changelog.pr_type) { if changelog.is_breaking { breaking .entry(category) .or_default() .push(changelog.bullet_point.clone()); migration_notes.push(changelog.migration_note.clone()); } else { non_breaking .entry(category) .or_default() .push(changelog.bullet_point.clone()); } } } if !summary_set.is_empty() { content.push_str("# Summary\n\nIn this release, we:\n"); let mut summary_lines: Vec<String> = summary_set.into_iter().collect(); summary_lines.sort(); for line in summary_lines { content.push_str(&format!("{}\n", line)); } content.push('\n'); } let categories = ["Features", "Fixes", "Chores"]; if !breaking.is_empty() { content.push_str("# Breaking\n\n"); for cat in &categories { if let Some(items) = breaking.get(cat) { content.push_str(&format!("- {}\n", cat)); let indented = items .iter() .map(|s| format!("\t{}", s)) .collect::<Vec<_>>() .join("\n"); content.push_str(&format!("{}\n\n", indented)); } } } let mut write_section = |title: &str, items: &[String]| { if !items.is_empty() { content.push_str(&format!("# {}\n\n", title)); content.push_str(&format!("{}\n\n", items.join("\n\n"))); } }; for cat in &categories { if let Some(items) = non_breaking.get(cat) { write_section(cat, 
items); } } if !migration_notes.is_empty() { write_section("Migration Notes", &migration_notes); } content.trim().to_string() } /// Utility function to capitalize a string. pub fn capitalize(s: &str) -> String { let mut c = s.chars(); match c.next() { None => String::new(), Some(f) => f.to_uppercase().collect::<String>() + c.as_str(), } } #[cfg(test)] mod tests { use super::*; use crate::domain::models::ChangelogInfo; #[test] fn test_generate_changelog_exact() { let changelog1 = ChangelogInfo { is_breaking: false, pr_type: "feat".to_string(), bullet_point: "- [#1](http://example.com) - Added feature, by @alice".to_string(), migration_note: "".to_string(), release_notes: "Added feature".to_string(), }; let changelog2 = ChangelogInfo { is_breaking: true, pr_type: "fix!".to_string(), bullet_point: "- [#2](http://example.com) - Fixed bug, by @bob".to_string(), migration_note: "### [2 - Fixed bug](http://example.com)\n\nCritical fix".to_string(), release_notes: "Fixed bug".to_string(), }; let changelog3 = ChangelogInfo { is_breaking: false, pr_type: "chore".to_string(), bullet_point: "- [#3](http://example.com) - Update dependencies, by @carol".to_string(), migration_note: "".to_string(), release_notes: "".to_string(), }; let changelogs = vec![changelog1, changelog2, changelog3]; let markdown = generate_changelog(changelogs); let expected = "\ # Summary In this release, we: Added feature Fixed bug # Breaking - Fixes \t- [#2](http://example.com) - Fixed bug, by @bob # Features - [#1](http://example.com) - Added feature, by @alice # Chores - [#3](http://example.com) - Update dependencies, by @carol # Migration Notes ### [2 - Fixed bug](http://example.com) Critical fix"; assert_eq!(markdown, expected); } }
rust
Apache-2.0
865e00c295de8b4a0a1ef7ac926c3c8266d5151b
2026-01-04T15:31:59.450823Z
false
FuelLabs/fuels-rs
https://github.com/FuelLabs/fuels-rs/blob/865e00c295de8b4a0a1ef7ac926c3c8266d5151b/scripts/change-log/src/domain/models.rs
scripts/change-log/src/domain/models.rs
#[derive(Debug, Clone)] pub struct ChangelogInfo { pub is_breaking: bool, pub pr_type: String, pub bullet_point: String, pub migration_note: String, pub release_notes: String, }
rust
Apache-2.0
865e00c295de8b4a0a1ef7ac926c3c8266d5151b
2026-01-04T15:31:59.450823Z
false
FuelLabs/fuels-rs
https://github.com/FuelLabs/fuels-rs/blob/865e00c295de8b4a0a1ef7ac926c3c8266d5151b/scripts/change-log/src/ports/github.rs
scripts/change-log/src/ports/github.rs
use crate::domain::models::ChangelogInfo; #[allow(async_fn_in_trait)] pub trait GitHubPort { /// Retrieve a collection of changelog infos based on the commit comparison between `base` and `head`. async fn get_changelog_infos( &self, owner: &str, repo: &str, base: &str, head: &str, ) -> Result<Vec<ChangelogInfo>, Box<dyn std::error::Error>>; /// Retrieve the latest release tag for the given repository. async fn get_latest_release_tag( &self, owner: &str, repo: &str, ) -> Result<String, Box<dyn std::error::Error>>; }
rust
Apache-2.0
865e00c295de8b4a0a1ef7ac926c3c8266d5151b
2026-01-04T15:31:59.450823Z
false
FuelLabs/fuels-rs
https://github.com/FuelLabs/fuels-rs/blob/865e00c295de8b4a0a1ef7ac926c3c8266d5151b/scripts/change-log/src/adapters/octocrab.rs
scripts/change-log/src/adapters/octocrab.rs
use octocrab::{Octocrab, models::pulls::PullRequest}; use regex::Regex; use serde_json::Value; use crate::{ domain::{changelog::capitalize, models::ChangelogInfo}, ports::github::GitHubPort, }; pub struct OctocrabAdapter { client: Octocrab, } impl OctocrabAdapter { pub fn new(token: &str) -> Self { let client = Octocrab::builder() .personal_token(token.to_string()) .build() .unwrap(); Self { client } } /// Retrieve the pull request associated with a commit SHA. async fn get_pr_for_commit( &self, owner: &str, repo: &str, commit_sha: &str, ) -> Result<PullRequest, Box<dyn std::error::Error>> { let pr_info = self .client .repos(owner, repo) .list_pulls(commit_sha.to_string()) .send() .await?; if pr_info.items.is_empty() { return Err("No PR found for this commit SHA".into()); } let pr = pr_info.items.into_iter().next().unwrap(); // Ignore PRs from "fuel-service-user" if pr.user.as_ref().map_or("", |u| &u.login) == "fuel-service-user" { return Err("PR from fuel-service-user ignored".into()); } Ok(pr) } pub async fn search_branches( &self, owner: &str, repo: &str, query: &str, ) -> Result<Vec<String>, Box<dyn std::error::Error>> { let payload = serde_json::json!({ "query": r#" query($owner: String!, $repo: String!, $query: String!) { repository(owner: $owner, name: $repo) { refs(refPrefix: "refs/heads/", query: $query, first: 100) { nodes { name } } } } "#, "variables": { "owner": owner, "repo": repo, "query": query, } }); let response: Value = self.client.graphql(&payload).await?; let nodes = response["data"]["repository"]["refs"]["nodes"] .as_array() .ok_or("Could not parse branch nodes from response")?; let branch_names = nodes .iter() .filter_map(|node| node["name"].as_str().map(|s| s.to_owned())) .collect(); Ok(branch_names) } /// Query GitHub for all releases in the repository. 
pub async fn get_releases( &self, owner: &str, repo: &str, ) -> Result<Vec<String>, Box<dyn std::error::Error>> { let releases = self .client .repos(owner, repo) .releases() .list() .per_page(100) .send() .await?; let release_tags = releases .items .into_iter() .map(|release| release.tag_name) .collect(); Ok(release_tags) } /// Build a ChangelogInfo instance from a commit. async fn build_changelog_info( &self, owner: &str, repo: &str, commit_sha: &str, ) -> Result<ChangelogInfo, Box<dyn std::error::Error>> { let pr = self.get_pr_for_commit(owner, repo, commit_sha).await?; let pr_title_full = pr.title.as_ref().unwrap_or(&"".to_string()).clone(); let pr_type = pr_title_full .split(':') .next() .unwrap_or("misc") .to_string(); let is_breaking = pr_title_full.contains('!'); let title_description = pr_title_full .split(':') .nth(1) .unwrap_or("") .trim() .to_string(); let pr_number = pr.number; let pr_author = pr.user.as_ref().map_or("", |u| &u.login).to_string(); let pr_url = pr.html_url.map(|u| u.to_string()).unwrap_or_default(); let bullet_point = format!( "- [#{}]({}) - {}, by @{}", pr_number, pr_url, title_description, pr_author ); let breaking_changes_regex = Regex::new(r"(?s)# Breaking Changes\s*(.*)")?; let breaking_changes = breaking_changes_regex .captures(pr.body.as_ref().unwrap_or(&String::new())) .and_then(|cap| cap.get(1)) .map(|m| { m.as_str() .split("\n# ") .next() .unwrap_or("") .trim() .to_string() }) .unwrap_or_default(); let release_notes_regex = Regex::new(r"(?s)In this release, we:\s*(.*)")?; let release_notes = release_notes_regex .captures(pr.body.as_ref().unwrap_or(&String::new())) .and_then(|cap| cap.get(1)) .map(|m| { m.as_str() .split("\n# ") .next() .unwrap_or("") .trim() .to_string() }) .unwrap_or_default(); let migration_note = format!( "### [{} - {}]({})\n\n{}", pr_number, capitalize(&title_description), pr_url, breaking_changes ); Ok(ChangelogInfo { is_breaking, pr_type, bullet_point, migration_note, release_notes, }) } } impl GitHubPort 
for OctocrabAdapter { async fn get_changelog_infos( &self, owner: &str, repo: &str, base: &str, head: &str, ) -> Result<Vec<ChangelogInfo>, Box<dyn std::error::Error>> { let comparison = self .client .commits(owner, repo) .compare(base, head) .send() .await?; let mut changelogs = Vec::new(); for commit in comparison.commits { match self.build_changelog_info(owner, repo, &commit.sha).await { Ok(info) => changelogs.push(info), Err(e) => { eprintln!("Error retrieving PR for commit {}: {}", commit.sha, e); continue; } } } changelogs.sort_by(|a, b| a.pr_type.cmp(&b.pr_type)); Ok(changelogs) } async fn get_latest_release_tag( &self, owner: &str, repo: &str, ) -> Result<String, Box<dyn std::error::Error>> { let latest_release = self .client .repos(owner, repo) .releases() .get_latest() .await?; Ok(latest_release.tag_name) } }
rust
Apache-2.0
865e00c295de8b4a0a1ef7ac926c3c8266d5151b
2026-01-04T15:31:59.450823Z
false
FuelLabs/fuels-rs
https://github.com/FuelLabs/fuels-rs/blob/865e00c295de8b4a0a1ef7ac926c3c8266d5151b/wasm-tests/src/lib.rs
wasm-tests/src/lib.rs
extern crate alloc; #[cfg(all(test, target_arch = "wasm32"))] mod tests { use std::{default::Default, str::FromStr}; use fuels::{ accounts::predicate::Predicate, core::{codec::ABIEncoder, traits::Tokenizable}, macros::wasm_abigen, programs::debug::ScriptType, types::{AssetId, bech32::Bech32Address, errors::Result}, }; use fuels_core::codec::abi_formatter::ABIFormatter; use wasm_bindgen_test::wasm_bindgen_test; #[wasm_bindgen_test] fn decoding_and_encoding() -> Result<()> { wasm_abigen!(Contract( name = "no_name", // abi generated with: "e2e/sway/abi/wasm_contract" abi = r#" { "programType": "contract", "specVersion": "1", "encodingVersion": "1", "concreteTypes": [ { "type": "()", "concreteTypeId": "2e38e77b22c314a449e91fafed92a43826ac6aa403ae6a8acb6cf58239fbaf5d" }, { "type": "enum SomeEnum<struct SomeStruct>", "concreteTypeId": "744ffecb34b691a157f3f4b4657ea215fd23e3cc79fd7a3b7f15431751b46134", "metadataTypeId": 1, "typeArguments": [ "c672b07b5808bcc04715d73ca6d42eaabd332266144c1017c20833ef05a4a484" ] }, { "type": "struct SomeStruct", "concreteTypeId": "c672b07b5808bcc04715d73ca6d42eaabd332266144c1017c20833ef05a4a484", "metadataTypeId": 3 } ], "metadataTypes": [ { "type": "bool", "metadataTypeId": 0 }, { "type": "enum SomeEnum", "metadataTypeId": 1, "components": [ { "name": "V1", "typeId": "2e38e77b22c314a449e91fafed92a43826ac6aa403ae6a8acb6cf58239fbaf5d" }, { "name": "V2", "typeId": 2 } ], "typeParameters": [ 2 ] }, { "type": "generic T", "metadataTypeId": 2 }, { "type": "struct SomeStruct", "metadataTypeId": 3, "components": [ { "name": "a", "typeId": 4 }, { "name": "b", "typeId": 0 } ] }, { "type": "u32", "metadataTypeId": 4 } ], "functions": [ { "inputs": [ { "name": "_arg", "concreteTypeId": "744ffecb34b691a157f3f4b4657ea215fd23e3cc79fd7a3b7f15431751b46134" } ], "name": "test_function", "output": "2e38e77b22c314a449e91fafed92a43826ac6aa403ae6a8acb6cf58239fbaf5d" } ], "loggedTypes": [], "messagesTypes": [], "configurables": [] } "# )); let original = 
SomeEnum::V2(SomeStruct { a: 123, b: false }); let bytes = ABIEncoder::default().encode(&[original.clone().into_token()])?; let expected_bytes = [ 0, 0, 0, 0, 0, 0, 0, 1, // enum discriminant 0, 0, 0, 123, 0, // SomeStruct ] .to_vec(); assert_eq!(expected_bytes, bytes); let reconstructed = bytes.try_into().unwrap(); assert_eq!(original, reconstructed); Ok(()) } #[wasm_bindgen_test] fn predicate_abigen() -> Result<()> { wasm_abigen!(Predicate( name = "MyPredicate", // abi generated with: "e2e/sway/abi/wasm_predicate" abi = r#" { "programType": "predicate", "specVersion": "1", "encodingVersion": "1", "concreteTypes": [ { "type": "bool", "concreteTypeId": "b760f44fa5965c2474a3b471467a22c43185152129295af588b022ae50b50903" }, { "type": "u64", "concreteTypeId": "1506e6f44c1d6291cdf46395a8e573276a4fa79e8ace3fc891e092ef32d1b0a0" } ], "metadataTypes": [], "functions": [ { "inputs": [ { "name": "val", "concreteTypeId": "1506e6f44c1d6291cdf46395a8e573276a4fa79e8ace3fc891e092ef32d1b0a0" } ], "name": "main", "output": "b760f44fa5965c2474a3b471467a22c43185152129295af588b022ae50b50903", "attributes": null } ], "loggedTypes": [], "messagesTypes": [], "configurables": [ { "name": "U64", "concreteTypeId": "1506e6f44c1d6291cdf46395a8e573276a4fa79e8ace3fc891e092ef32d1b0a0", "offset": 376 } ] } "# )); let code = vec![ 26, 24, 48, 0, 116, 0, 0, 2, 0, 0, 0, 0, 0, 0, 1, 12, 93, 255, 192, 1, 16, 255, 255, 0, 145, 0, 0, 8, 8, 235, 24, 0, 8, 228, 0, 8, 8, 224, 64, 0, 32, 248, 51, 0, 88, 251, 224, 2, 8, 251, 224, 4, 116, 0, 0, 28, 26, 236, 8, 0, 145, 0, 0, 16, 113, 64, 0, 3, 97, 69, 2, 0, 19, 73, 16, 0, 118, 72, 0, 6, 114, 72, 0, 2, 19, 69, 2, 128, 118, 68, 0, 1, 54, 0, 0, 0, 97, 65, 2, 74, 116, 0, 0, 1, 97, 65, 2, 12, 95, 237, 0, 1, 8, 67, 176, 8, 26, 233, 0, 0, 32, 248, 51, 0, 88, 251, 224, 2, 8, 251, 224, 4, 116, 0, 0, 32, 26, 67, 28, 0, 26, 233, 0, 0, 32, 248, 51, 0, 88, 251, 224, 2, 8, 251, 224, 4, 116, 0, 0, 42, 26, 67, 28, 0, 36, 64, 0, 0, 149, 0, 0, 15, 15, 8, 0, 0, 26, 236, 8, 0, 
145, 0, 0, 16, 26, 67, 16, 0, 26, 71, 128, 0, 26, 75, 224, 0, 95, 237, 0, 0, 26, 235, 176, 0, 32, 248, 51, 0, 88, 251, 224, 2, 8, 251, 224, 4, 116, 0, 0, 11, 26, 67, 28, 0, 95, 237, 0, 1, 8, 67, 176, 8, 114, 76, 0, 8, 4, 69, 4, 192, 26, 244, 0, 0, 146, 0, 0, 16, 26, 249, 32, 0, 152, 8, 0, 0, 151, 0, 0, 15, 74, 248, 0, 0, 149, 0, 0, 31, 15, 8, 0, 0, 26, 236, 8, 0, 26, 83, 16, 0, 26, 67, 224, 0, 93, 69, 64, 0, 93, 69, 16, 0, 93, 73, 64, 0, 114, 76, 0, 8, 16, 73, 36, 192, 95, 81, 32, 0, 26, 245, 16, 0, 26, 249, 0, 0, 152, 8, 0, 0, 151, 0, 0, 31, 74, 248, 0, 0, 149, 0, 0, 7, 15, 8, 0, 0, 26, 236, 8, 0, 26, 67, 16, 0, 26, 71, 224, 0, 93, 72, 64, 0, 19, 65, 4, 128, 26, 245, 0, 0, 26, 249, 16, 0, 152, 8, 0, 0, 151, 0, 0, 7, 74, 248, 0, 0, 0, 0, 0, 0, 0, 0, 0, 128, ]; let value = 129; let predicate_data = MyPredicateEncoder::default().encode_data(value)?; let configurables = MyPredicateConfigurables::default().with_U64(value)?; let predicate: Predicate = Predicate::from_code(code.clone()) .with_data(predicate_data) .with_configurables(configurables); let mut expected_code = code.clone(); *expected_code.last_mut().unwrap() = value as u8; assert_eq!(*predicate.code(), expected_code); let expected_address = Bech32Address::from_str( "fuel1c7rzx6ljxdz8egkcfjswffe7w8u06rm4nfvyu4lelyjua7qlcmdss9jkjm", )?; assert_eq!(*predicate.address(), expected_address); Ok(()) } #[wasm_bindgen_test] fn can_decode_a_contract_calling_script() -> Result<()> { let script = hex::decode( "724028d8724428b05d451000724828b82d41148a724029537244292b5d451000724829332d41148a24040000", )?; let script_data = hex::decode( 
"000000000000000a00000000000000000000000000000000000000000000000000000000000000001e62ecaa5c32f1e51954f46149d5e542472bdba45838199406464af46ab147ed000000000000290800000000000029260000000000000016636865636b5f7374727563745f696e746567726974790000000201000000000000001400000000000000000000000000000000000000000000000000000000000000001e62ecaa5c32f1e51954f46149d5e542472bdba45838199406464af46ab147ed000000000000298300000000000029a20000000000000017695f616d5f63616c6c65645f646966666572656e746c7900000002011e62ecaa5c32f1e51954f46149d5e542472bdba45838199406464af46ab147ed000000000000007b00000000000001c8", )?; let abi = r#"{ "programType": "contract", "specVersion": "1", "encodingVersion": "1", "concreteTypes": [ { "type": "()", "concreteTypeId": "2e38e77b22c314a449e91fafed92a43826ac6aa403ae6a8acb6cf58239fbaf5d" }, { "type": "bool", "concreteTypeId": "b760f44fa5965c2474a3b471467a22c43185152129295af588b022ae50b50903" }, { "type": "struct AllStruct", "concreteTypeId": "91804f0112892169cddf041007c9f16f95281d45c3f363e544c33dffc8179266", "metadataTypeId": 1 }, { "type": "struct CallData", "concreteTypeId": "c1b2644ef8de5c5b7a95aaadf3f5cedd40f42286d459bcd051c3cc35fa1ce5ec", "metadataTypeId": 2 }, { "type": "struct MemoryAddress", "concreteTypeId": "0b7b6a791f80f65fe493c3e0d0283bf8206871180c9b696797ff0098ff63b474", "metadataTypeId": 3 } ], "metadataTypes": [ { "type": "b256", "metadataTypeId": 0 }, { "type": "struct AllStruct", "metadataTypeId": 1, "components": [ { "name": "some_struct", "typeId": 4 } ] }, { "type": "struct CallData", "metadataTypeId": 2, "components": [ { "name": "memory_address", "typeId": 3 }, { "name": "num_coins_to_forward", "typeId": 7 }, { "name": "asset_id_of_coins_to_forward", "typeId": 5 }, { "name": "amount_of_gas_to_forward", "typeId": 7 } ] }, { "type": "struct MemoryAddress", "metadataTypeId": 3, "components": [ { "name": "contract_id", "typeId": 5 }, { "name": "function_selector", "typeId": 7 }, { "name": "function_data", "typeId": 7 } ] }, { "type": "struct 
SomeStruct", "metadataTypeId": 4, "components": [ { "name": "field", "typeId": 6 }, { "name": "field_2", "typeId": "b760f44fa5965c2474a3b471467a22c43185152129295af588b022ae50b50903" } ] }, { "type": "struct std::contract_id::ContractId", "metadataTypeId": 5, "components": [ { "name": "bits", "typeId": 0 } ] }, { "type": "u32", "metadataTypeId": 6 }, { "type": "u64", "metadataTypeId": 7 } ], "functions": [ { "inputs": [ { "name": "arg", "concreteTypeId": "91804f0112892169cddf041007c9f16f95281d45c3f363e544c33dffc8179266" } ], "name": "check_struct_integrity", "output": "b760f44fa5965c2474a3b471467a22c43185152129295af588b022ae50b50903", "attributes": [ { "name": "payable", "arguments": [] } ] }, { "inputs": [], "name": "get_struct", "output": "91804f0112892169cddf041007c9f16f95281d45c3f363e544c33dffc8179266", "attributes": null }, { "inputs": [ { "name": "arg1", "concreteTypeId": "91804f0112892169cddf041007c9f16f95281d45c3f363e544c33dffc8179266" }, { "name": "arg2", "concreteTypeId": "0b7b6a791f80f65fe493c3e0d0283bf8206871180c9b696797ff0098ff63b474" } ], "name": "i_am_called_differently", "output": "2e38e77b22c314a449e91fafed92a43826ac6aa403ae6a8acb6cf58239fbaf5d", "attributes": [ { "name": "payable", "arguments": [] } ] }, { "inputs": [ { "name": "call_data", "concreteTypeId": "c1b2644ef8de5c5b7a95aaadf3f5cedd40f42286d459bcd051c3cc35fa1ce5ec" } ], "name": "nested_struct_with_reserved_keyword_substring", "output": "c1b2644ef8de5c5b7a95aaadf3f5cedd40f42286d459bcd051c3cc35fa1ce5ec", "attributes": null } ], "loggedTypes": [], "messagesTypes": [], "configurables": [] }"#; let decoder = ABIFormatter::from_json_abi(abi)?; // when let script_type = ScriptType::detect(&script, &script_data)?; // then let ScriptType::ContractCall(call_descriptions) = script_type else { panic!("expected a contract call") }; assert_eq!(call_descriptions.len(), 2); let call_description = &call_descriptions[0]; let expected_contract_id = 
"1e62ecaa5c32f1e51954f46149d5e542472bdba45838199406464af46ab147ed".parse()?; assert_eq!(call_description.contract_id, expected_contract_id); assert_eq!(call_description.amount, 10); assert_eq!(call_description.asset_id, AssetId::default()); assert_eq!( call_description.decode_fn_selector().unwrap(), "check_struct_integrity" ); assert!(call_description.gas_forwarded.is_none()); assert_eq!( decoder.decode_fn_args( &call_description.decode_fn_selector().unwrap(), &call_description.encoded_args )?, vec!["AllStruct { some_struct: SomeStruct { field: 2, field_2: true } }"] ); let call_description = &call_descriptions[1]; assert_eq!(call_description.contract_id, expected_contract_id); assert_eq!(call_description.amount, 20); assert_eq!(call_description.asset_id, AssetId::default()); assert_eq!( call_description.decode_fn_selector().unwrap(), "i_am_called_differently" ); assert!(call_description.gas_forwarded.is_none()); assert_eq!( decoder.decode_fn_args( &call_description.decode_fn_selector().unwrap(), &call_description.encoded_args )?, vec![ "AllStruct { some_struct: SomeStruct { field: 2, field_2: true } }", "MemoryAddress { contract_id: std::contract_id::ContractId { bits: Bits256([30, 98, 236, 170, 92, 50, 241, 229, 25, 84, 244, 97, 73, 213, 229, 66, 71, 43, 219, 164, 88, 56, 25, 148, 6, 70, 74, 244, 106, 177, 71, 237]) }, function_selector: 123, function_data: 456 }" ] ); Ok(()) } }
rust
Apache-2.0
865e00c295de8b4a0a1ef7ac926c3c8266d5151b
2026-01-04T15:31:59.450823Z
false
FuelLabs/fuels-rs
https://github.com/FuelLabs/fuels-rs/blob/865e00c295de8b4a0a1ef7ac926c3c8266d5151b/e2e/build.rs
e2e/build.rs
use std::{ io::Cursor, path::{Path, PathBuf}, }; use flate2::read::GzDecoder; use fuels_accounts::provider::SUPPORTED_FUEL_CORE_VERSION; use tar::Archive; struct Downloader { dir: PathBuf, } impl Downloader { const EXECUTOR_FILE_NAME: &'static str = "fuel-core-wasm-executor.wasm"; pub fn new() -> Self { let env = std::env::var("OUT_DIR").unwrap(); let out_dir = Path::new(&env); Self { dir: out_dir.to_path_buf(), } } pub fn should_download(&self) -> anyhow::Result<bool> { if !self.executor_path().exists() { return Ok(true); } if !self.version_path().exists() { return Ok(true); } let saved_version = semver::Version::parse(&std::fs::read_to_string(self.version_path())?)?; if saved_version != SUPPORTED_FUEL_CORE_VERSION { return Ok(true); } Ok(false) } pub fn download(&self) -> anyhow::Result<()> { std::fs::create_dir_all(&self.dir)?; const LINK_TEMPLATE: &str = "https://github.com/FuelLabs/fuel-core/releases/download/vVERSION/fuel-core-VERSION-x86_64-unknown-linux-gnu.tar.gz"; let link = LINK_TEMPLATE.replace("VERSION", &SUPPORTED_FUEL_CORE_VERSION.to_string()); let response = reqwest::blocking::Client::builder() .timeout(std::time::Duration::from_secs(60)) .build()? .get(link) .send()?; if !response.status().is_success() { anyhow::bail!("Failed to download wasm executor: {}", response.status()); } let mut content = Cursor::new(response.bytes()?); let mut archive = Archive::new(GzDecoder::new(&mut content)); let mut extracted = false; let executor_in_tar = Path::new(&format!( "fuel-core-{SUPPORTED_FUEL_CORE_VERSION}-x86_64-unknown-linux-gnu" )) .join(Self::EXECUTOR_FILE_NAME); for entry in archive.entries()? { let mut entry = entry?; if entry.path()? 
== executor_in_tar { entry.unpack(self.executor_path())?; std::fs::write( self.version_path(), format!("{SUPPORTED_FUEL_CORE_VERSION}"), )?; extracted = true; break; } } if !extracted { anyhow::bail!("Failed to extract wasm executor from the archive"); } Ok(()) } fn make_cargo_watch_downloaded_files(&self) { let executor_path = self.executor_path(); println!("cargo:rerun-if-changed={}", executor_path.display()); let version_path = self.version_path(); println!("cargo:rerun-if-changed={}", version_path.display()); } fn executor_path(&self) -> PathBuf { self.dir.join(Self::EXECUTOR_FILE_NAME) } fn version_path(&self) -> PathBuf { self.dir.join("version.rs") } } fn main() { println!("cargo:rerun-if-changed=build.rs"); let downloader = Downloader::new(); downloader.make_cargo_watch_downloaded_files(); if downloader.should_download().unwrap() { downloader.download().unwrap(); } }
rust
Apache-2.0
865e00c295de8b4a0a1ef7ac926c3c8266d5151b
2026-01-04T15:31:59.450823Z
false
FuelLabs/fuels-rs
https://github.com/FuelLabs/fuels-rs/blob/865e00c295de8b4a0a1ef7ac926c3c8266d5151b/e2e/src/lib.rs
e2e/src/lib.rs
mod aws_kms; pub mod e2e_helpers;
rust
Apache-2.0
865e00c295de8b4a0a1ef7ac926c3c8266d5151b
2026-01-04T15:31:59.450823Z
false
FuelLabs/fuels-rs
https://github.com/FuelLabs/fuels-rs/blob/865e00c295de8b4a0a1ef7ac926c3c8266d5151b/e2e/src/e2e_helpers.rs
e2e/src/e2e_helpers.rs
use fuels::types::errors::Result; use crate::aws_kms::{AwsKms, AwsKmsProcess}; pub async fn start_aws_kms(logs: bool) -> Result<AwsKmsProcess> { AwsKms::default().with_show_logs(logs).start().await }
rust
Apache-2.0
865e00c295de8b4a0a1ef7ac926c3c8266d5151b
2026-01-04T15:31:59.450823Z
false
FuelLabs/fuels-rs
https://github.com/FuelLabs/fuels-rs/blob/865e00c295de8b4a0a1ef7ac926c3c8266d5151b/e2e/src/aws_kms.rs
e2e/src/aws_kms.rs
use fuels::{ accounts::signers::kms::aws::{ AwsKmsSigner, aws_config::{BehaviorVersion, Region, defaults}, aws_sdk_kms::{ Client, config::Credentials, types::{KeySpec, KeyUsageType}, }, }, prelude::Error, types::errors::{Context, Result}, }; use testcontainers::{core::ContainerPort, runners::AsyncRunner}; use tokio::io::AsyncBufReadExt; #[derive(Default)] pub struct AwsKms { show_logs: bool, } struct AwsKmsImage; impl testcontainers::Image for AwsKmsImage { fn name(&self) -> &str { "localstack/localstack" } fn tag(&self) -> &str { "latest" } fn ready_conditions(&self) -> Vec<testcontainers::core::WaitFor> { vec![testcontainers::core::WaitFor::message_on_stdout("Ready.")] } fn expose_ports(&self) -> &[ContainerPort] { &[ContainerPort::Tcp(4566)] } } impl AwsKms { pub fn with_show_logs(mut self, show_logs: bool) -> Self { self.show_logs = show_logs; self } pub async fn start(self) -> Result<AwsKmsProcess> { let container = AwsKmsImage .start() .await .map_err(|e| Error::Other(e.to_string())) .with_context(|| "Failed to start KMS container")?; if self.show_logs { spawn_log_printer(&container); } let port = container .get_host_port_ipv4(4566) .await .map_err(|e| Error::Other(e.to_string()))?; let url = format!("http://localhost:{}", port); let credentials = Credentials::new("test", "test", None, None, "Static Test Credentials"); let region = Region::new("us-east-1"); let config = defaults(BehaviorVersion::latest()) .credentials_provider(credentials) .endpoint_url(url.clone()) .region(region) .load() .await; let client = Client::new(&config); Ok(AwsKmsProcess { _container: container, client, url, }) } } fn spawn_log_printer(container: &testcontainers::ContainerAsync<AwsKmsImage>) { let stderr = container.stderr(true); let stdout = container.stdout(true); tokio::spawn(async move { let mut stderr_lines = stderr.lines(); let mut stdout_lines = stdout.lines(); let mut other_stream_closed = false; loop { tokio::select! 
{ stderr_result = stderr_lines.next_line() => { match stderr_result { Ok(Some(line)) => eprintln!("KMS (stderr): {}", line), Ok(None) if other_stream_closed => break, Ok(None) => other_stream_closed = true, Err(e) => { eprintln!("KMS: Error reading from stderr: {:?}", e); break; } } } stdout_result = stdout_lines.next_line() => { match stdout_result { Ok(Some(line)) => eprintln!("KMS (stdout): {}", line), Ok(None) if other_stream_closed => break, Ok(None) => other_stream_closed = true, Err(e) => { eprintln!("KMS: Error reading from stdout: {:?}", e); break; } } } } } Ok::<(), std::io::Error>(()) }); } pub struct AwsKmsProcess { _container: testcontainers::ContainerAsync<AwsKmsImage>, client: Client, url: String, } impl AwsKmsProcess { pub async fn create_signer(&self) -> anyhow::Result<AwsKmsSigner> { let response = self .client .create_key() .key_usage(KeyUsageType::SignVerify) .key_spec(KeySpec::EccSecgP256K1) .send() .await?; let id = response .key_metadata .and_then(|metadata| metadata.arn) .ok_or_else(|| anyhow::anyhow!("key arn missing from response"))?; let kms_signer = AwsKmsSigner::new(id.clone(), &self.client).await?; Ok(kms_signer) } pub fn client(&self) -> &Client { &self.client } pub fn url(&self) -> &str { &self.url } }
rust
Apache-2.0
865e00c295de8b4a0a1ef7ac926c3c8266d5151b
2026-01-04T15:31:59.450823Z
false
FuelLabs/fuels-rs
https://github.com/FuelLabs/fuels-rs/blob/865e00c295de8b4a0a1ef7ac926c3c8266d5151b/e2e/tests/aws.rs
e2e/tests/aws.rs
#[cfg(test)] mod tests { use anyhow::Result; use e2e::e2e_helpers::start_aws_kms; use fuels::{ accounts::{Account, ViewOnlyAccount, signers::kms::aws::AwsKmsSigner, wallet::Wallet}, core::traits::Signer, prelude::{ AssetId, Contract, LoadConfiguration, TxPolicies, launch_provider_and_get_wallet, }, types::errors::Context, }; #[tokio::test(flavor = "multi_thread")] async fn fund_aws_wallet() -> Result<()> { let kms = start_aws_kms(false).await?; let wallet = launch_provider_and_get_wallet().await?; let amount = 500000000; let key = kms.create_signer().await?; let address = key.address(); wallet .transfer(address, amount, AssetId::zeroed(), TxPolicies::default()) .await .context("Failed to transfer funds")?; let your_kms_key_id = key.key_id(); let provider = wallet.provider().clone(); let aws_client = kms.client(); // ANCHOR: use_kms_wallet let kms_signer = AwsKmsSigner::new(your_kms_key_id, aws_client).await?; let wallet = Wallet::new(kms_signer, provider); // ANCHOR_END: use_kms_wallet let total_base_balance = wallet.get_asset_balance(&AssetId::zeroed()).await?; assert_eq!(total_base_balance, amount as u128); Ok(()) } #[tokio::test(flavor = "multi_thread")] async fn deploy_contract() -> Result<()> { let kms = start_aws_kms(false).await?; let wallet = launch_provider_and_get_wallet().await?; let amount = 500000000; let key = kms.create_signer().await?; let address = key.address(); wallet .transfer(address, amount, AssetId::zeroed(), TxPolicies::default()) .await .context("Failed to transfer funds")?; let your_kms_key_id = key.key_id(); let provider = wallet.provider().clone(); let kms_signer = AwsKmsSigner::new(your_kms_key_id, kms.client()).await?; let aws_wallet = Wallet::new(kms_signer, provider); Contract::load_from( "../e2e/sway/contracts/contract_test/out/release/contract_test.bin", LoadConfiguration::default(), )? .deploy(&aws_wallet, TxPolicies::default()) .await?; Ok(()) } }
rust
Apache-2.0
865e00c295de8b4a0a1ef7ac926c3c8266d5151b
2026-01-04T15:31:59.450823Z
false
FuelLabs/fuels-rs
https://github.com/FuelLabs/fuels-rs/blob/865e00c295de8b4a0a1ef7ac926c3c8266d5151b/e2e/tests/types_scripts.rs
e2e/tests/types_scripts.rs
use fuels::{ prelude::*, types::{Bits256, U256}, }; #[tokio::test] async fn script_b256() -> Result<()> { setup_program_test!( Wallets("wallet"), Abigen(Script( name = "MyScript", project = "e2e/sway/types/scripts/script_b256" )), LoadScript( name = "script_instance", script = "MyScript", wallet = "wallet" ) ); let b256 = Bits256([1; 32]); let response = script_instance.main(b256).call().await?; assert_eq!(response.value, Bits256([2; 32])); Ok(()) } #[tokio::test] async fn main_function_generic_arguments() -> Result<()> { setup_program_test!( Wallets("wallet"), Abigen(Script( name = "MyScript", project = "e2e/sway/types/scripts/script_generics" )), LoadScript( name = "script_instance", script = "MyScript", wallet = "wallet" ) ); let bim = GenericBimbam { val: 90 }; let bam_comp = GenericBimbam { val: 4342 }; let bam = GenericSnack { twix: bam_comp, mars: 1000, }; let result = script_instance .main(bim.clone(), bam.clone()) .call() .await?; let expected = ( GenericSnack { twix: GenericBimbam { val: bam.mars as u64, }, mars: 2 * bim.val as u32, }, GenericBimbam { val: 255_u8 }, ); assert_eq!(result.value, expected); Ok(()) } #[tokio::test] async fn main_function_option_result() -> Result<()> { setup_program_test!( Wallets("wallet"), Abigen(Script( name = "MyScript", project = "e2e/sway/types/scripts/options_results" )), LoadScript( name = "script_instance", script = "MyScript", wallet = "wallet" ) ); { let result = script_instance.main(Some(42), None).call().await?; assert_eq!(result.value, Ok(Some(true))); } { let result = script_instance.main(Some(987), None).call().await?; assert_eq!(result.value, Ok(None)); } { let expected_error = Err(TestError::ZimZam("error".try_into().unwrap())); let result = script_instance.main(None, Some(987)).call().await?; assert_eq!(result.value, expected_error); } Ok(()) } #[tokio::test] async fn main_function_tuple_types() -> Result<()> { setup_program_test!( Wallets("wallet"), Abigen(Script( name = "MyScript", project = 
"e2e/sway/types/scripts/script_tuples" )), LoadScript( name = "script_instance", script = "MyScript", wallet = "wallet" ) ); let bim = Bim { bim: 90 }; let bam = Bam { bam: "itest".try_into()?, }; let boum = Boum { boum: true }; let result = script_instance .main( (bim, bam, boum), Bam { bam: "twice".try_into()?, }, ) .call() .await?; let expected = ( ( Boum { boum: true }, Bim { bim: 193817 }, Bam { bam: "hello".try_into()?, }, ), 42242, ); assert_eq!(result.value, expected); Ok(()) } #[tokio::test] async fn main_function_vector_arguments() -> Result<()> { setup_program_test!( Wallets("wallet"), Abigen(Script( name = "MyScript", project = "e2e/sway/types/scripts/script_vectors" )), LoadScript( name = "script_instance", script = "MyScript", wallet = "wallet" ) ); let u32_vec = vec![0, 1, 2]; let vec_in_vec = vec![vec![0, 1, 2], vec![0, 1, 2]]; let struct_in_vec = vec![SomeStruct { a: 0 }, SomeStruct { a: 1 }]; let vec_in_struct = SomeStruct { a: vec![0, 1, 2] }; let array_in_vec = vec![[0u64, 1u64], [0u64, 1u64]]; let vec_in_array = [vec![0, 1, 2], vec![0, 1, 2]]; let vec_in_enum = SomeEnum::a(vec![0, 1, 2]); let enum_in_vec = vec![SomeEnum::a(0), SomeEnum::a(1)]; let b256_in_vec = vec![Bits256([2; 32]), Bits256([2; 32])]; let tuple_in_vec = vec![(0, 0), (1, 1)]; let vec_in_tuple = (vec![0, 1, 2], vec![0, 1, 2]); let vec_in_a_vec_in_a_struct_in_a_vec = vec![ SomeStruct { a: vec![vec![0, 1, 2], vec![3, 4, 5]], }, SomeStruct { a: vec![vec![6, 7, 8], vec![9, 10, 11]], }, ]; let result = script_instance .main( u32_vec, vec_in_vec, struct_in_vec, vec_in_struct, array_in_vec, vec_in_array, vec_in_enum, enum_in_vec, b256_in_vec, tuple_in_vec, vec_in_tuple, vec_in_a_vec_in_a_struct_in_a_vec, ) .call() .await?; assert!(result.value); Ok(()) } #[tokio::test] async fn script_raw_slice() -> Result<()> { setup_program_test!( Wallets("wallet"), Abigen(Script( name = "BimBamScript", project = "e2e/sway/types/scripts/script_raw_slice", )), LoadScript( name = "script_instance", 
script = "BimBamScript", wallet = "wallet" ) ); let raw_slice = RawSlice(vec![40, 41, 42]); let wrapper = Wrapper { inner: vec![raw_slice.clone(), raw_slice.clone()], inner_enum: SomeEnum::Second(raw_slice), }; let rtn = script_instance.main(6, wrapper).call().await?.value; assert_eq!(rtn, RawSlice(vec![0, 1, 2, 3, 4, 5])); Ok(()) } #[tokio::test] async fn main_function_bytes_arguments() -> Result<()> { setup_program_test!( Wallets("wallet"), Abigen(Script( name = "BimBamScript", project = "e2e/sway/types/scripts/script_bytes", )), LoadScript( name = "script_instance", script = "BimBamScript", wallet = "wallet" ) ); let bytes = Bytes(vec![40, 41, 42]); let wrapper = Wrapper { inner: vec![bytes.clone(), bytes.clone()], inner_enum: SomeEnum::Second(bytes), }; script_instance.main(10, wrapper).call().await?; Ok(()) } fn u128_from(parts: (u64, u64)) -> u128 { let bytes: [u8; 16] = [parts.0.to_be_bytes(), parts.1.to_be_bytes()] .concat() .try_into() .unwrap(); u128::from_be_bytes(bytes) } #[tokio::test] async fn script_handles_u128() -> Result<()> { setup_program_test!( Wallets("wallet"), Abigen(Script( name = "MyScript", project = "e2e/sway/types/scripts/script_u128", )), LoadScript( name = "script_instance", script = "MyScript", wallet = "wallet" ) ); let arg = u128_from((10, 20)); let actual = script_instance.main(arg).call().await?.value; let expected = arg + u128_from((8, 2)); assert_eq!(expected, actual); Ok(()) } fn u256_from(parts: (u64, u64, u64, u64)) -> U256 { let bytes: [u8; 32] = [ parts.0.to_be_bytes(), parts.1.to_be_bytes(), parts.2.to_be_bytes(), parts.3.to_be_bytes(), ] .concat() .try_into() .unwrap(); U256::from(bytes) } #[tokio::test] async fn script_handles_u256() -> Result<()> { setup_program_test!( Wallets("wallet"), Abigen(Script( name = "MyScript", project = "e2e/sway/types/scripts/script_u256", )), LoadScript( name = "script_instance", script = "MyScript", wallet = "wallet" ) ); let arg = u256_from((10, 20, 30, 40)); let actual = 
script_instance.main(arg).call().await?.value; let expected = arg + u256_from((6, 7, 8, 9)); assert_eq!(expected, actual); Ok(()) } #[tokio::test] async fn script_std_string() -> Result<()> { setup_program_test!( Wallets("wallet"), Abigen(Script( name = "MyScript", project = "e2e/sway/types/scripts/script_std_lib_string", )), LoadScript( name = "script_instance", script = "MyScript", wallet = "wallet" ) ); let response = script_instance .main("script-input".to_string()) .call() .await?; assert_eq!(response.value, "script-return".to_string()); Ok(()) } #[tokio::test] // TODO: Uncomment this test when we find the reason why it fails #[ignore] async fn script_string_slice() -> Result<()> { setup_program_test!( Wallets("wallet"), Abigen(Script( name = "MyScript", project = "e2e/sway/types/scripts/script_string_slice", )), LoadScript( name = "script_instance", script = "MyScript", wallet = "wallet" ) ); let response = script_instance .main("script-input".try_into()?) .call() .await?; assert_eq!(response.value, "script-return"); Ok(()) } #[tokio::test] async fn nested_heap_types() -> Result<()> { setup_program_test!( Wallets("wallet"), Abigen(Script( name = "MyScript", project = "e2e/sway/types/scripts/script_heap_types", )), LoadScript( name = "script_instance", script = "MyScript", wallet = "wallet" ) ); let arr = [2u8, 4, 8]; let struct_generics = StructGenerics { one: Bytes(arr.to_vec()), two: String::from("fuel"), three: arr.to_vec(), }; let enum_vec = [struct_generics.clone(), struct_generics].to_vec(); let expected = EnumGeneric::One(enum_vec); let result = script_instance.main().call().await?; assert_eq!(result.value, expected); Ok(()) }
rust
Apache-2.0
865e00c295de8b4a0a1ef7ac926c3c8266d5151b
2026-01-04T15:31:59.450823Z
false
FuelLabs/fuels-rs
https://github.com/FuelLabs/fuels-rs/blob/865e00c295de8b4a0a1ef7ac926c3c8266d5151b/e2e/tests/bindings.rs
e2e/tests/bindings.rs
use fuels::prelude::*; mod hygiene { #[tokio::test] async fn setup_program_test_is_hygienic() { fuels::prelude::setup_program_test!( Wallets("wallet"), Abigen(Contract( name = "SimpleContract", project = "e2e/sway/bindings/simple_contract" )), Deploy( name = "simple_contract_instance", contract = "SimpleContract", wallet = "wallet", random_salt = false, ), ); } } #[tokio::test] async fn compile_bindings_from_contract_file() { setup_program_test!( Wallets("wallet"), Abigen(Contract( name = "SimpleContract", project = "e2e/sway/bindings/simple_contract" )), Deploy( name = "simple_contract_instance", contract = "SimpleContract", wallet = "wallet", random_salt = false, ), ); let call_handler = simple_contract_instance .methods() .takes_int_returns_bool(42); let encoded_args = call_handler.call.encoded_args.unwrap(); assert_eq!(encoded_args, [0, 0, 0, 42]); } #[tokio::test] async fn compile_bindings_from_inline_contract() -> Result<()> { abigen!(Contract( name = "SimpleContract", // abi generated with: "e2e/sway/abi/simple_contract" abi = r#" { "programType": "contract", "specVersion": "1", "encodingVersion": "1", "concreteTypes": [ { "type": "bool", "concreteTypeId": "b760f44fa5965c2474a3b471467a22c43185152129295af588b022ae50b50903" }, { "type": "u32", "concreteTypeId": "d7649d428b9ff33d188ecbf38a7e4d8fd167fa01b2e10fe9a8f9308e52f1d7cc" } ], "metadataTypes": [], "functions": [ { "inputs": [ { "name": "_arg", "concreteTypeId": "d7649d428b9ff33d188ecbf38a7e4d8fd167fa01b2e10fe9a8f9308e52f1d7cc" } ], "name": "takes_u32_returns_bool", "output": "b760f44fa5965c2474a3b471467a22c43185152129295af588b022ae50b50903", "attributes": null } ], "loggedTypes": [], "messagesTypes": [], "configurables": [] } "#, )); let wallet = launch_provider_and_get_wallet().await?; let contract_instance = SimpleContract::new(ContractId::zeroed(), wallet); let call_handler = contract_instance.methods().takes_u32_returns_bool(42_u32); let encoded_args = call_handler.call.encoded_args.unwrap(); 
assert_eq!(encoded_args, [0, 0, 0, 42]); Ok(()) } #[tokio::test] async fn shared_types() -> Result<()> { setup_program_test!( Wallets("wallet"), Abigen( Contract( name = "ContractA", project = "e2e/sway/bindings/sharing_types/contract_a" ), Contract( name = "ContractB", project = "e2e/sway/bindings/sharing_types/contract_b" ), ), Deploy( name = "contract_a", contract = "ContractA", wallet = "wallet", random_salt = false, ), Deploy( name = "contract_b", contract = "ContractB", wallet = "wallet", random_salt = false, ), ); { let methods = contract_a.methods(); { let shared_struct_2 = SharedStruct2 { a: 11u32, b: SharedStruct1 { a: 12u32 }, }; let shared_enum = SharedEnum::a(10u64); let response = methods .uses_shared_type(shared_struct_2.clone(), shared_enum.clone()) .call() .await? .value; assert_eq!(response, (shared_struct_2, shared_enum)); } { let same_name_struct = abigen_bindings::contract_a_mod::StructSameNameButDifferentInternals { a: 13u32 }; let same_name_enum = abigen_bindings::contract_a_mod::EnumSameNameButDifferentInternals::a(14u32); let response = methods .uses_types_that_share_only_names(same_name_struct.clone(), same_name_enum.clone()) .call() .await? .value; assert_eq!(response, (same_name_struct, same_name_enum)); } { let arg = UniqueStructToContractA { a: SharedStruct2 { a: 15u32, b: SharedStruct1 { a: 5u8 }, }, }; let response = methods .uses_shared_type_inside_owned_one(arg.clone()) .call() .await? .value; assert_eq!(response, arg); } } { let methods = contract_b.methods(); { let shared_struct_2 = SharedStruct2 { a: 11u32, b: SharedStruct1 { a: 12u32 }, }; let shared_enum = SharedEnum::a(10u64); let response = methods .uses_shared_type(shared_struct_2.clone(), shared_enum.clone()) .call() .await? 
.value; assert_eq!(response, (shared_struct_2, shared_enum)); } { let same_name_struct = abigen_bindings::contract_b_mod::StructSameNameButDifferentInternals { a: [13u64] }; let same_name_enum = abigen_bindings::contract_b_mod::EnumSameNameButDifferentInternals::a([14u64]); let response = methods .uses_types_that_share_only_names(same_name_struct.clone(), same_name_enum.clone()) .call() .await? .value; assert_eq!(response, (same_name_struct, same_name_enum)); } { let arg = UniqueStructToContractB { a: SharedStruct2 { a: 15u32, b: SharedStruct1 { a: 5u8 }, }, }; let response = methods .uses_shared_type_inside_owned_one(arg.clone()) .call() .await? .value; assert_eq!(response, arg); } } Ok(()) } #[tokio::test] async fn type_paths_respected() -> Result<()> { setup_program_test!( Wallets("wallet"), Abigen(Contract( name = "ContractA", project = "e2e/sway/bindings/type_paths" )), Deploy( name = "contract_a_instance", contract = "ContractA", wallet = "wallet", random_salt = false, ), ); { let contract_a_type = abigen_bindings::contract_a_mod::contract_a_types::VeryCommonNameStruct { another_field: 10u32, }; let rtn = contract_a_instance .methods() .test_function(AWrapper { field: contract_a_type, }) .call() .await? .value; let rtn_using_the_other_type = abigen_bindings::contract_a_mod::another_lib::VeryCommonNameStruct { field_a: 10u32 }; assert_eq!(rtn, rtn_using_the_other_type); } Ok(()) }
rust
Apache-2.0
865e00c295de8b4a0a1ef7ac926c3c8266d5151b
2026-01-04T15:31:59.450823Z
false
FuelLabs/fuels-rs
https://github.com/FuelLabs/fuels-rs/blob/865e00c295de8b4a0a1ef7ac926c3c8266d5151b/e2e/tests/imports.rs
e2e/tests/imports.rs
#[cfg(test)] mod tests { #[test] fn provides_output_type() { // test exists because we've excluded fuel_tx::Output twice #[allow(unused_imports)] use fuels::types::output::Output; } }
rust
Apache-2.0
865e00c295de8b4a0a1ef7ac926c3c8266d5151b
2026-01-04T15:31:59.450823Z
false
FuelLabs/fuels-rs
https://github.com/FuelLabs/fuels-rs/blob/865e00c295de8b4a0a1ef7ac926c3c8266d5151b/e2e/tests/types_contracts.rs
e2e/tests/types_contracts.rs
use std::str::FromStr; use fuels::{ prelude::*, types::{B512, Bits256, EvmAddress, Identity, SizedAsciiString, U256}, }; #[tokio::test] async fn test_methods_typeless_argument() -> Result<()> { setup_program_test!( Wallets("wallet"), Abigen(Contract( name = "TypesContract", project = "e2e/sway/types/contracts/empty_arguments" )), Deploy( name = "contract_instance", contract = "TypesContract", wallet = "wallet", random_salt = false, ), ); let response = contract_instance .methods() .method_with_empty_argument() .call() .await?; assert_eq!(response.value, 63); Ok(()) } #[tokio::test] async fn call_with_empty_return() -> Result<()> { setup_program_test!( Wallets("wallet"), Abigen(Contract( name = "TestContract", project = "e2e/sway/types/contracts/call_empty_return" )), Deploy( name = "contract_instance", contract = "TestContract", wallet = "wallet", random_salt = false, ), ); let _response = contract_instance.methods().store_value(42).call().await?; Ok(()) } #[tokio::test] async fn call_with_structs() -> Result<()> { // Generates the bindings from the an ABI definition inline. // The generated bindings can be accessed through `MyContract`. // ANCHOR: struct_generation abigen!(Contract( name = "MyContract", abi = "e2e/sway/types/contracts/complex_types_contract/out/release/complex_types_contract-abi.json" )); // Here we can use `CounterConfig`, a struct originally // defined in the contract. let counter_config = CounterConfig { dummy: true, initial_value: 42, }; // ANCHOR_END: struct_generation let wallet = launch_provider_and_get_wallet().await?; let contract_id = Contract::load_from( "sway/types/contracts/complex_types_contract/out/release/complex_types_contract.bin", LoadConfiguration::default(), )? .deploy_if_not_exists(&wallet, TxPolicies::default()) .await? 
.contract_id; let contract_methods = MyContract::new(contract_id, wallet).methods(); let response = contract_methods .initialize_counter(counter_config) .call() .await?; assert_eq!(42, response.value); let response = contract_methods.increment_counter(10).call().await?; assert_eq!(52, response.value); Ok(()) } #[tokio::test] async fn abigen_different_structs_same_arg_name() -> Result<()> { setup_program_test!( Wallets("wallet"), Abigen(Contract( name = "TypesContract", project = "e2e/sway/types/contracts/two_structs" )), Deploy( name = "contract_instance", contract = "TypesContract", wallet = "wallet", random_salt = false, ), ); let param_one = StructOne { foo: 42 }; let param_two = StructTwo { bar: 42 }; let contract_methods = contract_instance.methods(); let res_one = contract_methods.something(param_one).call().await?; assert_eq!(res_one.value, 43); let res_two = contract_methods.something_else(param_two).call().await?; assert_eq!(res_two.value, 41); Ok(()) } #[tokio::test] async fn nested_structs() -> Result<()> { setup_program_test!( Wallets("wallet"), Abigen(Contract( name = "TypesContract", project = "e2e/sway/types/contracts/nested_structs" )), Deploy( name = "contract_instance", contract = "TypesContract", wallet = "wallet", random_salt = false, ), ); let expected = AllStruct { some_struct: SomeStruct { field: 12345, field_2: true, }, }; let contract_methods = contract_instance.methods(); let actual = contract_methods.get_struct().call().await?.value; assert_eq!(actual, expected); let fuelvm_judgement = contract_methods .check_struct_integrity(expected) .call() .await? .value; assert!( fuelvm_judgement, "The FuelVM deems that we've not encoded the argument correctly. Investigate!" 
); let memory_address = MemoryAddress { contract_id: ContractId::zeroed(), function_selector: 10, function_data: 0, }; let call_data = CallData { memory_address, num_coins_to_forward: 10, asset_id_of_coins_to_forward: ContractId::zeroed(), amount_of_gas_to_forward: 5, }; let actual = contract_methods .nested_struct_with_reserved_keyword_substring(call_data.clone()) .call() .await? .value; assert_eq!(actual, call_data); Ok(()) } #[tokio::test] async fn calls_with_empty_struct() -> Result<()> { setup_program_test!( Wallets("wallet"), Abigen(Contract( name = "TypesContract", project = "e2e/sway/types/contracts/complex_types_contract" )), Deploy( name = "contract_instance", contract = "TypesContract", wallet = "wallet", random_salt = false, ), ); let contract_methods = contract_instance.methods(); { let response = contract_methods.get_empty_struct().call().await?; assert_eq!(response.value, EmptyStruct {}); } { let response = contract_methods .input_empty_struct(EmptyStruct {}) .call() .await?; assert!(response.value); } Ok(()) } #[tokio::test] async fn can_use_try_into_to_construct_struct_from_bytes() -> Result<()> { abigen!(Contract( name = "MyContract", abi = "e2e/sway/types/contracts/enum_inside_struct/out/release\ /enum_inside_struct-abi.json" )); let cocktail_in_bytes: Vec<u8> = vec![ 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 2, 0, 0, 0, 0, 0, 0, 0, 3, ]; let expected = Cocktail { the_thing_you_mix_in: Shaker::Mojito(2), glass: 3, }; // as slice let actual: Cocktail = cocktail_in_bytes[..].try_into()?; assert_eq!(actual, expected); // as ref let actual: Cocktail = (&cocktail_in_bytes).try_into()?; assert_eq!(actual, expected); // as value let actual: Cocktail = cocktail_in_bytes.try_into()?; assert_eq!(actual, expected); Ok(()) } #[tokio::test] async fn test_tuples() -> Result<()> { setup_program_test!( Wallets("wallet"), Abigen(Contract( name = "TypesContract", project = "e2e/sway/types/contracts/tuples" )), Deploy( name = "contract_instance", contract = 
"TypesContract", wallet = "wallet", random_salt = false, ), ); let contract_methods = contract_instance.methods(); { let response = contract_methods.returns_tuple((1, 2)).call().await?; assert_eq!(response.value, (1, 2)); } { // Tuple with struct. let my_struct_tuple = ( 42, Person { name: "Jane".try_into()?, }, ); let response = contract_methods .returns_struct_in_tuple(my_struct_tuple.clone()) .call() .await?; assert_eq!(response.value, my_struct_tuple); } { // Tuple with enum. let my_enum_tuple: (u64, State) = (42, State::A); let response = contract_methods .returns_enum_in_tuple(my_enum_tuple.clone()) .call() .await?; assert_eq!(response.value, my_enum_tuple); } { // Tuple with single element let my_enum_tuple = (123u64,); let response = contract_methods .single_element_tuple(my_enum_tuple) .call() .await?; assert_eq!(response.value, my_enum_tuple); } { // tuple with b256 let id = *ContractId::zeroed(); let my_b256_u8_tuple = (Bits256(id), 10); let response = contract_methods .tuple_with_b256(my_b256_u8_tuple) .call() .await?; assert_eq!(response.value, my_b256_u8_tuple); } Ok(()) } #[tokio::test] async fn test_evm_address() -> Result<()> { setup_program_test!( Wallets("wallet"), Abigen(Contract( name = "TypesContract", project = "e2e/sway/types/contracts/evm_address" )), Deploy( name = "contract_instance", contract = "TypesContract", wallet = "wallet", random_salt = false, ), ); { // ANCHOR: evm_address_arg let b256 = Bits256::from_hex_str( "0x1616060606060606060606060606060606060606060606060606060606060606", )?; let evm_address = EvmAddress::from(b256); let call_handler = contract_instance .methods() .evm_address_as_input(evm_address); // ANCHOR_END: evm_address_arg assert!(call_handler.call().await?.value); } { let b256 = Bits256::from_hex_str( "0x0606060606060606060606060606060606060606060606060606060606060606", )?; let expected_evm_address = EvmAddress::from(b256); assert_eq!( contract_instance .methods() .evm_address_from_literal() .call() .await? 
.value, expected_evm_address ); } { let b256 = Bits256::from_hex_str( "0x0606060606060606060606060606060606060606060606060606060606060606", )?; let expected_evm_address = EvmAddress::from(b256); assert_eq!( contract_instance .methods() .evm_address_from_argument(b256) .call() .await? .value, expected_evm_address ); } Ok(()) } #[tokio::test] async fn test_array() -> Result<()> { setup_program_test!( Wallets("wallet"), Abigen(Contract( name = "TypesContract", project = "e2e/sway/contracts/contract_test" )), Deploy( name = "contract_instance", contract = "TypesContract", wallet = "wallet", random_salt = false, ), ); assert_eq!( contract_instance .methods() .get_array([42; 2]) .call() .await? .value, [42; 2] ); Ok(()) } #[tokio::test] async fn test_arrays_with_custom_types() -> Result<()> { setup_program_test!( Wallets("wallet"), Abigen(Contract( name = "TypesContract", project = "e2e/sway/contracts/contract_test" )), Deploy( name = "contract_instance", contract = "TypesContract", wallet = "wallet", random_salt = false, ), ); let persons = [ Person { name: "John".try_into()?, }, Person { name: "Jane".try_into()?, }, ]; let contract_methods = contract_instance.methods(); let response = contract_methods.array_of_structs(persons).call().await?; assert_eq!("John", response.value[0].name); assert_eq!("Jane", response.value[1].name); let states = [State::A, State::B]; let response = contract_methods .array_of_enums(states.clone()) .call() .await?; assert_eq!(states[0], response.value[0]); assert_eq!(states[1], response.value[1]); Ok(()) } #[tokio::test] async fn str_in_array() -> Result<()> { setup_program_test!( Wallets("wallet"), Abigen(Contract( name = "TypesContract", project = "e2e/sway/types/contracts/str_in_array" )), Deploy( name = "contract_instance", contract = "TypesContract", wallet = "wallet", random_salt = false, ), ); let input = ["foo", "bar", "baz"].map(|str| str.try_into().unwrap()); let contract_methods = contract_instance.methods(); let response = 
contract_methods .take_array_string_shuffle(input.clone()) .call() .await?; assert_eq!(response.value, ["baz", "foo", "bar"]); let response = contract_methods .take_array_string_return_single(input.clone()) .call() .await?; assert_eq!(response.value, ["foo"]); let response = contract_methods .take_array_string_return_single_element(input) .call() .await?; assert_eq!(response.value, "bar"); Ok(()) } #[tokio::test] async fn test_enum_inside_struct() -> Result<()> { setup_program_test!( Wallets("wallet"), Abigen(Contract( name = "TypesContract", project = "e2e/sway/types/contracts/enum_inside_struct" )), Deploy( name = "contract_instance", contract = "TypesContract", wallet = "wallet", random_salt = false, ), ); let expected = Cocktail { the_thing_you_mix_in: Shaker::Mojito(11), glass: 333, }; let contract_methods = contract_instance.methods(); let response = contract_methods .return_enum_inside_struct(11) .call() .await?; assert_eq!(response.value, expected); let enum_inside_struct = Cocktail { the_thing_you_mix_in: Shaker::Cosmopolitan(444), glass: 555, }; let response = contract_methods .take_enum_inside_struct(enum_inside_struct) .call() .await?; assert_eq!(response.value, 555); Ok(()) } #[tokio::test] async fn native_types_support() -> Result<()> { setup_program_test!( Wallets("wallet"), Abigen(Contract( name = "TypesContract", project = "e2e/sway/types/contracts/native_types" )), Deploy( name = "contract_instance", contract = "TypesContract", wallet = "wallet", random_salt = false, ), ); let user = User { weight: 10, address: Address::zeroed(), }; let contract_methods = contract_instance.methods(); let response = contract_methods.wrapped_address(user).call().await?; assert_eq!(response.value.address, Address::zeroed()); let response = contract_methods .unwrapped_address(Address::zeroed()) .call() .await?; assert_eq!( response.value, Address::from_str("0x0000000000000000000000000000000000000000000000000000000000000000")? 
); Ok(()) } #[tokio::test] async fn enum_coding_w_variable_width_variants() -> Result<()> { setup_program_test!( Wallets("wallet"), Abigen(Contract( name = "TypesContract", project = "e2e/sway/types/contracts/enum_encoding" )), Deploy( name = "contract_instance", contract = "TypesContract", wallet = "wallet", random_salt = false, ), ); // If we had a regression on the issue of enum encoding width, then we'll // probably end up mangling arg_2 and onward which will fail this test. let expected = BigBundle { arg_1: EnumThatHasABigAndSmallVariant::Small(12345), arg_2: 6666, arg_3: 7777, arg_4: 8888, }; let contract_methods = contract_instance.methods(); let actual = contract_methods.get_big_bundle().call().await?.value; assert_eq!(actual, expected); let fuelvm_judgement = contract_methods .check_big_bundle_integrity(expected) .call() .await? .value; assert!( fuelvm_judgement, "The FuelVM deems that we've not encoded the bundle correctly. Investigate!" ); Ok(()) } #[tokio::test] async fn enum_coding_w_unit_enums() -> Result<()> { setup_program_test!( Wallets("wallet"), Abigen(Contract( name = "TypesContract", project = "e2e/sway/types/contracts/enum_encoding" )), Deploy( name = "contract_instance", contract = "TypesContract", wallet = "wallet", random_salt = false, ), ); // If we had a regression on the issue of unit enum encoding width, then // we'll end up mangling arg_2 let expected = UnitBundle { arg_1: UnitEnum::var2, arg_2: u64::MAX, }; let contract_methods = contract_instance.methods(); let actual = contract_methods.get_unit_bundle().call().await?.value; assert_eq!(actual, expected); let fuelvm_judgement = contract_methods .check_unit_bundle_integrity(expected) .call() .await? .value; assert!( fuelvm_judgement, "The FuelVM deems that we've not encoded the bundle correctly. Investigate!" 
); Ok(()) } #[tokio::test] async fn enum_as_input() -> Result<()> { setup_program_test!( Wallets("wallet"), Abigen(Contract( name = "TypesContract", project = "e2e/sway/types/contracts/enum_as_input" )), Deploy( name = "contract_instance", contract = "TypesContract", wallet = "wallet", random_salt = false, ), ); let expected = MaxedOutVariantsEnum::Variant255(11); let contract_methods = contract_instance.methods(); let actual = contract_methods.get_max_variant().call().await?.value; assert_eq!(expected, actual); let expected = StandardEnum::Two(12345); let contract_methods = contract_instance.methods(); let actual = contract_methods.get_standard_enum().call().await?.value; assert_eq!(expected, actual); let fuelvm_judgement = contract_methods .check_standard_enum_integrity(expected) .call() .await? .value; assert!( fuelvm_judgement, "The FuelVM deems that we've not encoded the standard enum correctly. Investigate!" ); let expected = UnitEnum::Two; let actual = contract_methods.get_unit_enum().call().await?.value; assert_eq!(actual, expected); let fuelvm_judgement = contract_methods .check_unit_enum_integrity(expected) .call() .await? .value; assert!( fuelvm_judgement, "The FuelVM deems that we've not encoded the unit enum correctly. Investigate!" 
); Ok(()) } #[tokio::test] async fn can_use_try_into_to_construct_enum_from_bytes() -> Result<()> { abigen!(Contract( name = "MyContract", abi = "e2e/sway/types/contracts/enum_inside_struct/out/release\ /enum_inside_struct-abi.json" )); let shaker_in_bytes: Vec<u8> = vec![0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 2]; let expected = Shaker::Mojito(2); // as slice let actual: Shaker = shaker_in_bytes[..].try_into()?; assert_eq!(actual, expected); // as ref let actual: Shaker = (&shaker_in_bytes).try_into()?; assert_eq!(actual, expected); // as value let actual: Shaker = shaker_in_bytes.try_into()?; assert_eq!(actual, expected); Ok(()) } #[tokio::test] async fn type_inside_enum() -> Result<()> { setup_program_test!( Wallets("wallet"), Abigen(Contract( name = "TypesContract", project = "e2e/sway/types/contracts/type_inside_enum" )), Deploy( name = "contract_instance", contract = "TypesContract", wallet = "wallet", random_salt = false, ), ); // String inside enum let enum_string = SomeEnum::SomeStr("asdf".try_into()?); let contract_methods = contract_instance.methods(); let response = contract_methods .str_inside_enum(enum_string.clone()) .call() .await?; assert_eq!(response.value, enum_string); // Array inside enum let enum_array = SomeEnum::SomeArr([1, 2, 3, 4]); let response = contract_methods .arr_inside_enum(enum_array.clone()) .call() .await?; assert_eq!(response.value, enum_array); // Struct inside enum let response = contract_methods .return_struct_inside_enum(11) .call() .await?; let expected = Shaker::Cosmopolitan(Recipe { ice: 22, sugar: 11 }); assert_eq!(response.value, expected); let struct_inside_enum = Shaker::Cosmopolitan(Recipe { ice: 22, sugar: 66 }); let response = contract_methods .take_struct_inside_enum(struct_inside_enum) .call() .await?; assert_eq!(response.value, 8888); // Enum inside enum let expected_enum = EnumLevel3::El2(EnumLevel2::El1(EnumLevel1::Num(42))); let response = contract_methods.get_nested_enum().call().await?; 
assert_eq!(response.value, expected_enum); let response = contract_methods .check_nested_enum_integrity(expected_enum) .call() .await?; assert!( response.value, "The FuelVM deems that we've not encoded the nested enum correctly. Investigate!" ); Ok(()) } #[tokio::test] async fn test_rust_option_can_be_decoded() -> Result<()> { setup_program_test!( Wallets("wallet"), Abigen(Contract( name = "TypesContract", project = "e2e/sway/types/contracts/options" )), Deploy( name = "contract_instance", contract = "TypesContract", wallet = "wallet", random_salt = false, ), ); let contract_methods = contract_instance.methods(); let expected_address = Address::from_str("0xd58573593432a30a800f97ad32f877425c223a9e427ab557aab5d5bb89156db0")?; let s = TestStruct { option: Some(expected_address), }; let e = TestEnum::EnumOption(Some(expected_address)); let expected_some_address = Some(expected_address); let response = contract_methods.get_some_address().call().await?; assert_eq!(response.value, expected_some_address); let expected_some_u64 = Some(10); let response = contract_methods.get_some_u64().call().await?; assert_eq!(response.value, expected_some_u64); let response = contract_methods.get_some_struct().call().await?; assert_eq!(response.value, Some(s.clone())); let response = contract_methods.get_some_enum().call().await?; assert_eq!(response.value, Some(e.clone())); let response = contract_methods.get_some_tuple().call().await?; assert_eq!(response.value, Some((s.clone(), e.clone()))); let expected_none = None; let response = contract_methods.get_none().call().await?; assert_eq!(response.value, expected_none); Ok(()) } #[tokio::test] async fn test_rust_option_can_be_encoded() -> Result<()> { setup_program_test!( Wallets("wallet"), Abigen(Contract( name = "TypesContract", project = "e2e/sway/types/contracts/options" )), Deploy( name = "contract_instance", contract = "TypesContract", wallet = "wallet", random_salt = false, ), ); let contract_methods = contract_instance.methods(); 
let expected_address = Address::from_str("0xd58573593432a30a800f97ad32f877425c223a9e427ab557aab5d5bb89156db0")?; let s = TestStruct { option: Some(expected_address), }; let e = TestEnum::EnumOption(Some(expected_address)); let expected_u64 = Some(36); let response = contract_methods .input_primitive(expected_u64) .call() .await?; assert!(response.value); let expected_struct = Some(s); let response = contract_methods .input_struct(expected_struct) .call() .await?; assert!(response.value); let expected_enum = Some(e); let response = contract_methods.input_enum(expected_enum).call().await?; assert!(response.value); let expected_none = None; let response = contract_methods.input_none(expected_none).call().await?; assert!(response.value); Ok(()) } #[tokio::test] async fn test_rust_result_can_be_decoded() -> Result<()> { setup_program_test!( Wallets("wallet"), Abigen(Contract( name = "TypesContract", project = "e2e/sway/types/contracts/results" )), Deploy( name = "contract_instance", contract = "TypesContract", wallet = "wallet", random_salt = false, ), ); let contract_methods = contract_instance.methods(); let expected_address = Address::from_str("0xd58573593432a30a800f97ad32f877425c223a9e427ab557aab5d5bb89156db0")?; let s = TestStruct { option: Some(expected_address), }; let e = TestEnum::EnumOption(Some(expected_address)); let expected_ok_address = Ok(expected_address); let response = contract_methods.get_ok_address().call().await?; assert_eq!(response.value, expected_ok_address); let expected_some_u64 = Ok(10); let response = contract_methods.get_ok_u64().call().await?; assert_eq!(response.value, expected_some_u64); let response = contract_methods.get_ok_struct().call().await?; assert_eq!(response.value, Ok(s.clone())); let response = contract_methods.get_ok_enum().call().await?; assert_eq!(response.value, Ok(e.clone())); let response = contract_methods.get_ok_tuple().call().await?; assert_eq!(response.value, Ok((s, e))); let expected_error = 
Err(TestError::NoAddress("error".try_into().unwrap())); let response = contract_methods.get_error().call().await?; assert_eq!(response.value, expected_error); Ok(()) } #[tokio::test] async fn test_rust_result_can_be_encoded() -> Result<()> { setup_program_test!( Wallets("wallet"), Abigen(Contract( name = "TypesContract", project = "e2e/sway/types/contracts/results" )), Deploy( name = "contract_instance", contract = "TypesContract", wallet = "wallet", random_salt = false, ), ); let contract_methods = contract_instance.methods(); let expected_address = Address::from_str("0xd58573593432a30a800f97ad32f877425c223a9e427ab557aab5d5bb89156db0")?; let expected_ok_address = Ok(expected_address); let response = contract_methods .input_ok(expected_ok_address) .call() .await?; assert!(response.value); let expected_error = Err(TestError::NoAddress("error".try_into().unwrap())); let response = contract_methods.input_error(expected_error).call().await?; assert!(response.value); Ok(()) } #[tokio::test] async fn test_identity_can_be_decoded() -> Result<()> { setup_program_test!( Wallets("wallet"), Abigen(Contract( name = "TypesContract", project = "e2e/sway/types/contracts/identity" )), Deploy( name = "contract_instance", contract = "TypesContract", wallet = "wallet", random_salt = false, ), ); let contract_methods = contract_instance.methods(); let expected_address = Address::from_str("0xd58573593432a30a800f97ad32f877425c223a9e427ab557aab5d5bb89156db0")?; let expected_contract_id = ContractId::from_str("0xd58573593432a30a800f97ad32f877425c223a9e427ab557aab5d5bb89156db0")?; let s = TestStruct { identity: Identity::Address(expected_address), }; let e = TestEnum::EnumIdentity(Identity::ContractId(expected_contract_id)); let response = contract_methods.get_identity_address().call().await?; assert_eq!(response.value, Identity::Address(expected_address)); let response = contract_methods.get_identity_contract_id().call().await?; assert_eq!(response.value, 
Identity::ContractId(expected_contract_id)); let response = contract_methods.get_struct_with_identity().call().await?; assert_eq!(response.value, s.clone()); let response = contract_methods.get_enum_with_identity().call().await?; assert_eq!(response.value, e.clone()); let response = contract_methods.get_identity_tuple().call().await?; assert_eq!(response.value, (s, e)); Ok(()) } #[tokio::test] async fn test_identity_can_be_encoded() -> Result<()> { setup_program_test!( Wallets("wallet"), Abigen(Contract( name = "TypesContract", project = "e2e/sway/types/contracts/identity" )), Deploy( name = "contract_instance", contract = "TypesContract", wallet = "wallet", random_salt = false, ), ); let contract_methods = contract_instance.methods(); let expected_address = Address::from_str("0xd58573593432a30a800f97ad32f877425c223a9e427ab557aab5d5bb89156db0")?; let expected_contract_id = ContractId::from_str("0xd58573593432a30a800f97ad32f877425c223a9e427ab557aab5d5bb89156db0")?; let s = TestStruct { identity: Identity::Address(expected_address), }; let e = TestEnum::EnumIdentity(Identity::ContractId(expected_contract_id)); let response = contract_methods .input_identity(Identity::Address(expected_address)) .call() .await?; assert!(response.value); let response = contract_methods .input_struct_with_identity(s) .call() .await?; assert!(response.value); let response = contract_methods.input_enum_with_identity(e).call().await?; assert!(response.value); Ok(()) } #[tokio::test] async fn test_identity_with_two_contracts() -> Result<()> { setup_program_test!( Wallets("wallet"), Abigen(Contract( name = "TypesContract", project = "e2e/sway/types/contracts/identity" )), Deploy( name = "contract_instance", contract = "TypesContract", wallet = "wallet", random_salt = false, ), Deploy( name = "contract_instance2", contract = "TypesContract", wallet = "wallet", random_salt = false, ), ); let expected_address = 
Address::from_str("0xd58573593432a30a800f97ad32f877425c223a9e427ab557aab5d5bb89156db0")?; { let response = contract_instance .methods() .input_identity(Identity::Address(expected_address)) .call() .await?; assert!(response.value); } { let response = contract_instance2 .methods() .input_identity(Identity::Address(expected_address)) .call() .await?; assert!(response.value); } Ok(()) } #[tokio::test] async fn generics_test() -> Result<()> { setup_program_test!( Wallets("wallet"), Abigen(Contract( name = "TypesContract", project = "e2e/sway/types/contracts/generics" )), Deploy( name = "contract_instance", contract = "TypesContract", wallet = "wallet", random_salt = false, ), ); let contract_methods = contract_instance.methods(); { // ANCHOR: generic // simple struct with a single generic param let arg1 = SimpleGeneric { single_generic_param: 123u64, }; let result = contract_methods .struct_w_generic(arg1.clone()) .call() .await? .value; assert_eq!(result, arg1); // ANCHOR_END: generic } { // struct that delegates the generic param internally let arg1 = PassTheGenericOn { one: SimpleGeneric { single_generic_param: "abc".try_into()?, }, }; let result = contract_methods .struct_delegating_generic(arg1.clone()) .call() .await? .value; assert_eq!(result, arg1); } { // struct that has the generic in an array let arg1 = StructWArrayGeneric { a: [1u32, 2u32] }; let result = contract_methods
rust
Apache-2.0
865e00c295de8b4a0a1ef7ac926c3c8266d5151b
2026-01-04T15:31:59.450823Z
true
FuelLabs/fuels-rs
https://github.com/FuelLabs/fuels-rs/blob/865e00c295de8b4a0a1ef7ac926c3c8266d5151b/e2e/tests/scripts.rs
e2e/tests/scripts.rs
use std::time::Duration; use fuel_tx::Output; use fuels::{ accounts::signers::private_key::PrivateKeySigner, client::{PageDirection, PaginationRequest}, core::{ Configurables, codec::{DecoderConfig, EncoderConfig}, traits::Tokenizable, }, prelude::*, programs::{DEFAULT_MAX_FEE_ESTIMATION_TOLERANCE, executable::Executable}, types::{Bits256, Identity}, }; use rand::thread_rng; #[tokio::test] async fn main_function_arguments() -> Result<()> { // ANCHOR: script_with_arguments // The abigen is used for the same purpose as with contracts (Rust bindings) abigen!(Script( name = "MyScript", abi = "e2e/sway/scripts/arguments/out/release/arguments-abi.json" )); let wallet = launch_provider_and_get_wallet().await?; let bin_path = "sway/scripts/arguments/out/release/arguments.bin"; let script_instance = MyScript::new(wallet, bin_path); let bim = Bimbam { val: 90 }; let bam = SugarySnack { twix: 100, mars: 1000, }; let result = script_instance.main(bim, bam).call().await?; let expected = Bimbam { val: 2190 }; assert_eq!(result.value, expected); // ANCHOR_END: script_with_arguments Ok(()) } #[tokio::test] async fn script_call_has_same_estimated_and_used_gas() -> Result<()> { setup_program_test!( Wallets("wallet"), Abigen(Script( name = "MyScript", project = "e2e/sway/scripts/basic_script" )), LoadScript( name = "script_instance", script = "MyScript", wallet = "wallet" ) ); let tolerance = Some(0.0); let block_horizon = Some(1); let a = 4u64; let b = 2u32; let estimated_total_gas = script_instance .main(a, b) .estimate_transaction_cost(tolerance, block_horizon) .await? 
.total_gas; let total_gas = script_instance.main(a, b).call().await?.tx_status.total_gas; assert_eq!(estimated_total_gas, total_gas); Ok(()) } #[tokio::test] async fn test_basic_script_with_tx_policies() -> Result<()> { setup_program_test!( Wallets("wallet"), Abigen(Script( name = "bimbam_script", project = "e2e/sway/scripts/basic_script" )), LoadScript( name = "script_instance", script = "bimbam_script", wallet = "wallet" ) ); let a = 1000u64; let b = 2000u32; let result = script_instance.main(a, b).call().await?; assert_eq!(result.value, "hello"); // ANCHOR: script_with_tx_policies let tx_policies = TxPolicies::default().with_script_gas_limit(1_000_000); let result = script_instance .main(a, b) .with_tx_policies(tx_policies) .call() .await?; // ANCHOR_END: script_with_tx_policies assert_eq!(result.value, "hello"); Ok(()) } #[tokio::test] async fn test_output_variable_estimation() -> Result<()> { setup_program_test!( Wallets("wallet"), Abigen(Script( name = "transfer_script", project = "e2e/sway/scripts/transfer_script" )), LoadScript( name = "script_instance", script = "transfer_script", wallet = "wallet" ) ); let provider = wallet.provider().clone(); let receiver = Wallet::random(&mut thread_rng(), provider); let amount = 1000; let asset_id = AssetId::zeroed(); let script_call = script_instance.main(amount, asset_id, Identity::Address(receiver.address())); let inputs = wallet .get_asset_inputs_for_amount(asset_id, amount.into(), None) .await?; let output = Output::change(wallet.address(), 0, asset_id); let _ = script_call .with_inputs(inputs) .with_outputs(vec![output]) .with_variable_output_policy(VariableOutputPolicy::EstimateMinimum) .call() .await?; let receiver_balance = receiver.get_asset_balance(&asset_id).await?; assert_eq!(receiver_balance, amount as u128); Ok(()) } #[tokio::test] async fn test_script_struct() -> Result<()> { setup_program_test!( Wallets("wallet"), Abigen(Script( name = "MyScript", project = "e2e/sway/scripts/script_struct" )), 
LoadScript( name = "script_instance", script = "MyScript", wallet = "wallet" ) ); let my_struct = MyStruct { number: 42, boolean: true, }; let response = script_instance.main(my_struct).call().await?; assert_eq!(response.value, 42); Ok(()) } #[tokio::test] async fn test_script_enum() -> Result<()> { setup_program_test!( Wallets("wallet"), Abigen(Script( name = "MyScript", project = "e2e/sway/scripts/script_enum" )), LoadScript( name = "script_instance", script = "MyScript", wallet = "wallet" ) ); let my_enum = MyEnum::Two; let response = script_instance.main(my_enum).call().await?; assert_eq!(response.value, 2); Ok(()) } #[tokio::test] async fn test_script_array() -> Result<()> { setup_program_test!( Wallets("wallet"), Abigen(Script( name = "MyScript", project = "e2e/sway/scripts/script_array" )), LoadScript( name = "script_instance", script = "MyScript", wallet = "wallet" ) ); let my_array: [u64; 4] = [1, 2, 3, 4]; let response = script_instance.main(my_array).call().await?; assert_eq!(response.value, 10); Ok(()) } #[tokio::test] async fn can_configure_decoder_on_script_call() -> Result<()> { setup_program_test!( Wallets("wallet"), Abigen(Script( name = "MyScript", project = "e2e/sway/scripts/script_needs_custom_decoder" )), LoadScript( name = "script_instance", script = "MyScript", wallet = "wallet" ) ); { // Will fail if max_tokens too low script_instance .main(false) .with_decoder_config(DecoderConfig { max_tokens: 101, ..Default::default() }) .call() .await .expect_err( "Should fail because return type has more tokens than what is allowed by default", ); } { // When the token limit is bumped should pass let response = script_instance .main(false) .with_decoder_config(DecoderConfig { max_tokens: 1002, ..Default::default() }) .call() .await? 
.value .unwrap(); assert_eq!(response, [0u8; 1000]); } Ok(()) } #[tokio::test] async fn test_script_submit_and_response() -> Result<()> { setup_program_test!( Wallets("wallet"), Abigen(Script( name = "MyScript", project = "e2e/sway/scripts/script_struct" )), LoadScript( name = "script_instance", script = "MyScript", wallet = "wallet" ) ); let my_struct = MyStruct { number: 42, boolean: true, }; // ANCHOR: submit_response_script let submitted_tx = script_instance.main(my_struct).submit().await?; tokio::time::sleep(Duration::from_millis(500)).await; let value = submitted_tx.response().await?.value; // ANCHOR_END: submit_response_script assert_eq!(value, 42); Ok(()) } #[tokio::test] async fn test_script_transaction_builder() -> Result<()> { setup_program_test!( Wallets("wallet"), Abigen(Script( name = "MyScript", project = "e2e/sway/scripts/basic_script" )), LoadScript( name = "script_instance", script = "MyScript", wallet = "wallet" ) ); let provider = wallet.provider(); // ANCHOR: script_call_tb let script_call_handler = script_instance.main(1, 2); let mut tb = script_call_handler.transaction_builder().await?; // customize the builder... 
wallet.adjust_for_fee(&mut tb, 0).await?; wallet.add_witnesses(&mut tb)?; let tx = tb.build(provider).await?; let tx_id = provider.send_transaction(tx).await?; tokio::time::sleep(Duration::from_millis(500)).await; let tx_status = provider.tx_status(&tx_id).await?; let response = script_call_handler.get_response(tx_status)?; assert_eq!(response.value, "hello"); // ANCHOR_END: script_call_tb Ok(()) } #[tokio::test] async fn script_encoder_config_is_applied() { abigen!(Script( name = "MyScript", abi = "e2e/sway/scripts/basic_script/out/release/basic_script-abi.json" )); let wallet = launch_provider_and_get_wallet().await.expect(""); let bin_path = "sway/scripts/basic_script/out/release/basic_script.bin"; let script_instance_without_encoder_config = MyScript::new(wallet.clone(), bin_path); { let _encoding_ok = script_instance_without_encoder_config .main(1, 2) .call() .await .expect("should not fail as it uses the default encoder config"); } { let encoder_config = EncoderConfig { max_tokens: 1, ..Default::default() }; let script_instance_with_encoder_config = MyScript::new(wallet.clone(), bin_path).with_encoder_config(encoder_config); // uses 2 tokens when 1 is the limit let encoding_error = script_instance_with_encoder_config .main(1, 2) .call() .await .expect_err("should error"); assert!(encoding_error.to_string().contains( "cannot encode script call arguments: codec: token limit `1` reached while encoding" )); let encoding_error = script_instance_with_encoder_config .main(1, 2) .simulate(Execution::realistic()) .await .expect_err("should error"); assert!(encoding_error.to_string().contains( "cannot encode script call arguments: codec: token limit `1` reached while encoding" )); } } #[tokio::test] async fn simulations_can_be_made_without_coins() -> Result<()> { setup_program_test!( Wallets("wallet"), Abigen(Script( name = "MyScript", project = "e2e/sway/scripts/basic_script" )), LoadScript( name = "script_instance", script = "MyScript", wallet = "wallet" ) ); let 
provider = wallet.provider().clone(); let no_funds_wallet = Wallet::random(&mut thread_rng(), provider); let script_instance = script_instance.with_account(no_funds_wallet); let value = script_instance .main(1000, 2000) .simulate(Execution::state_read_only()) .await? .value; assert_eq!(value.as_ref(), "hello"); Ok(()) } #[tokio::test] async fn can_be_run_in_blobs_builder() -> Result<()> { abigen!(Script( abi = "e2e/sway/scripts/script_blobs/out/release/script_blobs-abi.json", name = "MyScript" )); let binary_path = "./sway/scripts/script_blobs/out/release/script_blobs.bin"; let wallet = launch_provider_and_get_wallet().await?; let provider = wallet.provider().clone(); // ANCHOR: preload_low_level let regular = Executable::load_from(binary_path)?; let configurables = MyScriptConfigurables::default().with_SECRET_NUMBER(10001)?; let loader = regular .convert_to_loader()? .with_configurables(configurables); // The Blob must be uploaded manually, otherwise the script code will revert. loader.upload_blob(wallet.clone()).await?; let encoder = fuels::core::codec::ABIEncoder::default(); let token = MyStruct { field_a: MyEnum::B(99), field_b: Bits256([17; 32]), } .into_token(); let data = encoder.encode(&[token])?; let mut tb = ScriptTransactionBuilder::default() .with_script(loader.code()) .with_script_data(data); wallet.adjust_for_fee(&mut tb, 0).await?; wallet.add_witnesses(&mut tb)?; let tx = tb.build(&provider).await?; let response = provider.send_transaction_and_await_commit(tx).await?; response.check(None)?; // ANCHOR_END: preload_low_level Ok(()) } #[tokio::test] async fn can_be_run_in_blobs_high_level() -> Result<()> { setup_program_test!( Abigen(Script( project = "e2e/sway/scripts/script_blobs", name = "MyScript" )), Wallets("wallet"), LoadScript(name = "my_script", script = "MyScript", wallet = "wallet") ); let configurables = MyScriptConfigurables::default().with_SECRET_NUMBER(10001)?; let mut my_script = my_script.with_configurables(configurables); let arg = 
MyStruct { field_a: MyEnum::B(99), field_b: Bits256([17; 32]), }; let secret = my_script .convert_into_loader() .await? .main(arg) .call() .await? .value; assert_eq!(secret, 10001); Ok(()) } #[tokio::test] async fn high_level_blob_upload_sets_max_fee_tolerance() -> Result<()> { let node_config = NodeConfig { starting_gas_price: 1000000000, ..Default::default() }; let signer = PrivateKeySigner::random(&mut thread_rng()); let coins = setup_single_asset_coins(signer.address(), AssetId::zeroed(), 1, u64::MAX); let provider = setup_test_provider(coins, vec![], Some(node_config), None).await?; let wallet = Wallet::new(signer, provider.clone()); setup_program_test!( Abigen(Script( project = "e2e/sway/scripts/script_blobs", name = "MyScript" )), LoadScript(name = "my_script", script = "MyScript", wallet = "wallet") ); let loader = Executable::from_bytes(std::fs::read( "sway/scripts/script_blobs/out/release/script_blobs.bin", )?) .convert_to_loader()?; let zero_tolerance_fee = { let mut tb = BlobTransactionBuilder::default() .with_blob(loader.blob()) .with_max_fee_estimation_tolerance(0.); wallet.adjust_for_fee(&mut tb, 0).await?; wallet.add_witnesses(&mut tb)?; let tx = tb.build(&provider).await?; tx.max_fee().unwrap() }; let mut my_script = my_script; my_script.convert_into_loader().await?; let max_fee_of_sent_blob_tx = provider .get_transactions(PaginationRequest { cursor: None, results: 20, direction: PageDirection::Forward, }) .await? 
.results .into_iter() .find_map(|tx| { if let TransactionType::Blob(blob_transaction) = tx.transaction { blob_transaction.max_fee() } else { None } }) .unwrap(); assert_eq!( max_fee_of_sent_blob_tx, (zero_tolerance_fee as f32 * (1.0 + DEFAULT_MAX_FEE_ESTIMATION_TOLERANCE)).ceil() as u64, "the blob upload tx should have had the max fee increased by the default estimation tolerance" ); Ok(()) } #[tokio::test] async fn no_data_section_blob_run() -> Result<()> { setup_program_test!( Abigen(Script( project = "e2e/sway/scripts/empty", name = "MyScript" )), Wallets("wallet"), LoadScript(name = "my_script", script = "MyScript", wallet = "wallet") ); let mut my_script = my_script; // ANCHOR: preload_high_level my_script.convert_into_loader().await?.main().call().await?; // ANCHOR_END: preload_high_level Ok(()) } #[tokio::test] async fn loader_script_calling_loader_proxy() -> Result<()> { setup_program_test!( Abigen( Contract( name = "MyContract", project = "e2e/sway/contracts/huge_contract" ), Contract(name = "MyProxy", project = "e2e/sway/contracts/proxy"), Script(name = "MyScript", project = "e2e/sway/scripts/script_proxy"), ), Wallets("wallet"), LoadScript(name = "my_script", script = "MyScript", wallet = "wallet") ); let contract_binary = "sway/contracts/huge_contract/out/release/huge_contract.bin"; let contract = Contract::load_from(contract_binary, LoadConfiguration::default())?; let contract_id = contract .convert_to_loader(100)? .deploy_if_not_exists(&wallet, TxPolicies::default()) .await? .contract_id; let contract_binary = "sway/contracts/proxy/out/release/proxy.bin"; let proxy_id = Contract::load_from(contract_binary, LoadConfiguration::default())? .convert_to_loader(100)? .deploy_if_not_exists(&wallet, TxPolicies::default()) .await? .contract_id; let proxy = MyProxy::new(proxy_id, wallet.clone()); proxy .methods() .set_target_contract(contract_id) .call() .await?; let mut my_script = my_script; let result = my_script .convert_into_loader() .await? 
.main(proxy_id) .with_contract_ids(&[contract_id, proxy_id]) .call() .await?; assert!(result.value); Ok(()) } #[tokio::test] async fn loader_can_be_presented_as_a_normal_script_with_shifted_configurables() -> Result<()> { abigen!(Script( abi = "e2e/sway/scripts/script_blobs/out/release/script_blobs-abi.json", name = "MyScript" )); let binary_path = "./sway/scripts/script_blobs/out/release/script_blobs.bin"; let wallet = launch_provider_and_get_wallet().await?; let provider = wallet.provider().clone(); let regular = Executable::load_from(binary_path)?; let configurables = MyScriptConfigurables::default().with_SECRET_NUMBER(10001)?; let loader = regular.clone().convert_to_loader()?; // The Blob must be uploaded manually, otherwise the script code will revert. loader.upload_blob(wallet.clone()).await?; let encoder = fuels::core::codec::ABIEncoder::default(); let token = MyStruct { field_a: MyEnum::B(99), field_b: Bits256([17; 32]), } .into_token(); let data = encoder.encode(&[token])?; let configurables: Configurables = configurables.into(); let offset = regular .configurables_offset_in_code()? 
.unwrap_or_else(|| regular.data_offset_in_code().unwrap()); let shifted_configurables = configurables .with_shifted_offsets(-(offset as i64)) .unwrap() .with_shifted_offsets(loader.configurables_offset_in_code() as i64) .unwrap(); let loader_posing_as_normal_script = Executable::from_bytes(loader.code()).with_configurables(shifted_configurables); let mut tb = ScriptTransactionBuilder::default() .with_script(loader_posing_as_normal_script.code()) .with_script_data(data); wallet.adjust_for_fee(&mut tb, 0).await?; wallet.add_witnesses(&mut tb)?; let tx = tb.build(&provider).await?; let response = provider.send_transaction_and_await_commit(tx).await?; response.check(None)?; Ok(()) } #[tokio::test] async fn script_call_respects_maturity_and_expiration() -> Result<()> { abigen!(Script( name = "MyScript", abi = "e2e/sway/scripts/basic_script/out/release/basic_script-abi.json" )); let wallet = launch_provider_and_get_wallet().await.expect(""); let provider = wallet.provider().clone(); let bin_path = "sway/scripts/basic_script/out/release/basic_script.bin"; let script_instance = MyScript::new(wallet, bin_path); let maturity = 10; let expiration = 20; let call_handler = script_instance.main(1, 2).with_tx_policies( TxPolicies::default() .with_maturity(maturity) .with_expiration(expiration), ); { let err = call_handler .clone() .call() .await .expect_err("maturity not reached"); assert!(err.to_string().contains("TransactionMaturity")); } { provider.produce_blocks(15, None).await?; call_handler .clone() .call() .await .expect("should succeed. Block height between `maturity` and `expiration`"); } { provider.produce_blocks(15, None).await?; let err = call_handler.call().await.expect_err("expiration reached"); assert!(err.to_string().contains("TransactionExpiration")); } Ok(()) } #[tokio::test] async fn script_tx_input_output() -> Result<()> { let [wallet_1, wallet_2] = launch_custom_provider_and_get_wallets( WalletsConfig::new(Some(2), Some(10), Some(1000)), None, None, ) .await? 
.try_into() .unwrap(); abigen!(Script( name = "TxScript", abi = "e2e/sway/scripts/script_tx_input_output/out/release/script_tx_input_output-abi.json" )); let script_binary = "sway/scripts/script_tx_input_output/out/release/script_tx_input_output.bin"; // Set `wallet_1` as the custom input owner let configurables = TxScriptConfigurables::default().with_OWNER(wallet_1.address())?; let script_instance = TxScript::new(wallet_2.clone(), script_binary).with_configurables(configurables); let asset_id = AssetId::zeroed(); { let custom_inputs = wallet_1 .get_asset_inputs_for_amount(asset_id, 10, None) .await? .into_iter() .take(1) .collect(); let custom_output = vec![Output::change(wallet_1.address(), 0, asset_id)]; // Input at first position is a coin owned by wallet_1 // Output at first position is change to wallet_1 // ANCHOR: script_custom_inputs_outputs let _ = script_instance .main(0, 0) .with_inputs(custom_inputs) .with_outputs(custom_output) .add_signer(wallet_1.signer().clone()) .call() .await?; // ANCHOR_END: script_custom_inputs_outputs } { // Input at first position is not a coin owned by wallet_1 let err = script_instance.main(0, 0).call().await.unwrap_err(); assert!(err.to_string().contains("wrong owner")); let custom_input = wallet_1 .get_asset_inputs_for_amount(asset_id, 10, None) .await? .pop() .unwrap(); // Input at first position is a coin owned by wallet_1 // Output at first position is not change to wallet_1 let err = script_instance .main(0, 0) .with_inputs(vec![custom_input]) .add_signer(wallet_1.signer().clone()) .call() .await .unwrap_err(); assert!(err.to_string().contains("wrong change address")); } Ok(()) }
rust
Apache-2.0
865e00c295de8b4a0a1ef7ac926c3c8266d5151b
2026-01-04T15:31:59.450823Z
false
FuelLabs/fuels-rs
https://github.com/FuelLabs/fuels-rs/blob/865e00c295de8b4a0a1ef7ac926c3c8266d5151b/e2e/tests/predicates.rs
e2e/tests/predicates.rs
use std::default::Default; use fuels::{ accounts::signers::private_key::PrivateKeySigner, core::{ codec::{ABIEncoder, EncoderConfig}, traits::Tokenizable, }, prelude::*, programs::executable::Executable, types::{coin::Coin, coin_type::CoinType, input::Input, message::Message, output::Output}, }; use rand::thread_rng; async fn assert_address_balance( address: &Address, provider: &Provider, asset_id: &AssetId, amount: u64, ) { let balance = provider .get_asset_balance(address, asset_id) .await .expect("Could not retrieve balance"); assert_eq!(balance, amount as u128); } fn get_test_coins_and_messages( address: Address, num_coins: u64, num_messages: u64, amount: u64, start_nonce: u64, ) -> (Vec<Coin>, Vec<Message>, AssetId) { let asset_id = AssetId::zeroed(); let coins = setup_single_asset_coins(address, asset_id, num_coins, amount); let messages = (0..num_messages) .map(|i| { setup_single_message( Address::default(), address, amount, (start_nonce + i).into(), vec![], ) }) .collect(); (coins, messages, asset_id) } fn get_test_message_w_data(address: Address, amount: u64, nonce: u64) -> Message { setup_single_message( Address::default(), address, amount, nonce.into(), vec![1, 2, 3], ) } // Setup function used to assign coins and messages to a predicate address // and create a `receiver` wallet async fn setup_predicate_test( predicate_address: Address, num_coins: u64, num_messages: u64, amount: u64, ) -> Result<(Provider, u64, Wallet, u64, AssetId, Wallet)> { let receiver_num_coins = 1; let receiver_amount = 1; let receiver_balance = receiver_num_coins * receiver_amount; let predicate_balance = (num_coins + num_messages) * amount; let mut rng = thread_rng(); let receiver_signer = PrivateKeySigner::random(&mut rng); let extra_wallet_signer = PrivateKeySigner::random(&mut rng); let (mut coins, messages, asset_id) = get_test_coins_and_messages(predicate_address, num_coins, num_messages, amount, 0); coins.extend(setup_single_asset_coins( receiver_signer.address(), asset_id, 
receiver_num_coins, receiver_amount, )); coins.extend(setup_single_asset_coins( extra_wallet_signer.address(), AssetId::zeroed(), 10_000, 10_000, )); coins.extend(setup_single_asset_coins( predicate_address, AssetId::from([1u8; 32]), num_coins, amount, )); let provider = setup_test_provider(coins, messages, None, None).await?; let receiver_wallet = Wallet::new(receiver_signer.clone(), provider.clone()); let extra_wallet = Wallet::new(extra_wallet_signer.clone(), provider.clone()); Ok(( provider, predicate_balance, receiver_wallet, receiver_balance, asset_id, extra_wallet, )) } #[tokio::test] async fn transfer_coins_and_messages_to_predicate() -> Result<()> { let num_coins = 16; let num_messages = 32; let amount = 64; let balance_to_send = 42; let signer = PrivateKeySigner::random(&mut thread_rng()); let (coins, messages, asset_id) = get_test_coins_and_messages(signer.address(), num_coins, num_messages, amount, 0); let provider = setup_test_provider(coins, messages, None, None).await?; let wallet = Wallet::new(signer, provider.clone()); let predicate = Predicate::load_from("sway/predicates/basic_predicate/out/release/basic_predicate.bin")? .with_provider(provider.clone()); wallet .transfer( predicate.address(), balance_to_send, asset_id, TxPolicies::default(), ) .await?; // The predicate has received the funds assert_address_balance(&predicate.address(), &provider, &asset_id, balance_to_send).await; Ok(()) } #[tokio::test] async fn spend_predicate_coins_messages_basic() -> Result<()> { abigen!(Predicate( name = "MyPredicate", abi = "e2e/sway/predicates/basic_predicate/out/release/basic_predicate-abi.json" )); let predicate_data = MyPredicateEncoder::default().encode_data(4097, 4097)?; let mut predicate: Predicate = Predicate::load_from("sway/predicates/basic_predicate/out/release/basic_predicate.bin")? 
.with_data(predicate_data); let num_coins = 4; let num_messages = 8; let amount = 16; let (provider, predicate_balance, receiver, receiver_balance, asset_id, _) = setup_predicate_test(predicate.address(), num_coins, num_messages, amount).await?; predicate.set_provider(provider.clone()); let amount_to_send = 128; let fee = predicate .transfer( receiver.address(), amount_to_send, asset_id, TxPolicies::default(), ) .await? .tx_status .total_fee; // The predicate has spent the funds let predicate_current_balance = predicate_balance - amount_to_send - fee; assert_address_balance( &predicate.address(), &provider, &asset_id, predicate_current_balance, ) .await; // Funds were transferred assert_address_balance( &receiver.address(), &provider, &asset_id, receiver_balance + amount_to_send, ) .await; Ok(()) } #[tokio::test] async fn pay_with_predicate() -> Result<()> { abigen!( Contract( name = "MyContract", abi = "e2e/sway/contracts/contract_test/out/release/contract_test-abi.json" ), Predicate( name = "MyPredicate", abi = "e2e/sway/types/predicates/u64/out/release/u64-abi.json" ) ); let predicate_data = MyPredicateEncoder::default().encode_data(32768)?; let mut predicate: Predicate = Predicate::load_from("sway/types/predicates/u64/out/release/u64.bin")? .with_data(predicate_data); let num_coins = 4; let num_messages = 8; let amount = 16; let (provider, predicate_balance, _receiver, _receiver_balance, _asset_id, _) = setup_predicate_test(predicate.address(), num_coins, num_messages, amount).await?; predicate.set_provider(provider.clone()); let deploy_response = Contract::load_from( "sway/contracts/contract_test/out/release/contract_test.bin", LoadConfiguration::default(), )? 
.deploy_if_not_exists(&predicate, TxPolicies::default()) .await?; let contract_methods = MyContract::new(deploy_response.contract_id, predicate.clone()).methods(); let consensus_parameters = provider.consensus_parameters().await?; let deploy_fee = deploy_response.tx_status.unwrap().total_fee; assert_eq!( predicate .get_asset_balance(consensus_parameters.base_asset_id()) .await?, (predicate_balance - deploy_fee) as u128 ); let response = contract_methods .initialize_counter(42) // Build the ABI call .call() .await?; assert_eq!(42, response.value); assert_eq!( predicate .get_asset_balance(consensus_parameters.base_asset_id()) .await?, (predicate_balance - deploy_fee - response.tx_status.total_fee) as u128 ); Ok(()) } #[tokio::test] async fn pay_with_predicate_vector_data() -> Result<()> { abigen!( Contract( name = "MyContract", abi = "e2e/sway/contracts/contract_test/out/release/contract_test-abi.json" ), Predicate( name = "MyPredicate", abi = "e2e/sway/types/predicates/predicate_vector/out/release/predicate_vector-abi.json" ) ); let predicate_data = MyPredicateEncoder::default().encode_data(12, 30, vec![2, 4, 42])?; let mut predicate: Predicate = Predicate::load_from( "sway/types/predicates/predicate_vector/out/release/predicate_vector.bin", )? .with_data(predicate_data); let num_coins = 4; let num_messages = 8; let amount = 16; let (provider, predicate_balance, _receiver, _receiver_balance, _asset_id, _) = setup_predicate_test(predicate.address(), num_coins, num_messages, amount).await?; predicate.set_provider(provider.clone()); let deploy_response = Contract::load_from( "sway/contracts/contract_test/out/release/contract_test.bin", LoadConfiguration::default(), )? 
.deploy_if_not_exists(&predicate, TxPolicies::default()) .await?; let contract_methods = MyContract::new(deploy_response.contract_id, predicate.clone()).methods(); let consensus_parameters = provider.consensus_parameters().await?; let deploy_fee = deploy_response.tx_status.unwrap().total_fee; assert_eq!( predicate .get_asset_balance(consensus_parameters.base_asset_id()) .await?, (predicate_balance - deploy_fee) as u128 ); let response = contract_methods.initialize_counter(42).call().await?; assert_eq!(42, response.value); assert_eq!( predicate .get_asset_balance(consensus_parameters.base_asset_id()) .await?, (predicate_balance - deploy_fee - response.tx_status.total_fee) as u128 ); Ok(()) } #[tokio::test] async fn predicate_contract_transfer() -> Result<()> { abigen!(Predicate( name = "MyPredicate", abi = "e2e/sway/types/predicates/predicate_vector/out/release/predicate_vector-abi.json" )); let predicate_data = MyPredicateEncoder::default().encode_data(2, 40, vec![2, 4, 42])?; let mut predicate: Predicate = Predicate::load_from( "sway/types/predicates/predicate_vector/out/release/predicate_vector.bin", )? .with_data(predicate_data); let num_coins = 4; let num_messages = 8; let amount = 300; let (provider, _predicate_balance, _receiver, _receiver_balance, _asset_id, _) = setup_predicate_test(predicate.address(), num_coins, num_messages, amount).await?; predicate.set_provider(provider.clone()); let contract_id = Contract::load_from( "sway/contracts/contract_test/out/release/contract_test.bin", LoadConfiguration::default(), )? .deploy_if_not_exists(&predicate, TxPolicies::default()) .await? .contract_id; let contract_balances = provider.get_contract_balances(&contract_id).await?; assert!(contract_balances.is_empty()); let amount = 300; predicate .force_transfer_to_contract( contract_id, amount, AssetId::zeroed(), TxPolicies::default(), ) .await?; let contract_balances = predicate .try_provider()? 
.get_contract_balances(&contract_id) .await?; assert_eq!(contract_balances.len(), 1); let random_asset_balance = contract_balances.get(&AssetId::zeroed()).unwrap(); assert_eq!(*random_asset_balance, 300); Ok(()) } #[tokio::test] async fn predicate_transfer_to_base_layer() -> Result<()> { use std::str::FromStr; abigen!(Predicate( name = "MyPredicate", abi = "e2e/sway/types/predicates/predicate_vector/out/release/predicate_vector-abi.json" )); let predicate_data = MyPredicateEncoder::default().encode_data(22, 20, vec![2, 4, 42])?; let mut predicate: Predicate = Predicate::load_from( "sway/types/predicates/predicate_vector/out/release/predicate_vector.bin", )? .with_data(predicate_data); let num_coins = 4; let num_messages = 8; let amount = 300; let (provider, _predicate_balance, _receiver, _receiver_balance, _asset_id, _) = setup_predicate_test(predicate.address(), num_coins, num_messages, amount).await?; predicate.set_provider(provider.clone()); let amount = 1000; let base_layer_address = Address::from_str("0x4710162c2e3a95a6faff05139150017c9e38e5e280432d546fae345d6ce6d8fe")?; let withdraw_response = predicate .withdraw_to_base_layer(base_layer_address, amount, TxPolicies::default()) .await?; // Create the next commit block to be able generate the proof provider.produce_blocks(1, None).await?; let proof = predicate .try_provider()? .get_message_proof( &withdraw_response.tx_id, &withdraw_response.nonce, None, Some(2), ) .await?; assert_eq!(proof.amount, amount); assert_eq!(proof.recipient, base_layer_address); Ok(()) } #[tokio::test] async fn predicate_transfer_with_signed_resources() -> Result<()> { abigen!(Predicate( name = "MyPredicate", abi = "e2e/sway/types/predicates/predicate_vector/out/release/predicate_vector-abi.json" )); let predicate_data = MyPredicateEncoder::default().encode_data(2, 40, vec![2, 4, 42])?; let mut predicate: Predicate = Predicate::load_from( "sway/types/predicates/predicate_vector/out/release/predicate_vector.bin", )? 
.with_data(predicate_data); let predicate_num_coins = 4; let predicate_num_messages = 3; let predicate_amount = 1000; let predicate_balance = (predicate_num_coins + predicate_num_messages) * predicate_amount; let signer = PrivateKeySigner::random(&mut thread_rng()); let wallet_num_coins = 4; let wallet_num_messages = 3; let wallet_amount = 1000; let wallet_balance = (wallet_num_coins + wallet_num_messages) * wallet_amount; let (mut coins, mut messages, asset_id) = get_test_coins_and_messages( predicate.address(), predicate_num_coins, predicate_num_messages, predicate_amount, 0, ); let (wallet_coins, wallet_messages, _) = get_test_coins_and_messages( signer.address(), wallet_num_coins, wallet_num_messages, wallet_amount, predicate_num_messages, ); coins.extend(wallet_coins); messages.extend(wallet_messages); let provider = setup_test_provider(coins, messages, None, None).await?; let wallet = Wallet::new(signer.clone(), provider.clone()); predicate.set_provider(provider.clone()); let mut inputs = wallet .get_asset_inputs_for_amount(asset_id, wallet_balance.into(), None) .await?; let predicate_inputs = predicate .get_asset_inputs_for_amount(asset_id, predicate_balance.into(), None) .await?; inputs.extend(predicate_inputs); let outputs = vec![Output::change(predicate.address(), 0, asset_id)]; let mut tb = ScriptTransactionBuilder::prepare_transfer(inputs, outputs, Default::default()); tb.add_signer(signer)?; let tx = tb.build(&provider).await?; let tx_status = provider.send_transaction_and_await_commit(tx).await?; assert_address_balance( &predicate.address(), &provider, &asset_id, predicate_balance + wallet_balance - tx_status.total_fee(), ) .await; Ok(()) } #[tokio::test] #[allow(unused_variables)] async fn contract_tx_and_call_params_with_predicate() -> Result<()> { use fuels::prelude::*; abigen!( Contract( name = "MyContract", abi = "e2e/sway/contracts/contract_test/out/release/contract_test-abi.json" ), Predicate( name = "MyPredicate", abi = 
"e2e/sway/types/predicates/predicate_vector/out/release/predicate_vector-abi.json" ) ); let predicate_data = MyPredicateEncoder::default().encode_data(22, 20, vec![2, 4, 42])?; let mut predicate: Predicate = Predicate::load_from( "sway/types/predicates/predicate_vector/out/release/predicate_vector.bin", )? .with_data(predicate_data); let num_coins = 1; let num_messages = 1; let amount = 1000; let (provider, predicate_balance, _receiver, _receiver_balance, _asset_id, _) = setup_predicate_test(predicate.address(), num_coins, num_messages, amount).await?; predicate.set_provider(provider.clone()); let deploy_response = Contract::load_from( "./sway/contracts/contract_test/out/release/contract_test.bin", LoadConfiguration::default(), )? .deploy_if_not_exists(&predicate, TxPolicies::default()) .await?; let contract_methods = MyContract::new(deploy_response.contract_id, predicate.clone()).methods(); let tx_policies = TxPolicies::default().with_tip(100); let call_params_amount = 100; let call_params = CallParameters::default() .with_amount(call_params_amount) .with_asset_id(AssetId::zeroed()); { let call_response = contract_methods .get_msg_amount() .with_tx_policies(tx_policies) .call_params(call_params.clone())? .call() .await?; let deploy_fee = deploy_response.tx_status.unwrap().total_fee; let call_fee = call_response.tx_status.total_fee; assert_eq!( predicate.get_asset_balance(&AssetId::zeroed()).await?, (predicate_balance - deploy_fee - call_params_amount - call_fee) as u128 ); } { let custom_asset = AssetId::from([1u8; 32]); let response = contract_methods .get_msg_amount() .call_params(call_params)? 
.add_custom_asset(custom_asset, 100, Some(Address::default())) .call() .await?; assert_eq!(predicate.get_asset_balance(&custom_asset).await?, 900); } Ok(()) } #[tokio::test] #[allow(unused_variables)] async fn diff_asset_predicate_payment() -> Result<()> { use fuels::prelude::*; abigen!( Contract( name = "MyContract", abi = "e2e/sway/contracts/contract_test/out/release/contract_test-abi.json" ), Predicate( name = "MyPredicate", abi = "e2e/sway/types/predicates/predicate_vector/out/release/predicate_vector-abi.json" ) ); let predicate_data = MyPredicateEncoder::default().encode_data(28, 14, vec![2, 4, 42])?; let mut predicate: Predicate = Predicate::load_from( "sway/types/predicates/predicate_vector/out/release/predicate_vector.bin", )? .with_data(predicate_data); let num_coins = 1; let num_messages = 1; let amount = 1_000_000_000; let (provider, _predicate_balance, _receiver, _receiver_balance, _asset_id, _) = setup_predicate_test(predicate.address(), num_coins, num_messages, amount).await?; predicate.set_provider(provider.clone()); let contract_id = Contract::load_from( "./sway/contracts/contract_test/out/release/contract_test.bin", LoadConfiguration::default(), )? .deploy_if_not_exists(&predicate, TxPolicies::default()) .await? .contract_id; let contract_methods = MyContract::new(contract_id, predicate.clone()).methods(); let call_params = CallParameters::default() .with_amount(1_000_000) .with_asset_id(AssetId::from([1u8; 32])); let response = contract_methods .get_msg_amount() .call_params(call_params)? 
.call() .await?; Ok(()) } #[tokio::test] async fn predicate_default_configurables() -> Result<()> { abigen!(Predicate( name = "MyPredicate", abi = "e2e/sway/predicates/predicate_configurables/out/release/predicate_configurables-abi.json" )); let new_struct = StructWithGeneric { field_1: 8u8, field_2: 16, }; let new_enum = EnumWithGeneric::VariantOne(true); let predicate_data = MyPredicateEncoder::default().encode_data( true, 8, (8, true), [253, 254, 255], new_struct, new_enum, )?; let mut predicate: Predicate = Predicate::load_from( "sway/predicates/predicate_configurables/out/release/predicate_configurables.bin", )? .with_data(predicate_data); let num_coins = 4; let num_messages = 8; let amount = 16; let (provider, predicate_balance, receiver, receiver_balance, asset_id, _) = setup_predicate_test(predicate.address(), num_coins, num_messages, amount).await?; predicate.set_provider(provider.clone()); let amount_to_send = predicate_balance - 1; predicate .transfer( receiver.address(), amount_to_send, asset_id, TxPolicies::default(), ) .await?; // The predicate has spent the funds assert_address_balance(&predicate.address(), &provider, &asset_id, 0).await; // Funds were transferred assert_address_balance( &receiver.address(), &provider, &asset_id, receiver_balance + amount_to_send, ) .await; Ok(()) } #[tokio::test] async fn predicate_configurables() -> Result<()> { // ANCHOR: predicate_configurables abigen!(Predicate( name = "MyPredicate", abi = "e2e/sway/predicates/predicate_configurables/out/release/predicate_configurables-abi.json" )); let new_tuple = (16, false); let new_array = [123, 124, 125]; let new_struct = StructWithGeneric { field_1: 32u8, field_2: 64, }; let new_enum = EnumWithGeneric::VariantTwo; let configurables = MyPredicateConfigurables::default() .with_U8(8)? .with_TUPLE(new_tuple)? .with_ARRAY(new_array)? .with_STRUCT(new_struct.clone())? 
.with_ENUM(new_enum.clone())?; let predicate_data = MyPredicateEncoder::default() .encode_data(true, 8u8, new_tuple, new_array, new_struct, new_enum)?; let mut predicate: Predicate = Predicate::load_from( "sway/predicates/predicate_configurables/out/release/predicate_configurables.bin", )? .with_data(predicate_data) .with_configurables(configurables); // ANCHOR_END: predicate_configurables let num_coins = 4; let num_messages = 8; let amount = 16; let (provider, predicate_balance, receiver, receiver_balance, asset_id, _) = setup_predicate_test(predicate.address(), num_coins, num_messages, amount).await?; predicate.set_provider(provider.clone()); let amount_to_send = predicate_balance - 1; let fee = predicate .transfer( receiver.address(), amount_to_send, asset_id, TxPolicies::default(), ) .await? .tx_status .total_fee; // The predicate has spent the funds assert_address_balance(&predicate.address(), &provider, &asset_id, 0).await; // Funds were transferred assert_address_balance( &receiver.address(), &provider, &asset_id, receiver_balance + predicate_balance - fee, ) .await; Ok(()) } #[tokio::test] async fn predicate_adjust_fee_persists_message_w_data() -> Result<()> { abigen!(Predicate( name = "MyPredicate", abi = "e2e/sway/predicates/basic_predicate/out/release/basic_predicate-abi.json" )); let predicate_data = MyPredicateEncoder::default().encode_data(4097, 4097)?; let mut predicate: Predicate = Predicate::load_from("sway/predicates/basic_predicate/out/release/basic_predicate.bin")? 
.with_data(predicate_data); let amount = 1000; let coins = setup_single_asset_coins(predicate.address(), AssetId::zeroed(), 1, amount); let message = get_test_message_w_data(predicate.address(), amount, Default::default()); let message_input = Input::resource_predicate( CoinType::Message(message.clone()), predicate.code().to_vec(), predicate.data().to_vec(), ); let provider = setup_test_provider(coins, vec![message.clone()], None, None).await?; predicate.set_provider(provider.clone()); let mut tb = ScriptTransactionBuilder::prepare_transfer( vec![message_input.clone()], vec![], TxPolicies::default(), ); predicate.adjust_for_fee(&mut tb, 0).await?; let tx = tb.build(&provider).await?; assert_eq!(tx.inputs().len(), 2); assert_eq!(tx.inputs()[0].message_id().unwrap(), message.message_id()); Ok(()) } #[tokio::test] async fn predicate_transfer_non_base_asset() -> Result<()> { abigen!(Predicate( name = "MyPredicate", abi = "e2e/sway/predicates/basic_predicate/out/release/basic_predicate-abi.json" )); let predicate_data = MyPredicateEncoder::default().encode_data(32, 32)?; let mut predicate: Predicate = Predicate::load_from("sway/predicates/basic_predicate/out/release/basic_predicate.bin")? 
.with_data(predicate_data); let signer = PrivateKeySigner::random(&mut thread_rng()); let amount = 5; let non_base_asset_id = AssetId::new([1; 32]); // wallet has base and predicate non base asset let mut coins = setup_single_asset_coins(signer.address(), AssetId::zeroed(), 1, amount); coins.extend(setup_single_asset_coins( predicate.address(), non_base_asset_id, 1, amount, )); let provider = setup_test_provider(coins, vec![], None, None).await?; predicate.set_provider(provider.clone()); let wallet = Wallet::new(signer.clone(), provider.clone()); let inputs = predicate .get_asset_inputs_for_amount(non_base_asset_id, amount.into(), None) .await?; let consensus_parameters = provider.consensus_parameters().await?; let outputs = vec![ Output::change(wallet.address(), 0, non_base_asset_id), Output::change(wallet.address(), 0, *consensus_parameters.base_asset_id()), ]; let mut tb = ScriptTransactionBuilder::prepare_transfer( inputs, outputs, TxPolicies::default().with_tip(1), ); tb.add_signer(signer)?; wallet.adjust_for_fee(&mut tb, 0).await?; let tx = tb.build(&provider).await?; provider .send_transaction_and_await_commit(tx) .await? .check(None)?; let wallet_balance = wallet.get_asset_balance(&non_base_asset_id).await?; assert_eq!(wallet_balance, amount as u128); Ok(()) } #[tokio::test] async fn predicate_can_access_manually_added_witnesses() -> Result<()> { abigen!(Predicate( name = "MyPredicate", abi = "e2e/sway/predicates/predicate_witnesses/out/release/predicate_witnesses-abi.json" )); let predicate_data = MyPredicateEncoder::default().encode_data(0, 1)?; let mut predicate: Predicate = Predicate::load_from( "sway/predicates/predicate_witnesses/out/release/predicate_witnesses.bin", )? 
.with_data(predicate_data); let num_coins = 4; let num_messages = 0; let amount = 16; let (provider, predicate_balance, receiver, receiver_balance, asset_id, _) = setup_predicate_test(predicate.address(), num_coins, num_messages, amount).await?; predicate.set_provider(provider.clone()); let amount_to_send = 12u64; let inputs = predicate .get_asset_inputs_for_amount(asset_id, amount_to_send.into(), None) .await?; let outputs = predicate.get_asset_outputs_for_amount(receiver.address(), asset_id, amount_to_send); let mut tx = ScriptTransactionBuilder::prepare_transfer( inputs, outputs, TxPolicies::default().with_witness_limit(32), ) .build(&provider) .await?; let witness = ABIEncoder::default().encode(&[64u64.into_token()])?; // u64 because this is VM memory let witness2 = ABIEncoder::default().encode(&[4096u64.into_token()])?; tx.append_witness(witness.into())?; tx.append_witness(witness2.into())?; let tx_status = provider.send_transaction_and_await_commit(tx).await?; let fee = tx_status.total_fee(); // The predicate has spent the funds assert_address_balance( &predicate.address(), &provider, &asset_id, predicate_balance - amount_to_send - fee, ) .await; // Funds were transferred assert_address_balance( &receiver.address(), &provider, &asset_id, receiver_balance + amount_to_send, ) .await; Ok(()) } #[tokio::test] async fn tx_id_not_changed_after_adding_witnesses() -> Result<()> { abigen!(Predicate( name = "MyPredicate", abi = "e2e/sway/predicates/predicate_witnesses/out/release/predicate_witnesses-abi.json" )); let predicate_data = MyPredicateEncoder::default().encode_data(0, 1)?; let mut predicate: Predicate = Predicate::load_from( "sway/predicates/predicate_witnesses/out/release/predicate_witnesses.bin", )? 
.with_data(predicate_data); let num_coins = 4; let num_messages = 0; let amount = 16; let (provider, _predicate_balance, receiver, _receiver_balance, asset_id, _) = setup_predicate_test(predicate.address(), num_coins, num_messages, amount).await?; predicate.set_provider(provider.clone()); let amount_to_send = 12u64; let inputs = predicate .get_asset_inputs_for_amount(asset_id, amount_to_send.into(), None) .await?; let outputs = predicate.get_asset_outputs_for_amount(receiver.address(), asset_id, amount_to_send); let mut tx = ScriptTransactionBuilder::prepare_transfer( inputs, outputs, TxPolicies::default().with_witness_limit(32), ) .build(&provider) .await?; let consensus_parameters = provider.consensus_parameters().await?; let chain_id = consensus_parameters.chain_id(); let tx_id = tx.id(chain_id); let witness = ABIEncoder::default().encode(&[64u64.into_token()])?; // u64 because this is VM memory let witness2 = ABIEncoder::default().encode(&[4096u64.into_token()])?; tx.append_witness(witness.into())?; tx.append_witness(witness2.into())?; let tx_id_after_witnesses = tx.id(chain_id); let tx_id_from_provider = provider.send_transaction(tx).await?; assert_eq!(tx_id, tx_id_after_witnesses); assert_eq!(tx_id, tx_id_from_provider); Ok(()) } #[tokio::test] async fn predicate_encoder_config_is_applied() -> Result<()> { abigen!(Predicate( name = "MyPredicate", abi = "e2e/sway/predicates/basic_predicate/out/release/basic_predicate-abi.json" )); { let _encoding_ok = MyPredicateEncoder::default() .encode_data(4097, 4097) .expect("should not fail as it uses the default encoder config"); } { let encoder_config = EncoderConfig { max_tokens: 1, ..Default::default() }; let encoding_error = MyPredicateEncoder::new(encoder_config) .encode_data(4097, 4097) .expect_err("should fail"); assert!( encoding_error .to_string() .contains("token limit `1` reached while encoding") ); } Ok(()) } #[tokio::test] async fn predicate_transfers_non_base_asset() -> Result<()> { abigen!(Predicate( name 
= "MyPredicate", abi = "e2e/sway/predicates/basic_predicate/out/release/basic_predicate-abi.json" )); let predicate_data = MyPredicateEncoder::default().encode_data(4097, 4097)?; let mut predicate: Predicate = Predicate::load_from("sway/predicates/basic_predicate/out/release/basic_predicate.bin")? .with_data(predicate_data); let num_coins = 4; let num_message = 6; let amount = 20; let (provider, _, receiver, _, _, _) = setup_predicate_test(predicate.address(), num_coins, num_message, amount).await?; predicate.set_provider(provider); let other_asset_id = AssetId::from([1u8; 32]); let send_amount = num_coins * amount; predicate .transfer( receiver.address(), send_amount, other_asset_id, TxPolicies::default(), ) .await?; assert_eq!(predicate.get_asset_balance(&other_asset_id).await?, 0,); assert_eq!( receiver.get_asset_balance(&other_asset_id).await?, send_amount as u128, ); Ok(()) } #[tokio::test] async fn predicate_with_invalid_data_fails() -> Result<()> { abigen!(Predicate( name = "MyPredicate", abi = "e2e/sway/predicates/basic_predicate/out/release/basic_predicate-abi.json" )); let predicate_data = MyPredicateEncoder::default().encode_data(0, 100)?;
rust
Apache-2.0
865e00c295de8b4a0a1ef7ac926c3c8266d5151b
2026-01-04T15:31:59.450823Z
true
FuelLabs/fuels-rs
https://github.com/FuelLabs/fuels-rs/blob/865e00c295de8b4a0a1ef7ac926c3c8266d5151b/e2e/tests/storage.rs
e2e/tests/storage.rs
use fuels::{ prelude::*, tx::StorageSlot, types::{Bits256, Bytes32}, }; #[tokio::test] async fn test_storage_initialization() -> Result<()> { abigen!(Contract( name = "MyContract", abi = "e2e/sway/contracts/storage/out/release/storage-abi.json" )); let wallet = launch_provider_and_get_wallet().await?; let key = Bytes32::from([1u8; 32]); let value = Bytes32::from([2u8; 32]); let storage_slot = StorageSlot::new(key, value); let storage_vec = vec![storage_slot.clone()]; let storage_configuration = StorageConfiguration::default().add_slot_overrides(storage_vec); let contract_id = Contract::load_from( "sway/contracts/storage/out/release/storage.bin", LoadConfiguration::default().with_storage_configuration(storage_configuration), )? .deploy_if_not_exists(&wallet, TxPolicies::default()) .await? .contract_id; let contract_instance = MyContract::new(contract_id, wallet.clone()); let result = contract_instance .methods() .get_value_b256(Bits256(key.into())) .call() .await? .value; assert_eq!(result.0, *value); Ok(()) } #[tokio::test] async fn test_init_storage_automatically() -> Result<()> { abigen!(Contract( name = "MyContract", abi = "e2e/sway/contracts/storage/out/release/storage-abi.json" )); let wallet = launch_provider_and_get_wallet().await?; let contract_id = Contract::load_from( "sway/contracts/storage/out/release/storage.bin", LoadConfiguration::default(), )? .deploy_if_not_exists(&wallet, TxPolicies::default()) .await? .contract_id; let contract_methods = MyContract::new(contract_id, wallet.clone()).methods(); { let key: Bytes32 = "eb390d9f85c8c849ff8aeb05c865ca66b37ba69a7bec8489b1c467f029b650af".parse()?; let value = contract_methods .get_value_b256(Bits256(*key)) .call() .await? .value; assert_eq!(value.0, [1u8; 32]); } { let key: Bytes32 = "419b1120ea993203d7e223dfbe76184322453d6f8de946e827a8669102ab395b".parse()?; let value = contract_methods .get_value_u64(Bits256(*key)) .call() .await? 
.value; assert_eq!(value, 64); } Ok(()) } #[tokio::test] async fn storage_load_error_messages() { { let json_path = "sway/contracts/storage/out/release/no_file_on_path.json"; let expected_error = format!("io: file \"{json_path}\" does not exist"); let error = StorageConfiguration::default() .add_slot_overrides_from_file(json_path) .expect_err("should have failed"); assert_eq!(error.to_string(), expected_error); } { let json_path = "sway/contracts/storage/out/release/storage.bin"; let expected_error = format!("expected \"{json_path}\" to have '.json' extension"); let error = StorageConfiguration::default() .add_slot_overrides_from_file(json_path) .expect_err("should have failed"); assert_eq!(error.to_string(), expected_error); } }
rust
Apache-2.0
865e00c295de8b4a0a1ef7ac926c3c8266d5151b
2026-01-04T15:31:59.450823Z
false
FuelLabs/fuels-rs
https://github.com/FuelLabs/fuels-rs/blob/865e00c295de8b4a0a1ef7ac926c3c8266d5151b/e2e/tests/debug_utils.rs
e2e/tests/debug_utils.rs
use fuels::{ core::{ codec::{ABIEncoder, ABIFormatter}, traits::Tokenizable, }, prelude::*, programs::{debug::ScriptType, executable::Executable}, }; #[tokio::test] async fn can_debug_single_call_tx() -> Result<()> { setup_program_test!( Wallets("wallet"), Abigen(Contract( name = "MyContract", project = "e2e/sway/types/contracts/nested_structs" )) ); let contract_id = Contract::load_from( "sway/types/contracts/nested_structs/out/release/nested_structs.bin", Default::default(), )? .contract_id(); let call_handler = MyContract::new(contract_id, wallet) .methods() .check_struct_integrity(AllStruct { some_struct: SomeStruct { field: 2, field_2: true, }, }); let abi = std::fs::read_to_string( "./sway/types/contracts/nested_structs/out/release/nested_structs-abi.json", ) .unwrap(); let decoder = ABIFormatter::from_json_abi(&abi)?; // without gas forwarding { let tb = call_handler .clone() .call_params(CallParameters::default().with_amount(10)) .unwrap() .transaction_builder() .await .unwrap(); let script = tb.script; let script_data = tb.script_data; let ScriptType::ContractCall(call_descriptions) = ScriptType::detect(&script, &script_data)? 
else { panic!("expected a contract call") }; assert_eq!(call_descriptions.len(), 1); let call_description = &call_descriptions[0]; assert_eq!(call_description.contract_id, contract_id); assert_eq!(call_description.amount, 10); assert_eq!(call_description.asset_id, AssetId::default()); assert_eq!( call_description.decode_fn_selector().unwrap(), "check_struct_integrity" ); assert!(call_description.gas_forwarded.is_none()); assert_eq!( decoder.decode_fn_args( &call_description.decode_fn_selector().unwrap(), call_description.encoded_args.as_slice() )?, vec!["AllStruct { some_struct: SomeStruct { field: 2, field_2: true } }"] ); } // with gas forwarding { let tb = call_handler .clone() .call_params( CallParameters::default() .with_amount(10) .with_gas_forwarded(20), ) .unwrap() .transaction_builder() .await .unwrap(); let script = tb.script; let script_data = tb.script_data; let ScriptType::ContractCall(call_descriptions) = ScriptType::detect(&script, &script_data)? else { panic!("expected a contract call") }; assert_eq!(call_descriptions.len(), 1); let call_description = &call_descriptions[0]; assert_eq!(call_description.contract_id, contract_id); assert_eq!(call_description.amount, 10); assert_eq!(call_description.asset_id, AssetId::default()); assert_eq!( call_description.decode_fn_selector().unwrap(), "check_struct_integrity" ); assert_eq!(call_description.gas_forwarded, Some(20)); assert_eq!( decoder.decode_fn_args( &call_description.decode_fn_selector().unwrap(), call_description.encoded_args.as_slice() )?, vec!["AllStruct { some_struct: SomeStruct { field: 2, field_2: true } }"] ); } Ok(()) } #[tokio::test] async fn can_debug_multi_call_tx() -> Result<()> { setup_program_test!( Wallets("wallet"), Abigen(Contract( name = "MyContract", project = "e2e/sway/types/contracts/nested_structs" )) ); let contract_id = Contract::load_from( "sway/types/contracts/nested_structs/out/release/nested_structs.bin", Default::default(), )? 
.contract_id(); let call1 = MyContract::new(contract_id, wallet.clone()) .methods() .check_struct_integrity(AllStruct { some_struct: SomeStruct { field: 2, field_2: true, }, }); let call2 = MyContract::new(contract_id, wallet.clone()) .methods() .i_am_called_differently( AllStruct { some_struct: SomeStruct { field: 2, field_2: true, }, }, MemoryAddress { contract_id, function_selector: 123, function_data: 456, }, ); let abi = std::fs::read_to_string( "./sway/types/contracts/nested_structs/out/release/nested_structs-abi.json", ) .unwrap(); let decoder = ABIFormatter::from_json_abi(&abi)?; // without gas forwarding { let first_call = call1 .clone() .call_params(CallParameters::default().with_amount(10)) .unwrap(); let second_call = call2 .clone() .call_params(CallParameters::default().with_amount(20)) .unwrap(); let tb = CallHandler::new_multi_call(wallet.clone()) .add_call(first_call) .add_call(second_call) .transaction_builder() .await .unwrap(); let script = tb.script; let script_data = tb.script_data; let ScriptType::ContractCall(call_descriptions) = ScriptType::detect(&script, &script_data)? 
else { panic!("expected a contract call") }; assert_eq!(call_descriptions.len(), 2); let call_description = &call_descriptions[0]; assert_eq!(call_description.contract_id, contract_id); assert_eq!(call_description.amount, 10); assert_eq!(call_description.asset_id, AssetId::default()); assert_eq!( call_description.decode_fn_selector().unwrap(), "check_struct_integrity" ); assert!(call_description.gas_forwarded.is_none()); assert_eq!( decoder.decode_fn_args( &call_description.decode_fn_selector().unwrap(), call_description.encoded_args.as_slice() )?, vec!["AllStruct { some_struct: SomeStruct { field: 2, field_2: true } }"] ); let call_description = &call_descriptions[1]; let fn_selector = call_description.decode_fn_selector().unwrap(); assert_eq!(call_description.contract_id, contract_id); assert_eq!(call_description.amount, 20); assert_eq!(call_description.asset_id, AssetId::default()); assert_eq!(fn_selector, "i_am_called_differently"); assert!(call_description.gas_forwarded.is_none()); assert_eq!( decoder.decode_fn_args(&fn_selector, call_description.encoded_args.as_slice())?, vec![ "AllStruct { some_struct: SomeStruct { field: 2, field_2: true } }".to_string(), format!( "MemoryAddress {{ contract_id: std::contract_id::ContractId {{ bits: Bits256({:?}) }}, function_selector: 123, function_data: 456 }}", contract_id.as_slice() ) ] ); } // with gas forwarding { let first_call = call1 .clone() .call_params( CallParameters::default() .with_amount(10) .with_gas_forwarded(15), ) .unwrap(); let second_call = call2 .clone() .call_params( CallParameters::default() .with_amount(20) .with_gas_forwarded(25), ) .unwrap(); let tb = CallHandler::new_multi_call(wallet.clone()) .add_call(first_call) .add_call(second_call) .transaction_builder() .await .unwrap(); let script = tb.script; let script_data = tb.script_data; let ScriptType::ContractCall(call_descriptions) = ScriptType::detect(&script, &script_data)? 
else { panic!("expected a contract call") }; assert_eq!(call_descriptions.len(), 2); let call_description = &call_descriptions[0]; assert_eq!(call_description.contract_id, contract_id); assert_eq!(call_description.amount, 10); assert_eq!(call_description.asset_id, AssetId::default()); assert_eq!( call_description.decode_fn_selector().unwrap(), "check_struct_integrity" ); assert_eq!(call_description.gas_forwarded, Some(15)); assert_eq!( decoder.decode_fn_args( &call_description.decode_fn_selector().unwrap(), call_description.encoded_args.as_slice() )?, vec!["AllStruct { some_struct: SomeStruct { field: 2, field_2: true } }"] ); let call_description = &call_descriptions[1]; assert_eq!(call_description.contract_id, contract_id); assert_eq!(call_description.amount, 20); assert_eq!(call_description.asset_id, AssetId::default()); assert_eq!( call_description.decode_fn_selector().unwrap(), "i_am_called_differently" ); assert_eq!(call_description.gas_forwarded, Some(25)); assert_eq!( decoder.decode_fn_args( &call_description.decode_fn_selector().unwrap(), call_description.encoded_args.as_slice() )?, vec![ "AllStruct { some_struct: SomeStruct { field: 2, field_2: true } }".to_string(), format!( "MemoryAddress {{ contract_id: std::contract_id::ContractId {{ bits: Bits256({:?}) }}, function_selector: 123, function_data: 456 }}", contract_id.as_slice() ) ] ); } Ok(()) } #[tokio::test] async fn can_debug_sway_script() -> Result<()> { setup_program_test!( Wallets("wallet"), Abigen(Script( name = "MyScript", project = "e2e/sway/scripts/script_struct" )), LoadScript( name = "script_instance", script = "MyScript", wallet = "wallet" ) ); let tb = script_instance .main(MyStruct { number: 10, boolean: false, }) .transaction_builder() .await .unwrap(); let abi = std::fs::read_to_string("./sway/scripts/script_struct/out/release/script_struct-abi.json")?; let decoder = ABIFormatter::from_json_abi(abi)?; let ScriptType::Other(desc) = ScriptType::detect(&tb.script, 
&tb.script_data).unwrap() else { panic!("expected a script") }; assert_eq!( decoder.decode_fn_args("main", desc.data.as_slice())?, vec!["MyStruct { number: 10, boolean: false }"] ); assert_eq!( decoder .decode_configurables(desc.data_section().unwrap()) .unwrap(), vec![ ("A_NUMBER".to_owned(), "11".to_owned()), ( "MY_STRUCT".to_owned(), "MyStruct { number: 10, boolean: true }".to_owned() ), ] ); Ok(()) } #[tokio::test] async fn debugs_sway_script_with_no_configurables() -> Result<()> { setup_program_test!( Wallets("wallet"), Abigen(Script( name = "MyScript", project = "e2e/sway/scripts/basic_script" )), LoadScript( name = "script_instance", script = "MyScript", wallet = "wallet" ) ); let tb = script_instance .main(10, 11) .transaction_builder() .await .unwrap(); let ScriptType::Other(desc) = ScriptType::detect(&tb.script, &tb.script_data).unwrap() else { panic!("expected a script") }; assert!(desc.data_section().is_none()); Ok(()) } fn generate_modern_sway_binary(len: usize) -> Vec<u8> { assert!( len > 24, "needs at least 24B to fit in the indicator_of_modern_binary, data & configurables offsets" ); let mut custom_script = vec![0; len]; let indicator_of_modern_binary = fuel_asm::op::jmpf(0x00, 0x04); custom_script[4..8].copy_from_slice(&indicator_of_modern_binary.to_bytes()); custom_script } #[tokio::test] async fn data_section_offset_not_set_if_out_of_bounds() -> Result<()> { let mut custom_script = generate_modern_sway_binary(100); custom_script[16..24].copy_from_slice(&u64::MAX.to_be_bytes()); let ScriptType::Other(desc) = ScriptType::detect(&custom_script, &[]).unwrap() else { panic!("expected a script") }; assert!(desc.data_section_offset.is_none()); Ok(()) } #[tokio::test] async fn can_detect_a_loader_script_w_data_section() -> Result<()> { setup_program_test!(Abigen(Script( name = "MyScript", project = "e2e/sway/scripts/script_struct" ))); let script_data = ABIEncoder::default() .encode(&[MyStruct { number: 10, boolean: false, } .into_token()]) .unwrap(); 
let executable = Executable::load_from("sway/scripts/script_struct/out/release/script_struct.bin") .unwrap() .convert_to_loader() .unwrap(); let expected_blob_id = executable.blob().id(); let script = executable.code(); let ScriptType::Loader { script, blob_id } = ScriptType::detect(&script, &script_data).unwrap() else { panic!("expected a loader script") }; assert_eq!(blob_id, expected_blob_id); let decoder = ABIFormatter::from_json_abi(std::fs::read_to_string( "./sway/scripts/script_struct/out/release/script_struct-abi.json", )?)?; assert_eq!( decoder.decode_fn_args("main", script.data.as_slice())?, vec!["MyStruct { number: 10, boolean: false }"] ); assert_eq!( decoder .decode_configurables(script.data_section().unwrap()) .unwrap(), vec![ ("A_NUMBER".to_owned(), "11".to_owned()), ( "MY_STRUCT".to_owned(), "MyStruct { number: 10, boolean: true }".to_owned() ), ] ); Ok(()) } #[tokio::test] async fn can_detect_a_loader_script_wo_data_section() -> Result<()> { setup_program_test!(Abigen(Script( name = "MyScript", project = "e2e/sway/scripts/empty" ))); let executable = Executable::load_from("sway/scripts/empty/out/release/empty.bin") .unwrap() .convert_to_loader() .unwrap(); let expected_blob_id = executable.blob().id(); let script = executable.code(); let ScriptType::Loader { blob_id, .. } = ScriptType::detect(&script, &[]).unwrap() else { panic!("expected a loader script") }; assert_eq!(blob_id, expected_blob_id); Ok(()) }
rust
Apache-2.0
865e00c295de8b4a0a1ef7ac926c3c8266d5151b
2026-01-04T15:31:59.450823Z
false
FuelLabs/fuels-rs
https://github.com/FuelLabs/fuels-rs/blob/865e00c295de8b4a0a1ef7ac926c3c8266d5151b/e2e/tests/contracts.rs
e2e/tests/contracts.rs
use std::time::Duration; use fuel_tx::SubAssetId; use fuels::{ accounts::signers::private_key::PrivateKeySigner, core::codec::{DecoderConfig, EncoderConfig, calldata, encode_fn_selector}, prelude::*, programs::DEFAULT_MAX_FEE_ESTIMATION_TOLERANCE, tx::{ ConsensusParameters, ContractIdExt, ContractParameters, FeeParameters, consensus_parameters::{ConsensusParametersV1, FeeParametersV1}, }, types::{ Bits256, Identity, SizedAsciiString, errors::transaction::Reason, input::Input, output::Output, }, }; use rand::thread_rng; use tokio::time::Instant; #[tokio::test] async fn test_multiple_args() -> Result<()> { setup_program_test!( Wallets("wallet"), Abigen(Contract( name = "TestContract", project = "e2e/sway/contracts/contract_test" )), Deploy( name = "contract_instance", contract = "TestContract", wallet = "wallet", random_salt = false, ), ); // Make sure we can call the contract with multiple arguments let contract_methods = contract_instance.methods(); let response = contract_methods.get(5, 6).call().await?; assert_eq!(response.value, 11); let t = MyType { x: 5, y: 6 }; let response = contract_methods.get_alt(t.clone()).call().await?; assert_eq!(response.value, t); let response = contract_methods.get_single(5).call().await?; assert_eq!(response.value, 5); Ok(()) } #[tokio::test] async fn test_contract_calling_contract() -> Result<()> { // Tests a contract call that calls another contract (FooCaller calls FooContract underneath) setup_program_test!( Wallets("wallet"), Abigen( Contract( name = "LibContract", project = "e2e/sway/contracts/lib_contract" ), Contract( name = "LibContractCaller", project = "e2e/sway/contracts/lib_contract_caller" ), ), Deploy( name = "lib_contract_instance", contract = "LibContract", wallet = "wallet", random_salt = false, ), Deploy( name = "lib_contract_instance2", contract = "LibContract", wallet = "wallet", random_salt = false, ), Deploy( name = "contract_caller_instance", contract = "LibContractCaller", wallet = "wallet", random_salt = 
false, ), ); let lib_contract_id = lib_contract_instance.contract_id(); let lib_contract_id2 = lib_contract_instance2.contract_id(); // Call the contract directly. It increments the given value. let response = lib_contract_instance.methods().increment(42).call().await?; assert_eq!(43, response.value); let response = contract_caller_instance .methods() .increment_from_contracts(lib_contract_id, lib_contract_id2, 42) // Note that the two lib_contract_instances have different types .with_contracts(&[&lib_contract_instance, &lib_contract_instance2]) .call() .await?; assert_eq!(86, response.value); // ANCHOR: external_contract let response = contract_caller_instance .methods() .increment_from_contract(lib_contract_id, 42) .with_contracts(&[&lib_contract_instance]) .call() .await?; // ANCHOR_END: external_contract assert_eq!(43, response.value); // ANCHOR: external_contract_ids let response = contract_caller_instance .methods() .increment_from_contract(lib_contract_id, 42) .with_contract_ids(&[lib_contract_id]) .call() .await?; // ANCHOR_END: external_contract_ids assert_eq!(43, response.value); Ok(()) } #[tokio::test] async fn test_reverting_transaction() -> Result<()> { setup_program_test!( Wallets("wallet"), Abigen(Contract( name = "RevertContract", project = "e2e/sway/contracts/revert_transaction_error" )), Deploy( name = "contract_instance", contract = "RevertContract", wallet = "wallet", random_salt = false, ), ); let response = contract_instance .methods() .make_transaction_fail(true) .call() .await; assert!(matches!( response, Err(Error::Transaction(Reason::Failure { revert_id, .. 
})) if revert_id == Some(128) )); Ok(()) } #[tokio::test] async fn test_multiple_read_calls() -> Result<()> { setup_program_test!( Wallets("wallet"), Abigen(Contract( name = "MultiReadContract", project = "e2e/sway/contracts/multiple_read_calls" )), Deploy( name = "contract_instance", contract = "MultiReadContract", wallet = "wallet", random_salt = false, ), ); let contract_methods = contract_instance.methods(); contract_methods.store(42).call().await?; // Use "simulate" because the methods don't actually // run a transaction, but just a dry-run let stored = contract_methods .read() .simulate(Execution::state_read_only()) .await?; assert_eq!(stored.value, 42); let stored = contract_methods .read() .simulate(Execution::state_read_only()) .await?; assert_eq!(stored.value, 42); Ok(()) } #[tokio::test] async fn test_multi_call_beginner() -> Result<()> { setup_program_test!( Wallets("wallet"), Abigen(Contract( name = "TestContract", project = "e2e/sway/contracts/contract_test" )), Deploy( name = "contract_instance", contract = "TestContract", wallet = "wallet", random_salt = false, ), ); let contract_methods = contract_instance.methods(); let call_handler_1 = contract_methods.get_single(7); let call_handler_2 = contract_methods.get_single(42); let multi_call_handler = CallHandler::new_multi_call(wallet.clone()) .add_call(call_handler_1) .add_call(call_handler_2); let (val_1, val_2): (u64, u64) = multi_call_handler.call().await?.value; assert_eq!(val_1, 7); assert_eq!(val_2, 42); Ok(()) } #[tokio::test] async fn test_multi_call_pro() -> Result<()> { setup_program_test!( Wallets("wallet"), Abigen(Contract( name = "TestContract", project = "e2e/sway/contracts/contract_test" )), Deploy( name = "contract_instance", contract = "TestContract", wallet = "wallet", random_salt = false, ), ); let my_type_1 = MyType { x: 1, y: 2 }; let my_type_2 = MyType { x: 3, y: 4 }; let contract_methods = contract_instance.methods(); let call_handler_1 = contract_methods.get_single(5); let 
call_handler_2 = contract_methods.get_single(6); let call_handler_3 = contract_methods.get_alt(my_type_1.clone()); let call_handler_4 = contract_methods.get_alt(my_type_2.clone()); let call_handler_5 = contract_methods.get_array([7; 2]); let call_handler_6 = contract_methods.get_array([42; 2]); let multi_call_handler = CallHandler::new_multi_call(wallet.clone()) .add_call(call_handler_1) .add_call(call_handler_2) .add_call(call_handler_3) .add_call(call_handler_4) .add_call(call_handler_5) .add_call(call_handler_6); let (val_1, val_2, type_1, type_2, array_1, array_2): ( u64, u64, MyType, MyType, [u64; 2], [u64; 2], ) = multi_call_handler.call().await?.value; assert_eq!(val_1, 5); assert_eq!(val_2, 6); assert_eq!(type_1, my_type_1); assert_eq!(type_2, my_type_2); assert_eq!(array_1, [7; 2]); assert_eq!(array_2, [42; 2]); Ok(()) } #[tokio::test] async fn test_contract_call_fee_estimation() -> Result<()> { setup_program_test!( Wallets("wallet"), Abigen(Contract( name = "TestContract", project = "e2e/sway/contracts/contract_test" )), Deploy( name = "contract_instance", contract = "TestContract", wallet = "wallet", random_salt = false, ), ); let gas_limit = 800; let tolerance = Some(0.2); let block_horizon = Some(1); let expected_script_gas = 800; let expected_total_gas = 8463; let expected_metered_bytes_size = 824; let estimated_transaction_cost = contract_instance .methods() .initialize_counter(42) .with_tx_policies(TxPolicies::default().with_script_gas_limit(gas_limit)) .estimate_transaction_cost(tolerance, block_horizon) .await?; assert_eq!(estimated_transaction_cost.script_gas, expected_script_gas); assert_eq!(estimated_transaction_cost.total_gas, expected_total_gas); assert_eq!( estimated_transaction_cost.metered_bytes_size, expected_metered_bytes_size ); Ok(()) } #[tokio::test] async fn contract_call_has_same_estimated_and_used_gas() -> Result<()> { setup_program_test!( Wallets("wallet"), Abigen(Contract( name = "TestContract", project = 
"e2e/sway/contracts/contract_test" )), Deploy( name = "contract_instance", contract = "TestContract", wallet = "wallet", random_salt = false, ), ); let contract_methods = contract_instance.methods(); let tolerance = Some(0.0); let block_horizon = Some(1); let estimated_total_gas = contract_methods .initialize_counter(42) .estimate_transaction_cost(tolerance, block_horizon) .await? .total_gas; let gas_used = contract_methods .initialize_counter(42) .call() .await? .tx_status .total_gas; assert_eq!(estimated_total_gas, gas_used); Ok(()) } #[tokio::test] async fn mult_call_has_same_estimated_and_used_gas() -> Result<()> { setup_program_test!( Wallets("wallet"), Abigen(Contract( name = "TestContract", project = "e2e/sway/contracts/contract_test" )), Deploy( name = "contract_instance", contract = "TestContract", wallet = "wallet", random_salt = false, ), ); let contract_methods = contract_instance.methods(); let call_handler_1 = contract_methods.initialize_counter(42); let call_handler_2 = contract_methods.get_array([42; 2]); let multi_call_handler = CallHandler::new_multi_call(wallet.clone()) .add_call(call_handler_1) .add_call(call_handler_2); let tolerance = Some(0.0); let block_horizon = Some(1); let estimated_total_gas = multi_call_handler .estimate_transaction_cost(tolerance, block_horizon) .await? .total_gas; let total_gas = multi_call_handler .call::<(u64, [u64; 2])>() .await? 
.tx_status .total_gas; assert_eq!(estimated_total_gas, total_gas); Ok(()) } #[tokio::test] async fn contract_method_call_respects_maturity_and_expiration() -> Result<()> { setup_program_test!( Wallets("wallet"), Abigen(Contract( name = "BlockHeightContract", project = "e2e/sway/contracts/transaction_block_height" )), Deploy( name = "contract_instance", contract = "BlockHeightContract", wallet = "wallet", random_salt = false, ), ); let provider = wallet.provider(); let maturity = 10; let expiration = 20; let call_handler = contract_instance .methods() .calling_this_will_produce_a_block() .with_tx_policies( TxPolicies::default() .with_maturity(maturity) .with_expiration(expiration), ); { let err = call_handler .clone() .call() .await .expect_err("maturity not reached"); assert!(err.to_string().contains("TransactionMaturity")); } { provider.produce_blocks(15, None).await?; call_handler .clone() .call() .await .expect("should succeed. Block height between `maturity` and `expiration`"); } { provider.produce_blocks(15, None).await?; let err = call_handler.call().await.expect_err("expiration reached"); assert!(err.to_string().contains("TransactionExpiration")); } Ok(()) } #[tokio::test] async fn test_auth_msg_sender_from_sdk() -> Result<()> { setup_program_test!( Wallets("wallet"), Abigen(Contract( name = "AuthContract", project = "e2e/sway/contracts/auth_testing_contract" )), Deploy( name = "contract_instance", contract = "AuthContract", wallet = "wallet", random_salt = false, ), ); // Contract returns true if `msg_sender()` matches `wallet.address()`. 
let response = contract_instance .methods() .check_msg_sender(wallet.address()) .call() .await?; assert!(response.value); Ok(()) } #[tokio::test] async fn test_large_return_data() -> Result<()> { setup_program_test!( Wallets("wallet"), Abigen(Contract( name = "TestContract", project = "e2e/sway/contracts/large_return_data" )), Deploy( name = "contract_instance", contract = "TestContract", wallet = "wallet", random_salt = false, ), ); let contract_methods = contract_instance.methods(); let res = contract_methods.get_id().call().await?; assert_eq!( res.value.0, [ 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255 ] ); // One word-sized string let res = contract_methods.get_small_string().call().await?; assert_eq!(res.value, "gggggggg"); // Two word-sized string let res = contract_methods.get_large_string().call().await?; assert_eq!(res.value, "ggggggggg"); // Large struct will be bigger than a `WORD`. let res = contract_methods.get_large_struct().call().await?; assert_eq!(res.value.foo, 12); assert_eq!(res.value.bar, 42); // Array will be returned in `ReturnData`. let res = contract_methods.get_large_array().call().await?; assert_eq!(res.value, [1, 2]); let res = contract_methods.get_contract_id().call().await?; // First `value` is from `CallResponse`. // Second `value` is from the `ContractId` type. 
assert_eq!( res.value, ContractId::from([ 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255 ]) ); Ok(()) } #[tokio::test] async fn can_handle_function_called_new() -> Result<()> { setup_program_test!( Wallets("wallet"), Abigen(Contract( name = "TestContract", project = "e2e/sway/contracts/contract_test" )), Deploy( name = "contract_instance", contract = "TestContract", wallet = "wallet", random_salt = false, ), ); let response = contract_instance.methods().new().call().await?.value; assert_eq!(response, 12345); Ok(()) } #[tokio::test] async fn test_contract_setup_macro_deploy_with_salt() -> Result<()> { // ANCHOR: contract_setup_macro_multi setup_program_test!( Wallets("wallet"), Abigen( Contract( name = "LibContract", project = "e2e/sway/contracts/lib_contract" ), Contract( name = "LibContractCaller", project = "e2e/sway/contracts/lib_contract_caller" ), ), Deploy( name = "lib_contract_instance", contract = "LibContract", wallet = "wallet", random_salt = false, ), Deploy( name = "contract_caller_instance", contract = "LibContractCaller", wallet = "wallet", ), Deploy( name = "contract_caller_instance2", contract = "LibContractCaller", wallet = "wallet", ), ); let lib_contract_id = lib_contract_instance.contract_id(); let contract_caller_id = contract_caller_instance.contract_id(); let contract_caller_id2 = contract_caller_instance2.contract_id(); // Because we deploy with salt, we can deploy the same contract multiple times assert_ne!(contract_caller_id, contract_caller_id2); // The first contract can be called because they were deployed on the same provider let response = contract_caller_instance .methods() .increment_from_contract(lib_contract_id, 42) .with_contracts(&[&lib_contract_instance]) .call() .await?; assert_eq!(43, response.value); let response = contract_caller_instance2 .methods() .increment_from_contract(lib_contract_id, 42) 
.with_contracts(&[&lib_contract_instance]) .call() .await?; assert_eq!(43, response.value); // ANCHOR_END: contract_setup_macro_multi Ok(()) } #[tokio::test] async fn test_wallet_getter() -> Result<()> { setup_program_test!( Wallets("wallet"), Abigen(Contract( name = "TestContract", project = "e2e/sway/contracts/contract_test" )), Deploy( name = "contract_instance", contract = "TestContract", wallet = "wallet", random_salt = false, ), ); assert_eq!(contract_instance.account().address(), wallet.address()); //`contract_id()` is tested in // async fn test_contract_calling_contract() -> Result<()> { Ok(()) } #[tokio::test] async fn test_connect_wallet() -> Result<()> { // ANCHOR: contract_setup_macro_manual_wallet let config = WalletsConfig::new(Some(2), Some(1), Some(DEFAULT_COIN_AMOUNT)); let mut wallets = launch_custom_provider_and_get_wallets(config, None, None).await?; let wallet = wallets.pop().unwrap(); let wallet_2 = wallets.pop().unwrap(); setup_program_test!( Abigen(Contract( name = "TestContract", project = "e2e/sway/contracts/contract_test" )), Deploy( name = "contract_instance", contract = "TestContract", wallet = "wallet", random_salt = false, ), ); // ANCHOR_END: contract_setup_macro_manual_wallet // pay for call with wallet let tx_policies = TxPolicies::default() .with_tip(100) .with_script_gas_limit(1_000_000); contract_instance .methods() .initialize_counter(42) .with_tx_policies(tx_policies) .call() .await?; // confirm that funds have been deducted let wallet_balance = wallet.get_asset_balance(&Default::default()).await?; assert!(DEFAULT_COIN_AMOUNT as u128 > wallet_balance); // pay for call with wallet_2 contract_instance .with_account(wallet_2.clone()) .methods() .initialize_counter(42) .with_tx_policies(tx_policies) .call() .await?; // confirm there are no changes to wallet, wallet_2 has been charged let wallet_balance_second_call = wallet.get_asset_balance(&Default::default()).await?; let wallet_2_balance = 
wallet_2.get_asset_balance(&Default::default()).await?; assert_eq!(wallet_balance_second_call, wallet_balance); assert!(DEFAULT_COIN_AMOUNT as u128 > wallet_2_balance); Ok(()) } async fn setup_output_variable_estimation_test() -> Result<(Vec<Wallet>, [Identity; 3], AssetId, ContractId)> { let wallet_config = WalletsConfig::new(Some(3), None, None); let wallets = launch_custom_provider_and_get_wallets(wallet_config, None, None).await?; let contract_id = Contract::load_from( "sway/contracts/token_ops/out/release/token_ops.bin", LoadConfiguration::default(), )? .deploy_if_not_exists(&wallets[0], TxPolicies::default()) .await? .contract_id; let mint_asset_id = contract_id.asset_id(&SubAssetId::zeroed()); let addresses = wallets .iter() .map(|wallet| wallet.address().into()) .collect::<Vec<_>>() .try_into() .unwrap(); Ok((wallets, addresses, mint_asset_id, contract_id)) } #[tokio::test] async fn test_output_variable_estimation() -> Result<()> { abigen!(Contract( name = "MyContract", abi = "e2e/sway/contracts/token_ops/out/release/token_ops-abi.json" )); let (wallets, addresses, mint_asset_id, contract_id) = setup_output_variable_estimation_test().await?; let contract_instance = MyContract::new(contract_id, wallets[0].clone()); let contract_methods = contract_instance.methods(); let amount = 1000; { // Should fail due to lack of output variables let response = contract_methods .mint_to_addresses(amount, addresses) .call() .await; assert!(matches!( response, Err(Error::Transaction(Reason::Failure { .. 
})) )); } { // Should add 3 output variables automatically let _ = contract_methods .mint_to_addresses(amount, addresses) .with_variable_output_policy(VariableOutputPolicy::EstimateMinimum) .call() .await?; for wallet in wallets.iter() { let balance = wallet.get_asset_balance(&mint_asset_id).await?; assert_eq!(balance, amount as u128); } } Ok(()) } #[tokio::test] async fn test_output_variable_estimation_multicall() -> Result<()> { abigen!(Contract( name = "MyContract", abi = "e2e/sway/contracts/token_ops/out/release/token_ops-abi.json" )); let (wallets, addresses, mint_asset_id, contract_id) = setup_output_variable_estimation_test().await?; let contract_instance = MyContract::new(contract_id, wallets[0].clone()); let contract_methods = contract_instance.methods(); const NUM_OF_CALLS: u64 = 3; let amount = 1000; let total_amount = amount * NUM_OF_CALLS; let mut multi_call_handler = CallHandler::new_multi_call(wallets[0].clone()); for _ in 0..NUM_OF_CALLS { let call_handler = contract_methods.mint_to_addresses(amount, addresses); multi_call_handler = multi_call_handler.add_call(call_handler); } wallets[0] .force_transfer_to_contract( contract_id, total_amount, AssetId::zeroed(), TxPolicies::default(), ) .await .unwrap(); let base_layer_address = Bits256([1u8; 32]); let call_handler = contract_methods.send_message(base_layer_address, amount); multi_call_handler = multi_call_handler.add_call(call_handler); let _ = multi_call_handler .with_variable_output_policy(VariableOutputPolicy::EstimateMinimum) .call::<((), (), ())>() .await?; for wallet in wallets.iter() { let balance = wallet.get_asset_balance(&mint_asset_id).await?; assert_eq!(balance, 3 * amount as u128); } Ok(()) } #[tokio::test] async fn test_contract_instance_get_balances() -> Result<()> { let mut rng = thread_rng(); let signer = PrivateKeySigner::random(&mut rng); let (coins, asset_ids) = setup_multiple_assets_coins(signer.address(), 2, 4, 8); let random_asset_id = asset_ids[1]; let provider = 
setup_test_provider(coins.clone(), vec![], None, None).await?; let wallet = Wallet::new(signer, provider.clone()); setup_program_test!( Abigen(Contract( name = "TestContract", project = "e2e/sway/contracts/contract_test" )), Deploy( name = "contract_instance", contract = "TestContract", wallet = "wallet", random_salt = false, ), ); let contract_id = contract_instance.contract_id(); // Check the current balance of the contract with id 'contract_id' let contract_balances = contract_instance.get_balances().await?; assert!(contract_balances.is_empty()); // Transfer an amount to the contract let amount = 8; wallet .force_transfer_to_contract(contract_id, amount, random_asset_id, TxPolicies::default()) .await?; // Check that the contract now has 1 coin let contract_balances = contract_instance.get_balances().await?; assert_eq!(contract_balances.len(), 1); let random_asset_balance = contract_balances.get(&random_asset_id).unwrap(); assert_eq!(*random_asset_balance, amount); Ok(()) } #[tokio::test] async fn contract_call_futures_implement_send() -> Result<()> { use std::future::Future; fn tokio_spawn_imitation<T>(_: T) where T: Future + Send + 'static, { } setup_program_test!( Wallets("wallet"), Abigen(Contract( name = "TestContract", project = "e2e/sway/contracts/contract_test" )), Deploy( name = "contract_instance", contract = "TestContract", wallet = "wallet", random_salt = false, ), ); tokio_spawn_imitation(async move { contract_instance .methods() .initialize_counter(42) .call() .await .unwrap(); }); Ok(()) } #[tokio::test] async fn test_contract_set_estimation() -> Result<()> { setup_program_test!( Wallets("wallet"), Abigen( Contract( name = "LibContract", project = "e2e/sway/contracts/lib_contract" ), Contract( name = "LibContractCaller", project = "e2e/sway/contracts/lib_contract_caller" ), ), Deploy( name = "lib_contract_instance", contract = "LibContract", wallet = "wallet", random_salt = false, ), Deploy( name = "contract_caller_instance", contract = 
"LibContractCaller", wallet = "wallet", random_salt = false, ), ); let lib_contract_id = lib_contract_instance.contract_id(); let res = lib_contract_instance.methods().increment(42).call().await?; assert_eq!(43, res.value); { // Should fail due to missing external contracts let res = contract_caller_instance .methods() .increment_from_contract(lib_contract_id, 42) .call() .await; assert!(matches!( res, Err(Error::Transaction(Reason::Failure { .. })) )); } let res = contract_caller_instance .methods() .increment_from_contract(lib_contract_id, 42) .determine_missing_contracts() .await? .call() .await?; assert_eq!(43, res.value); Ok(()) } #[tokio::test] async fn test_output_variable_contract_id_estimation_multicall() -> Result<()> { setup_program_test!( Wallets("wallet"), Abigen( Contract( name = "LibContract", project = "e2e/sway/contracts/lib_contract" ), Contract( name = "LibContractCaller", project = "e2e/sway/contracts/lib_contract_caller" ), Contract( name = "TestContract", project = "e2e/sway/contracts/contract_test" ), ), Deploy( name = "lib_contract_instance", contract = "LibContract", wallet = "wallet", random_salt = false, ), Deploy( name = "contract_caller_instance", contract = "LibContractCaller", wallet = "wallet", random_salt = false, ), Deploy( name = "contract_test_instance", contract = "TestContract", wallet = "wallet", random_salt = false, ), ); let lib_contract_id = lib_contract_instance.contract_id(); let contract_methods = contract_caller_instance.methods(); let mut multi_call_handler = CallHandler::new_multi_call(wallet.clone()).with_tx_policies(Default::default()); for _ in 0..3 { let call_handler = contract_methods.increment_from_contract(lib_contract_id, 42); multi_call_handler = multi_call_handler.add_call(call_handler); } // add call that does not need ContractId let contract_methods = contract_test_instance.methods(); let call_handler = contract_methods.get(5, 6); multi_call_handler = multi_call_handler.add_call(call_handler); let 
call_response = multi_call_handler .determine_missing_contracts() .await? .call::<(u64, u64, u64, u64)>() .await?; assert_eq!(call_response.value, (43, 43, 43, 11)); Ok(()) } #[tokio::test] async fn test_contract_call_with_non_default_max_input() -> Result<()> { use fuels::{ tx::{ConsensusParameters, TxParameters}, types::coin::Coin, }; let mut consensus_parameters = ConsensusParameters::default(); let tx_params = TxParameters::default() .with_max_inputs(123) .with_max_size(1_000_000); consensus_parameters.set_tx_params(tx_params); let contract_params = ContractParameters::default().with_contract_max_size(1_000_000); consensus_parameters.set_contract_params(contract_params); let mut rng = thread_rng(); let signer = PrivateKeySigner::random(&mut rng); let coins: Vec<Coin> = setup_single_asset_coins( signer.address(), Default::default(), DEFAULT_NUM_COINS, DEFAULT_COIN_AMOUNT, ); let chain_config = ChainConfig { consensus_parameters: consensus_parameters.clone(), ..ChainConfig::default() }; let provider = setup_test_provider(coins, vec![], None, Some(chain_config)).await?; let wallet = Wallet::new(signer, provider.clone()); assert_eq!(consensus_parameters, provider.consensus_parameters().await?); setup_program_test!( Abigen(Contract( name = "TestContract", project = "e2e/sway/contracts/contract_test" )), Deploy( name = "contract_instance", contract = "TestContract", wallet = "wallet", random_salt = false, ), ); let response = contract_instance.methods().get(5, 6).call().await?; assert_eq!(response.value, 11); Ok(()) } #[tokio::test] async fn test_add_custom_assets() -> Result<()> { let initial_amount = 100_000; let asset_base = AssetConfig { id: AssetId::zeroed(), num_coins: 1, coin_amount: initial_amount, }; let asset_id_1 = AssetId::from([3u8; 32]); let asset_1 = AssetConfig { id: asset_id_1, num_coins: 1, coin_amount: initial_amount, }; let asset_id_2 = AssetId::from([1u8; 32]); let asset_2 = AssetConfig { id: asset_id_2, num_coins: 1, coin_amount: initial_amount, 
}; let assets = vec![asset_base, asset_1, asset_2]; let num_wallets = 2; let wallet_config = WalletsConfig::new_multiple_assets(num_wallets, assets); let mut wallets = launch_custom_provider_and_get_wallets(wallet_config, None, None).await?; let wallet_1 = wallets.pop().unwrap(); let wallet_2 = wallets.pop().unwrap(); setup_program_test!( Abigen(Contract( name = "MyContract", project = "e2e/sway/contracts/contract_test" )), Deploy( name = "contract_instance", contract = "MyContract", wallet = "wallet_1", random_salt = false, ), );
rust
Apache-2.0
865e00c295de8b4a0a1ef7ac926c3c8266d5151b
2026-01-04T15:31:59.450823Z
true
FuelLabs/fuels-rs
https://github.com/FuelLabs/fuels-rs/blob/865e00c295de8b4a0a1ef7ac926c3c8266d5151b/e2e/tests/from_token.rs
e2e/tests/from_token.rs
use fuels::{core::traits::Tokenizable, prelude::*, types::Token}; #[tokio::test] async fn create_struct_from_decoded_tokens() -> Result<()> { abigen!(Contract( name = "SimpleContract", abi = "e2e/sway/types/contracts/nested_structs/out/release/nested_structs-abi.json" )); let u32_token = Token::U32(10); let bool_token = Token::Bool(true); let struct_from_tokens = SomeStruct::from_token(Token::Struct(vec![u32_token, bool_token]))?; assert_eq!(10, struct_from_tokens.field); assert!(struct_from_tokens.field_2); Ok(()) } #[tokio::test] async fn create_nested_struct_from_decoded_tokens() -> Result<()> { abigen!(Contract( name = "SimpleContract", abi = "e2e/sway/types/contracts/nested_structs/out/release/nested_structs-abi.json" )); let u32_token = Token::U32(10); let bool_token = Token::Bool(true); let inner_struct_token = Token::Struct(vec![u32_token, bool_token]); let nested_struct_from_tokens = AllStruct::from_token(Token::Struct(vec![inner_struct_token]))?; assert_eq!(10, nested_struct_from_tokens.some_struct.field); assert!(nested_struct_from_tokens.some_struct.field_2); Ok(()) }
rust
Apache-2.0
865e00c295de8b4a0a1ef7ac926c3c8266d5151b
2026-01-04T15:31:59.450823Z
false
FuelLabs/fuels-rs
https://github.com/FuelLabs/fuels-rs/blob/865e00c295de8b4a0a1ef7ac926c3c8266d5151b/e2e/tests/wallets.rs
e2e/tests/wallets.rs
use fuels::{ accounts::signers::private_key::PrivateKeySigner, prelude::*, types::{coin_type::CoinType, input::Input, output::Output}, }; use rand::{Rng, thread_rng}; async fn assert_address_balance( address: &Address, provider: &Provider, asset_id: &AssetId, amount: u128, ) { let balance = provider .get_asset_balance(address, asset_id) .await .expect("Could not retrieve balance"); assert_eq!(balance, amount); } #[tokio::test] async fn test_wallet_balance_api_multi_asset() -> Result<()> { let signer = PrivateKeySigner::random(&mut rand::thread_rng()); let number_of_assets = 7; let coins_per_asset = 21; let amount_per_coin = 11; let (coins, asset_ids) = setup_multiple_assets_coins( signer.address(), number_of_assets, coins_per_asset, amount_per_coin, ); let provider = setup_test_provider(coins.clone(), vec![], None, None).await?; let wallet = Wallet::new(signer, provider.clone()); let balances = wallet.get_balances().await?; assert_eq!(balances.len() as u64, number_of_assets); for asset_id in asset_ids { let balance = wallet.get_asset_balance(&asset_id).await?; assert_eq!(balance, (coins_per_asset * amount_per_coin) as u128); let expected_key = asset_id.to_string(); assert!(balances.contains_key(&expected_key)); assert_eq!( *balances.get(&expected_key).unwrap(), (coins_per_asset * amount_per_coin) as u128 ); } Ok(()) } #[tokio::test] async fn test_wallet_balance_api_single_asset() -> Result<()> { let signer = PrivateKeySigner::random(&mut rand::thread_rng()); let number_of_coins = 21; let amount_per_coin = 11; let coins = setup_single_asset_coins( signer.address(), AssetId::zeroed(), number_of_coins, amount_per_coin, ); let provider = setup_test_provider(coins.clone(), vec![], None, None).await?; let wallet = Wallet::new(signer, provider.clone()); for coin in coins { let balance = wallet.get_asset_balance(&coin.asset_id).await?; assert_eq!(balance, (number_of_coins * amount_per_coin) as u128); } let balances = wallet.get_balances().await?; let expected_key = 
AssetId::zeroed().to_string(); assert_eq!(balances.len(), 1); // only the base asset assert!(balances.contains_key(&expected_key)); assert_eq!( *balances.get(&expected_key).unwrap(), (number_of_coins * amount_per_coin) as u128 ); Ok(()) } fn base_asset_wallet_config(num_wallets: u64) -> WalletsConfig { let asset_configs = vec![AssetConfig { id: AssetId::zeroed(), num_coins: 20, coin_amount: 20, }]; WalletsConfig::new_multiple_assets(num_wallets, asset_configs) } #[tokio::test] async fn adjust_fee_empty_transaction() -> Result<()> { let wallet = launch_provider_and_get_wallet().await?; let mut tb = ScriptTransactionBuilder::prepare_transfer(vec![], vec![], TxPolicies::default()); assert!(tb.inputs().is_empty()); assert!(tb.outputs().is_empty()); wallet.add_witnesses(&mut tb)?; wallet.adjust_for_fee(&mut tb, 0).await?; assert!(!tb.inputs().is_empty(), "inputs should be added"); assert_eq!(tb.outputs().len(), 1, "output should be added"); let tx = tb.build(wallet.provider()).await?; let total_amount_inputs: u64 = tx.inputs().iter().map(|i| i.amount().unwrap()).sum(); assert!( total_amount_inputs > tx.max_fee().unwrap(), "amount should cover tx" ); let expected_outputs = vec![Output::change(wallet.address(), 0, AssetId::zeroed())]; assert_eq!(tx.outputs(), &expected_outputs); Ok(()) } #[tokio::test] async fn adjust_for_fee_with_message_data_input() -> Result<()> { let wallet_signer = PrivateKeySigner::random(&mut rand::thread_rng()); let receiver_signer = PrivateKeySigner::random(&mut rand::thread_rng()); let messages = setup_single_message( Address::default(), wallet_signer.address(), 100, 0.into(), vec![1, 2, 3], // has data ); let asset_id = AssetId::zeroed(); let coins = setup_single_asset_coins(wallet_signer.address(), asset_id, 1, 50); let provider = setup_test_provider(coins, vec![messages], None, None).await?; let wallet = Wallet::new(wallet_signer, provider.clone()); let receiver = Wallet::new(receiver_signer, provider.clone()); let amount_to_send = 14; let 
message = wallet.get_messages().await?.pop().unwrap(); let input = Input::resource_signed(CoinType::Message(message)); let outputs = wallet.get_asset_outputs_for_amount(receiver.address(), asset_id, amount_to_send); { // message with data as only input - without adjust for fee let mut tb = ScriptTransactionBuilder::prepare_transfer( vec![input.clone()], outputs.clone(), TxPolicies::default(), ); wallet.add_witnesses(&mut tb)?; let tx = tb.build(wallet.provider()).await?; let err = provider .send_transaction_and_await_commit(tx) .await .unwrap_err(); assert!(err.to_string().contains("Validity(NoSpendableInput)")); } { // message with data as only input - with adjust for fee let mut tb = ScriptTransactionBuilder::prepare_transfer( vec![input.clone()], outputs.clone(), TxPolicies::default(), ); wallet.adjust_for_fee(&mut tb, 0).await.unwrap(); wallet.add_witnesses(&mut tb)?; let tx = tb.build(wallet.provider()).await?; assert_eq!(receiver.get_asset_balance(&asset_id).await?, 0); provider .send_transaction_and_await_commit(tx) .await .unwrap(); assert_eq!( receiver.get_asset_balance(&asset_id).await?, amount_to_send as u128 ); } Ok(()) } #[tokio::test] async fn adjust_fee_resources_to_transfer_with_base_asset() -> Result<()> { let wallet = launch_provider_and_get_wallet().await?; let base_amount = 30; let base_asset_id = AssetId::zeroed(); let inputs = wallet .get_asset_inputs_for_amount(base_asset_id, base_amount.into(), None) .await?; let outputs = wallet.get_asset_outputs_for_amount(Address::zeroed(), base_asset_id, base_amount); let mut tb = ScriptTransactionBuilder::prepare_transfer(inputs, outputs, TxPolicies::default()); wallet.adjust_for_fee(&mut tb, base_amount.into()).await?; wallet.add_witnesses(&mut tb)?; let tx = tb.build(wallet.provider()).await?; let total_amount_inputs: u64 = tx.inputs().iter().map(|i| i.amount().unwrap()).sum(); assert!(total_amount_inputs > tx.max_fee().unwrap()); // can cover tx let expected_outputs = vec![ 
Output::coin(Address::zeroed(), base_amount, base_asset_id), Output::change(wallet.address(), 0, base_asset_id), ]; assert_eq!(tx.outputs(), &expected_outputs); Ok(()) } #[tokio::test] async fn test_transfer() -> Result<()> { let wallet_1_signer = PrivateKeySigner::random(&mut rand::thread_rng()); let wallet_2_signer = PrivateKeySigner::random(&mut rand::thread_rng()); let amount = 100; let num_coins = 1; let base_asset_id = AssetId::zeroed(); let mut coins_1 = setup_single_asset_coins(wallet_1_signer.address(), base_asset_id, num_coins, amount); let coins_2 = setup_single_asset_coins(wallet_2_signer.address(), base_asset_id, num_coins, amount); coins_1.extend(coins_2); let provider = setup_test_provider(coins_1, vec![], None, None).await?; let wallet_1 = Wallet::new(wallet_1_signer, provider.clone()); let wallet_2 = Wallet::new(wallet_2_signer, provider.clone()).lock(); let _ = wallet_1 .transfer( wallet_2.address(), amount / 2, Default::default(), TxPolicies::default(), ) .await .unwrap(); let wallet_2_coins = wallet_2.get_coins(base_asset_id).await.unwrap(); let wallet_2_balance = wallet_2.get_asset_balance(&base_asset_id).await?; assert_eq!(wallet_2_coins.len(), 2); assert_eq!(wallet_2_balance, (amount + amount / 2) as u128); Ok(()) } #[tokio::test] async fn send_transfer_transactions() -> Result<()> { let amount = 5; let (wallet_1, wallet_2) = setup_transfer_test(amount).await?; // Configure transaction policies let tip = 2; let script_gas_limit = 500_000; let maturity = 0; let tx_policies = TxPolicies::default() .with_tip(tip) .with_maturity(maturity) .with_script_gas_limit(script_gas_limit); // Transfer 1 from wallet 1 to wallet 2. let amount_to_send = 1; let base_asset_id = AssetId::zeroed(); let tx_id = wallet_1 .transfer( wallet_2.address(), amount_to_send, base_asset_id, tx_policies, ) .await? .tx_id; // Assert that the transaction was properly configured. let res = wallet_1 .try_provider()? .get_transaction_by_id(&tx_id) .await? 
.unwrap(); let script: ScriptTransaction = match res.transaction { TransactionType::Script(tx) => tx, _ => panic!("Received unexpected tx type!"), }; // Transfer scripts uses set `script_gas_limit` despite not having script code assert_eq!(script.gas_limit(), script_gas_limit); assert_eq!(script.maturity().unwrap(), maturity); let wallet_1_spendable_resources = wallet_1 .get_spendable_resources(base_asset_id, 1, None) .await?; let wallet_2_spendable_resources = wallet_2 .get_spendable_resources(base_asset_id, 1, None) .await?; let wallet_1_all_coins = wallet_1.get_coins(base_asset_id).await?; let wallet_2_all_coins = wallet_2.get_coins(base_asset_id).await?; // wallet_1 has now only one spent coin assert_eq!(wallet_1_spendable_resources.len(), 1); assert_eq!(wallet_1_all_coins.len(), 1); // Check that wallet two now has a coin. assert_eq!(wallet_2_all_coins.len(), 1); assert_eq!(wallet_2_spendable_resources.len(), 1); Ok(()) } #[tokio::test] async fn transfer_coins_with_change() -> Result<()> { const AMOUNT: u64 = 5; let (wallet_1, wallet_2) = setup_transfer_test(AMOUNT).await?; // Transfer 2 from wallet 1 to wallet 2. const SEND_AMOUNT: u64 = 2; let fee = wallet_1 .transfer( wallet_2.address(), SEND_AMOUNT, AssetId::zeroed(), TxPolicies::default(), ) .await? .tx_status .total_fee; let base_asset_id = AssetId::zeroed(); let wallet_1_final_coins = wallet_1 .get_spendable_resources(base_asset_id, 1, None) .await?; // Assert that we've sent 2 from wallet 1, resulting in an amount of 3 in wallet 1. 
let resulting_amount = wallet_1_final_coins.first().unwrap(); assert_eq!(resulting_amount.amount(), AMOUNT - SEND_AMOUNT - fee); let wallet_2_final_coins = wallet_2.get_coins(base_asset_id).await?; assert_eq!(wallet_2_final_coins.len(), 1); let total_amount: u64 = wallet_2_final_coins.iter().map(|c| c.amount).sum(); assert_eq!(total_amount, SEND_AMOUNT); Ok(()) } #[tokio::test] async fn test_wallet_get_coins() -> Result<()> { const AMOUNT: u64 = 1000; const NUM_COINS: u64 = 3; let addr = Address::zeroed(); let coins = setup_single_asset_coins(addr, AssetId::zeroed(), NUM_COINS, AMOUNT); let provider = setup_test_provider(coins, vec![], None, None).await?; let wallet = Wallet::new_locked(addr, provider.clone()); let consensus_parameters = provider.consensus_parameters().await?; let wallet_initial_coins = wallet .get_coins(*consensus_parameters.base_asset_id()) .await?; let total_amount: u64 = wallet_initial_coins.iter().map(|c| c.amount).sum(); assert_eq!(wallet_initial_coins.len(), NUM_COINS as usize); assert_eq!(total_amount, AMOUNT * NUM_COINS); Ok(()) } async fn setup_transfer_test(amount: u64) -> Result<(Wallet, Wallet)> { let wallet_1_signer = PrivateKeySigner::random(&mut rand::thread_rng()); let coins = setup_single_asset_coins(wallet_1_signer.address(), AssetId::zeroed(), 1, amount); let provider = setup_test_provider(coins, vec![], None, None).await?; let wallet_1 = Wallet::new(wallet_1_signer, provider.clone()); let wallet_2 = Wallet::random(&mut thread_rng(), provider.clone()); Ok((wallet_1, wallet_2)) } #[tokio::test] async fn transfer_more_than_owned() -> Result<()> { const AMOUNT: u64 = 1000000; let (wallet_1, wallet_2) = setup_transfer_test(AMOUNT).await?; // Transferring more than balance should fail. 
let response = wallet_1 .transfer( wallet_2.address(), AMOUNT * 2, Default::default(), TxPolicies::default(), ) .await; assert!(response.is_err()); let wallet_2_coins = wallet_2.get_coins(AssetId::zeroed()).await?; assert_eq!(wallet_2_coins.len(), 0); Ok(()) } #[tokio::test] async fn transfer_coins_of_non_base_asset() -> Result<()> { const AMOUNT: u64 = 10000; let wallet_1_signer = PrivateKeySigner::random(&mut rand::thread_rng()); let asset_id: AssetId = AssetId::from([1; 32usize]); let mut coins = setup_single_asset_coins(wallet_1_signer.address(), asset_id, 1, AMOUNT); // setup base asset coins to pay tx fees let base_coins = setup_single_asset_coins(wallet_1_signer.address(), AssetId::zeroed(), 1, AMOUNT); coins.extend(base_coins); let provider = setup_test_provider(coins, vec![], None, None).await?; let wallet_1 = Wallet::new(wallet_1_signer, provider.clone()); let wallet_2 = Wallet::random(&mut thread_rng(), provider.clone()); const SEND_AMOUNT: u64 = 200; let _ = wallet_1 .transfer( wallet_2.address(), SEND_AMOUNT, asset_id, TxPolicies::default(), ) .await?; let wallet_1_balance = wallet_1.get_asset_balance(&asset_id).await?; assert_eq!(wallet_1_balance, (AMOUNT - SEND_AMOUNT) as u128); let wallet_2_final_coins = wallet_2.get_coins(asset_id).await?; assert_eq!(wallet_2_final_coins.len(), 1); let total_amount: u64 = wallet_2_final_coins.iter().map(|c| c.amount).sum(); assert_eq!(total_amount, SEND_AMOUNT); Ok(()) } #[tokio::test] async fn test_transfer_with_multiple_signatures() -> Result<()> { let wallet_config = base_asset_wallet_config(5); let wallets = launch_custom_provider_and_get_wallets(wallet_config, None, None).await?; let provider = wallets[0].try_provider()?; let receiver = Wallet::random(&mut thread_rng(), provider.clone()); let amount_to_transfer = 20u64; let mut inputs = vec![]; let consensus_parameters = provider.consensus_parameters().await?; for wallet in &wallets { inputs.extend( wallet .get_asset_inputs_for_amount( 
*consensus_parameters.base_asset_id(), amount_to_transfer.into(), None, ) .await?, ); } let amount_to_receive = amount_to_transfer * wallets.len() as u64; // all change goes to the first wallet let outputs = wallets[0].get_asset_outputs_for_amount( receiver.address(), *consensus_parameters.base_asset_id(), amount_to_receive, ); let mut tb = ScriptTransactionBuilder::prepare_transfer(inputs, outputs, TxPolicies::default()); for wallet in wallets.iter() { wallet.add_witnesses(&mut tb)? } let tx = tb.build(provider).await?; provider.send_transaction_and_await_commit(tx).await?; assert_eq!( receiver .get_asset_balance(consensus_parameters.base_asset_id()) .await?, amount_to_receive as u128, ); Ok(()) } #[tokio::test] async fn wallet_transfer_respects_maturity_and_expiration() -> Result<()> { let wallet = launch_provider_and_get_wallet().await?; let asset_id = AssetId::zeroed(); let wallet_balance = wallet.get_asset_balance(&asset_id).await?; let provider = wallet.provider(); let receiver: Address = thread_rng().r#gen(); let maturity = 10; let expiration = 20; let tx_policies = TxPolicies::default() .with_maturity(maturity) .with_expiration(expiration); let amount_to_send = 10; { let err = wallet .transfer(receiver, amount_to_send, asset_id, tx_policies) .await .expect_err("maturity not reached"); assert!(err.to_string().contains("TransactionMaturity")); } let transaction_fee = { provider.produce_blocks(15, None).await?; wallet .transfer(receiver, amount_to_send, asset_id, tx_policies) .await .expect("should succeed. 
Block height between `maturity` and `expiration`") .tx_status .total_fee }; { provider.produce_blocks(15, None).await?; let err = wallet .transfer(receiver, amount_to_send, asset_id, tx_policies) .await .expect_err("expiration reached"); assert!(err.to_string().contains("TransactionExpiration")); } // Wallet has spent the funds assert_address_balance( &wallet.address(), provider, &asset_id, wallet_balance - amount_to_send as u128 - transaction_fee as u128, ) .await; // Funds were transferred assert_address_balance(&receiver, provider, &asset_id, amount_to_send as u128).await; Ok(()) }
rust
Apache-2.0
865e00c295de8b4a0a1ef7ac926c3c8266d5151b
2026-01-04T15:31:59.450823Z
false
FuelLabs/fuels-rs
https://github.com/FuelLabs/fuels-rs/blob/865e00c295de8b4a0a1ef7ac926c3c8266d5151b/e2e/tests/configurables.rs
e2e/tests/configurables.rs
use fuels::{ core::codec::EncoderConfig, prelude::*, types::{Bits256, SizedAsciiString, U256}, }; #[tokio::test] async fn contract_default_configurables() -> Result<()> { abigen!(Contract( name = "MyContract", abi = "e2e/sway/contracts/configurables/out/release/configurables-abi.json" )); let wallet = launch_provider_and_get_wallet().await?; let contract_id = Contract::load_from( "sway/contracts/configurables/out/release/configurables.bin", LoadConfiguration::default(), )? .deploy_if_not_exists(&wallet, TxPolicies::default()) .await? .contract_id; let contract_instance = MyContract::new(contract_id, wallet.clone()); let response = contract_instance .methods() .return_configurables() .call() .await?; let expected_value = ( true, 8, 16, 32, 63, U256::from(8), Bits256([1; 32]), "fuel".try_into()?, (8, true), [253, 254, 255], StructWithGeneric { field_1: 8u8, field_2: 16, }, EnumWithGeneric::VariantOne(true), ); assert_eq!(response.value, expected_value); Ok(()) } #[tokio::test] async fn script_default_configurables() -> Result<()> { setup_program_test!( Wallets("wallet"), Abigen(Script( name = "MyScript", project = "e2e/sway/scripts/script_configurables" )), LoadScript( name = "script_instance", script = "MyScript", wallet = "wallet" ) ); let mut script_instance = script_instance; script_instance.convert_into_loader().await?; let response = script_instance.main().call().await?; let expected_value = ( true, 8, 16, 32, 63, U256::from(8), Bits256([1; 32]), "fuel".try_into()?, (8, true), [253, 254, 255], StructWithGeneric { field_1: 8u8, field_2: 16, }, EnumWithGeneric::VariantOne(true), ); assert_eq!(response.value, expected_value); Ok(()) } #[tokio::test] async fn contract_configurables() -> Result<()> { // ANCHOR: contract_configurables abigen!(Contract( name = "MyContract", abi = "e2e/sway/contracts/configurables/out/release/configurables-abi.json" )); let wallet = launch_provider_and_get_wallet().await?; let str_4: SizedAsciiString<4> = "FUEL".try_into()?; let 
new_struct = StructWithGeneric { field_1: 16u8, field_2: 32, }; let new_enum = EnumWithGeneric::VariantTwo; let configurables = MyContractConfigurables::default() .with_BOOL(false)? .with_U8(7)? .with_U16(15)? .with_U32(31)? .with_U64(63)? .with_U256(U256::from(8))? .with_B256(Bits256([2; 32]))? .with_STR_4(str_4.clone())? .with_TUPLE((7, false))? .with_ARRAY([252, 253, 254])? .with_STRUCT(new_struct.clone())? .with_ENUM(new_enum.clone())?; let contract_id = Contract::load_from( "sway/contracts/configurables/out/release/configurables.bin", LoadConfiguration::default().with_configurables(configurables), )? .deploy_if_not_exists(&wallet, TxPolicies::default()) .await? .contract_id; let contract_instance = MyContract::new(contract_id, wallet.clone()); // ANCHOR_END: contract_configurables let response = contract_instance .methods() .return_configurables() .call() .await?; let expected_value = ( false, 7, 15, 31, 63, U256::from(8), Bits256([2; 32]), str_4, (7, false), [252, 253, 254], new_struct, new_enum, ); assert_eq!(response.value, expected_value); Ok(()) } #[tokio::test] async fn contract_manual_configurables() -> Result<()> { setup_program_test!( Abigen(Contract( name = "MyContract", project = "e2e/sway/contracts/configurables" )), Wallets("wallet") ); let str_4: SizedAsciiString<4> = "FUEL".try_into()?; let new_struct = StructWithGeneric { field_1: 16u8, field_2: 32, }; let new_enum = EnumWithGeneric::VariantTwo; let configurables = MyContractConfigurables::default() .with_BOOL(false)? .with_U8(7)? .with_U16(15)? .with_U32(31)? .with_U64(63)? .with_U256(U256::from(8))? .with_B256(Bits256([2; 32]))? .with_STR_4(str_4.clone())? .with_TUPLE((7, false))? .with_ARRAY([252, 253, 254])? .with_STRUCT(new_struct.clone())? .with_ENUM(new_enum.clone())?; let contract_id = Contract::load_from( "sway/contracts/configurables/out/release/configurables.bin", LoadConfiguration::default(), )? 
.with_configurables(configurables) .deploy_if_not_exists(&wallet, TxPolicies::default()) .await? .contract_id; let contract_instance = MyContract::new(contract_id, wallet.clone()); let response = contract_instance .methods() .return_configurables() .call() .await?; let expected_value = ( false, 7, 15, 31, 63, U256::from(8), Bits256([2; 32]), str_4, (7, false), [252, 253, 254], new_struct, new_enum, ); assert_eq!(response.value, expected_value); Ok(()) } #[tokio::test] async fn script_configurables() -> Result<()> { // ANCHOR: script_configurables abigen!(Script( name = "MyScript", abi = "e2e/sway/scripts/script_configurables/out/release/script_configurables-abi.json" )); let wallet = launch_provider_and_get_wallet().await?; let bin_path = "sway/scripts/script_configurables/out/release/script_configurables.bin"; let instance = MyScript::new(wallet, bin_path); let str_4: SizedAsciiString<4> = "FUEL".try_into()?; let new_struct = StructWithGeneric { field_1: 16u8, field_2: 32, }; let new_enum = EnumWithGeneric::VariantTwo; let configurables = MyScriptConfigurables::new(EncoderConfig { max_tokens: 5, ..Default::default() }) .with_BOOL(false)? .with_U8(7)? .with_U16(15)? .with_U32(31)? .with_U64(63)? .with_U256(U256::from(8))? .with_B256(Bits256([2; 32]))? .with_STR_4(str_4.clone())? .with_TUPLE((7, false))? .with_ARRAY([252, 253, 254])? .with_STRUCT(new_struct.clone())? 
.with_ENUM(new_enum.clone())?; let response = instance .with_configurables(configurables) .main() .call() .await?; // ANCHOR_END: script_configurables let expected_value = ( false, 7, 15, 31, 63, U256::from(8), Bits256([2; 32]), str_4, (7, false), [252, 253, 254], new_struct, new_enum, ); assert_eq!(response.value, expected_value); Ok(()) } #[tokio::test] async fn configurable_encoder_config_is_applied() { abigen!(Script( name = "MyScript", abi = "e2e/sway/scripts/script_configurables/out/release/script_configurables-abi.json" )); let new_struct = StructWithGeneric { field_1: 16u8, field_2: 32, }; { let _configurables = MyScriptConfigurables::default() .with_STRUCT(new_struct.clone()) .expect("no encoder config, it works"); } { let encoder_config = EncoderConfig { max_tokens: 1, ..Default::default() }; // Fails when a wrong encoder config is set let configurables_error = MyScriptConfigurables::new(encoder_config) .with_STRUCT(new_struct) .expect_err("should error"); assert!( configurables_error .to_string() .contains("token limit `1` reached while encoding. Try increasing it"), ) } }
rust
Apache-2.0
865e00c295de8b4a0a1ef7ac926c3c8266d5151b
2026-01-04T15:31:59.450823Z
false
FuelLabs/fuels-rs
https://github.com/FuelLabs/fuels-rs/blob/865e00c295de8b4a0a1ef7ac926c3c8266d5151b/e2e/tests/types_predicates.rs
e2e/tests/types_predicates.rs
use std::{default::Default, path::Path}; use fuels::{ accounts::{Account, predicate::Predicate, signers::private_key::PrivateKeySigner}, prelude::*, types::{AssetId, Bits256, U256, coin::Coin, message::Message}, }; async fn assert_predicate_spendable(data: Vec<u8>, project_path: impl AsRef<Path>) -> Result<()> { let binary_path = project_binary(project_path); let mut predicate: Predicate = Predicate::load_from(&binary_path)?.with_data(data); let num_coins = 4; let num_messages = 8; let amount = 16; let (provider, predicate_balance, receiver, receiver_balance, asset_id) = setup_predicate_test(predicate.address(), num_coins, num_messages, amount).await?; predicate.set_provider(provider.clone()); let amount_to_send = 136; let fee = predicate .transfer( receiver.address(), amount_to_send, asset_id, TxPolicies::default(), ) .await? .tx_status .total_fee; // The predicate has spent the funds assert_address_balance( &predicate.address(), &provider, &asset_id, predicate_balance - amount_to_send - fee, ) .await; // Funds were transferred assert_address_balance( &receiver.address(), &provider, &asset_id, receiver_balance + amount_to_send, ) .await; Ok(()) } fn project_binary(project_root: impl AsRef<Path>) -> String { let project_root = project_root.as_ref(); let project_name = project_root .file_name() .expect("Couldn't extract project name") .to_str() .unwrap(); project_root .join(format!("out/release/{project_name}.bin")) .display() .to_string() } async fn assert_address_balance( address: &Address, provider: &Provider, asset_id: &AssetId, amount: u64, ) { let balance = provider .get_asset_balance(address, asset_id) .await .expect("Could not retrieve balance"); assert_eq!(balance, amount as u128); } fn get_test_coins_and_messages( address: Address, num_coins: u64, num_messages: u64, amount: u64, ) -> (Vec<Coin>, Vec<Message>, AssetId) { let asset_id = AssetId::zeroed(); let coins = setup_single_asset_coins(address, asset_id, num_coins, amount); let messages = 
(0..num_messages) .map(|i| setup_single_message(Address::default(), address, amount, i.into(), vec![])) .collect(); (coins, messages, asset_id) } // Setup function used to assign coins and messages to a predicate address // and create a `receiver` wallet async fn setup_predicate_test( predicate_address: Address, num_coins: u64, num_messages: u64, amount: u64, ) -> Result<(Provider, u64, Wallet, u64, AssetId)> { let receiver_num_coins = 1; let receiver_amount = 1; let receiver_balance = receiver_num_coins * receiver_amount; let predicate_balance = (num_coins + num_messages) * amount; let receiver_signer = PrivateKeySigner::random(&mut rand::thread_rng()); let (mut coins, messages, asset_id) = get_test_coins_and_messages(predicate_address, num_coins, num_messages, amount); coins.extend(setup_single_asset_coins( receiver_signer.address(), asset_id, receiver_num_coins, receiver_amount, )); let node_config = NodeConfig { starting_gas_price: 0, ..Default::default() }; let provider = setup_test_provider(coins, messages, Some(node_config), None).await?; let receiver = Wallet::new(receiver_signer, provider.clone()); Ok(( provider, predicate_balance, receiver, receiver_balance, asset_id, )) } #[tokio::test] async fn spend_predicate_coins_messages_single_u64() -> Result<()> { abigen!(Predicate( name = "MyPredicate", abi = "e2e/sway/types/predicates/u64/out/release/u64-abi.json" )); let data = MyPredicateEncoder::default().encode_data(32768)?; assert_predicate_spendable(data, "sway/types/predicates/u64").await?; Ok(()) } #[tokio::test] async fn spend_predicate_coins_messages_address() -> Result<()> { abigen!(Predicate( name = "MyPredicate", abi = "e2e/sway/types/predicates/address/out/release/address-abi.json" )); let addr: Address = "0xef86afa9696cf0dc6385e2c407a6e159a1103cefb7e2ae0636fb33d3cb2a9e4a".parse()?; let data = MyPredicateEncoder::default().encode_data(addr)?; assert_predicate_spendable(data, "sway/types/predicates/address").await?; Ok(()) } #[tokio::test] async fn 
spend_predicate_coins_messages_enums() -> Result<()> { abigen!(Predicate( name = "MyPredicate", abi = "e2e/sway/types/predicates/enums/out/release/enums-abi.json" )); let data = MyPredicateEncoder::default().encode_data(TestEnum::A(32), AnotherTestEnum::B(32))?; assert_predicate_spendable(data, "sway/types/predicates/enums").await?; Ok(()) } #[tokio::test] async fn spend_predicate_coins_messages_structs() -> Result<()> { abigen!(Predicate( name = "MyPredicate", abi = "e2e/sway/types/predicates/structs/out/release/structs-abi.json" )); let data = MyPredicateEncoder::default().encode_data( TestStruct { value: 192 }, AnotherTestStruct { value: 64, number: 128, }, )?; assert_predicate_spendable(data, "sway/types/predicates/structs").await?; Ok(()) } #[tokio::test] async fn spend_predicate_coins_messages_tuple() -> Result<()> { abigen!(Predicate( name = "MyPredicate", abi = "e2e/sway/types/predicates/predicate_tuples/out/release/predicate_tuples-abi.json" )); let data = MyPredicateEncoder::default() .encode_data((16, TestStruct { value: 32 }, TestEnum::Value(64)), 128)?; assert_predicate_spendable(data, "sway/types/predicates/predicate_tuples").await?; Ok(()) } #[tokio::test] async fn spend_predicate_coins_messages_vector() -> Result<()> { abigen!(Predicate( name = "MyPredicate", abi = "e2e/sway/types/predicates/predicate_vector/out/release/predicate_vector-abi.json" )); let data = MyPredicateEncoder::default().encode_data(18, 24, vec![2, 4, 42])?; assert_predicate_spendable(data, "sway/types/predicates/predicate_vector").await?; Ok(()) } #[tokio::test] async fn spend_predicate_coins_messages_vectors() -> Result<()> { abigen!(Predicate( name = "MyPredicate", abi = "e2e/sway/types/predicates/predicate_vectors/out/release/predicate_vectors-abi.json" )); let u32_vec = vec![0, 4, 3]; let vec_in_vec = vec![vec![0, 2, 2], vec![0, 1, 2]]; let struct_in_vec = vec![SomeStruct { a: 8 }, SomeStruct { a: 1 }]; let vec_in_struct = SomeStruct { a: vec![0, 16, 2] }; let array_in_vec = 
vec![[0u64, 1u64], [32u64, 1u64]]; let vec_in_enum = SomeEnum::A(vec![0, 1, 128]); let enum_in_vec = vec![SomeEnum::A(0), SomeEnum::A(16)]; let b256_in_vec = vec![Bits256([2; 32]), Bits256([2; 32])]; let tuple_in_vec = vec![(0, 0), (128, 1)]; let vec_in_tuple = (vec![0, 64, 2], vec![0, 1, 2]); let vec_in_a_vec_in_a_struct_in_a_vec = vec![ SomeStruct { a: vec![vec![0, 1, 2], vec![3, 4, 5]], }, SomeStruct { a: vec![vec![6, 7, 8], vec![9, 32, 11]], }, ]; let vec_in_array = [vec![0, 64, 2], vec![0, 1, 2]]; let data = MyPredicateEncoder::default().encode_data( u32_vec, vec_in_vec, struct_in_vec, vec_in_struct, array_in_vec, vec_in_array, vec_in_enum, enum_in_vec, b256_in_vec, tuple_in_vec, vec_in_tuple, vec_in_a_vec_in_a_struct_in_a_vec, )?; assert_predicate_spendable(data, "sway/types/predicates/predicate_vectors").await?; Ok(()) } #[tokio::test] async fn spend_predicate_coins_messages_generics() -> Result<()> { abigen!(Predicate( name = "MyPredicate", abi = "e2e/sway/types/predicates/predicate_generics/out/release/predicate_generics-abi.json" )); let data = MyPredicateEncoder::default().encode_data( GenericStruct { value: 64u8 }, GenericEnum::Generic(GenericStruct { value: 64u16 }), )?; assert_predicate_spendable(data, "sway/types/predicates/predicate_generics").await?; Ok(()) } #[tokio::test] async fn spend_predicate_coins_messages_bytes_hash() -> Result<()> { abigen!(Predicate( name = "MyPredicate", abi = "e2e/sway/types/predicates/predicate_bytes_hash/out/release/predicate_bytes_hash-abi.json" )); let bytes = Bytes::from_hex_str( "0x75a448b91bb82a255757e61ba3eb7afe282c09842485268d4d72a027ec0cffc80500000000", )?; let bits256 = Bits256::from_hex_str( "0x173d69ea3d0aa050d01ff7cc60ccd4579b567c465cd115c6876c2da4a332fb99", )?; let data = MyPredicateEncoder::default().encode_data(bytes, bits256)?; assert_predicate_spendable(data, "sway/types/predicates/predicate_bytes_hash").await?; Ok(()) } #[tokio::test] async fn spend_predicate_coins_messages_bytes() -> Result<()> { 
abigen!(Predicate( name = "MyPredicate", abi = "e2e/sway/types/predicates/predicate_bytes/out/release/predicate_bytes-abi.json" )); let bytes = Bytes(vec![40, 41, 42]); let wrapper = Wrapper { inner: vec![bytes.clone(), bytes.clone()], inner_enum: SomeEnum::Second(bytes), }; let data = MyPredicateEncoder::default().encode_data(wrapper)?; assert_predicate_spendable(data, "sway/types/predicates/predicate_bytes").await?; Ok(()) } #[tokio::test] async fn spend_predicate_coins_messages_raw_slice() -> Result<()> { abigen!(Predicate( name = "MyPredicate", abi = "e2e/sway/types/predicates/predicate_raw_slice/out/release/predicate_raw_slice-abi.json" )); let raw_slice = RawSlice(vec![40, 41, 42]); let wrapper = Wrapper { inner: vec![raw_slice.clone(), raw_slice.clone()], inner_enum: SomeEnum::Second(raw_slice), }; let data = MyPredicateEncoder::default().encode_data(wrapper)?; assert_predicate_spendable(data, "sway/types/predicates/predicate_raw_slice").await?; Ok(()) } fn u128_from(parts: (u64, u64)) -> u128 { let bytes: [u8; 16] = [parts.0.to_be_bytes(), parts.1.to_be_bytes()] .concat() .try_into() .unwrap(); u128::from_be_bytes(bytes) } #[tokio::test] async fn predicate_handles_u128() -> Result<()> { abigen!(Predicate( name = "MyPredicate", abi = "e2e/sway/types/predicates/predicate_u128/out/release/predicate_u128-abi.json" )); let data = MyPredicateEncoder::default().encode_data(u128_from((8, 2)))?; assert_predicate_spendable(data, "sway/types/predicates/predicate_u128").await?; Ok(()) } #[tokio::test] async fn predicate_handles_b256() -> Result<()> { abigen!(Predicate( name = "MyPredicate", abi = "e2e/sway/types/predicates/predicate_b256/out/release/predicate_b256-abi.json" )); let data = MyPredicateEncoder::default().encode_data(Bits256([1; 32]))?; assert_predicate_spendable(data, "sway/types/predicates/predicate_b256").await?; Ok(()) } fn u256_from(parts: (u64, u64, u64, u64)) -> U256 { let bytes: [u8; 32] = [ parts.0.to_be_bytes(), parts.1.to_be_bytes(), 
parts.2.to_be_bytes(), parts.3.to_be_bytes(), ] .concat() .try_into() .unwrap(); U256::from(bytes) } #[tokio::test] async fn predicate_handles_u256() -> Result<()> { abigen!(Predicate( name = "MyPredicate", abi = "e2e/sway/types/predicates/predicate_u256/out/release/predicate_u256-abi.json" )); let data = MyPredicateEncoder::default().encode_data(u256_from((10, 11, 12, 13)))?; assert_predicate_spendable(data, "sway/types/predicates/predicate_u256").await?; Ok(()) } #[tokio::test] async fn predicate_handles_std_string() -> Result<()> { abigen!(Predicate( name = "MyPredicate", abi = "e2e/sway/types/predicates/predicate_std_lib_string/out/release/predicate_std_lib_string-abi.json" )); let data = MyPredicateEncoder::default().encode_data(10, 11, String::from("Hello World"))?; assert_predicate_spendable(data, "sway/types/predicates/predicate_std_lib_string").await?; Ok(()) } #[tokio::test] async fn predicate_string_slice() -> Result<()> { abigen!(Predicate( name = "MyPredicate", abi = "e2e/sway/types/predicates/predicate_string_slice/out/release/predicate_string_slice-abi.json" )); let data = MyPredicateEncoder::default().encode_data("predicate-input".try_into()?)?; assert_predicate_spendable(data, "sway/types/predicates/predicate_string_slice").await?; Ok(()) }
rust
Apache-2.0
865e00c295de8b4a0a1ef7ac926c3c8266d5151b
2026-01-04T15:31:59.450823Z
false
FuelLabs/fuels-rs
https://github.com/FuelLabs/fuels-rs/blob/865e00c295de8b4a0a1ef7ac926c3c8266d5151b/e2e/tests/logs.rs
e2e/tests/logs.rs
use fuel_tx::SubAssetId; use fuels::{ core::codec::DecoderConfig, prelude::*, tx::ContractIdExt, types::{AsciiString, Bits256, SizedAsciiString, errors::transaction::Reason}, }; #[tokio::test] async fn parse_logged_variables() -> Result<()> { setup_program_test!( Wallets("wallet"), Abigen(Contract( name = "LogContract", project = "e2e/sway/logs/contract_logs" )), Deploy( name = "contract_instance", contract = "LogContract", wallet = "wallet", random_salt = false, ), ); // ANCHOR: produce_logs let contract_methods = contract_instance.methods(); let response = contract_methods.produce_logs_variables().call().await?; let log_u64 = response.decode_logs_with_type::<u64>()?; let log_bits256 = response.decode_logs_with_type::<Bits256>()?; let log_string = response.decode_logs_with_type::<SizedAsciiString<4>>()?; let log_array = response.decode_logs_with_type::<[u8; 3]>()?; let expected_bits256 = Bits256([ 239, 134, 175, 169, 105, 108, 240, 220, 99, 133, 226, 196, 7, 166, 225, 89, 161, 16, 60, 239, 183, 226, 174, 6, 54, 251, 51, 211, 203, 42, 158, 74, ]); assert_eq!(log_u64, vec![64]); assert_eq!(log_bits256, vec![expected_bits256]); assert_eq!(log_string, vec!["Fuel"]); assert_eq!(log_array, vec![[1, 2, 3]]); // ANCHOR_END: produce_logs Ok(()) } #[tokio::test] async fn parse_logs_values() -> Result<()> { setup_program_test!( Wallets("wallet"), Abigen(Contract( name = "LogContract", project = "e2e/sway/logs/contract_logs" )), Deploy( name = "contract_instance", contract = "LogContract", wallet = "wallet", random_salt = false, ), ); let contract_methods = contract_instance.methods(); let response = contract_methods.produce_logs_values().call().await?; let log_u64 = response.decode_logs_with_type::<u64>()?; let log_u32 = response.decode_logs_with_type::<u32>()?; let log_u16 = response.decode_logs_with_type::<u16>()?; let log_u8 = response.decode_logs_with_type::<u8>()?; // try to retrieve non existent log let log_nonexistent = response.decode_logs_with_type::<bool>()?; 
assert_eq!(log_u64, vec![64]); assert_eq!(log_u32, vec![32]); assert_eq!(log_u16, vec![16]); assert_eq!(log_u8, vec![8]); assert!(log_nonexistent.is_empty()); Ok(()) } #[tokio::test] async fn parse_logs_custom_types() -> Result<()> { setup_program_test!( Wallets("wallet"), Abigen(Contract( name = "LogContract", project = "e2e/sway/logs/contract_logs" )), Deploy( name = "contract_instance", contract = "LogContract", wallet = "wallet", random_salt = false, ), ); let contract_methods = contract_instance.methods(); let response = contract_methods.produce_logs_custom_types().call().await?; let log_test_struct = response.decode_logs_with_type::<TestStruct>()?; let log_test_enum = response.decode_logs_with_type::<TestEnum>()?; let log_tuple = response.decode_logs_with_type::<(TestStruct, TestEnum)>()?; let expected_bits256 = Bits256([ 239, 134, 175, 169, 105, 108, 240, 220, 99, 133, 226, 196, 7, 166, 225, 89, 161, 16, 60, 239, 183, 226, 174, 6, 54, 251, 51, 211, 203, 42, 158, 74, ]); let expected_struct = TestStruct { field_1: true, field_2: expected_bits256, field_3: 64, }; let expected_enum = TestEnum::VariantTwo; assert_eq!(log_test_struct, vec![expected_struct.clone()]); assert_eq!(log_test_enum, vec![expected_enum.clone()]); assert_eq!(log_tuple, vec![(expected_struct, expected_enum)]); Ok(()) } #[tokio::test] async fn parse_logs_generic_types() -> Result<()> { setup_program_test!( Wallets("wallet"), Abigen(Contract( name = "LogContract", project = "e2e/sway/logs/contract_logs" )), Deploy( name = "contract_instance", contract = "LogContract", wallet = "wallet", random_salt = false, ), ); let contract_methods = contract_instance.methods(); let response = contract_methods.produce_logs_generic_types().call().await?; let log_struct = response.decode_logs_with_type::<StructWithGeneric<[_; 3]>>()?; let log_enum = response.decode_logs_with_type::<EnumWithGeneric<[_; 3]>>()?; let log_struct_nested = 
response.decode_logs_with_type::<StructWithNestedGeneric<StructWithGeneric<[_; 3]>>>()?; let log_struct_deeply_nested = response.decode_logs_with_type::<StructDeeplyNestedGeneric< StructWithNestedGeneric<StructWithGeneric<[_; 3]>>, >>()?; let l = [1u8, 2u8, 3u8]; let expected_struct = StructWithGeneric { field_1: l, field_2: 64, }; let expected_enum = EnumWithGeneric::VariantOne(l); let expected_nested_struct = StructWithNestedGeneric { field_1: expected_struct.clone(), field_2: 64, }; let expected_deeply_nested_struct = StructDeeplyNestedGeneric { field_1: expected_nested_struct.clone(), field_2: 64, }; assert_eq!(log_struct, vec![expected_struct]); assert_eq!(log_enum, vec![expected_enum]); assert_eq!(log_struct_nested, vec![expected_nested_struct]); assert_eq!( log_struct_deeply_nested, vec![expected_deeply_nested_struct] ); Ok(()) } #[tokio::test] async fn decode_logs() -> Result<()> { setup_program_test!( Wallets("wallet"), Abigen(Contract( name = "LogContract", project = "e2e/sway/logs/contract_logs" )), Deploy( name = "contract_instance", contract = "LogContract", wallet = "wallet", random_salt = false, ), ); // ANCHOR: decode_logs let contract_methods = contract_instance.methods(); let response = contract_methods.produce_multiple_logs().call().await?; let logs = response.decode_logs(); // ANCHOR_END: decode_logs let expected_bits256 = Bits256([ 239, 134, 175, 169, 105, 108, 240, 220, 99, 133, 226, 196, 7, 166, 225, 89, 161, 16, 60, 239, 183, 226, 174, 6, 54, 251, 51, 211, 203, 42, 158, 74, ]); let expected_struct = TestStruct { field_1: true, field_2: expected_bits256, field_3: 64, }; let expected_enum = TestEnum::VariantTwo; let expected_generic_struct = StructWithGeneric { field_1: expected_struct.clone(), field_2: 64, }; let expected_logs: Vec<String> = vec![ format!("{:?}", 64u64), format!("{:?}", 32u32), format!("{:?}", 16u16), format!("{:?}", 8u8), format!("{:?}", 64u64), format!("{expected_bits256:?}"), format!("{:?}", 
SizedAsciiString::<4>::new("Fuel".to_string())?), format!("{:?}", [1, 2, 3]), format!("{expected_struct:?}"), format!("{expected_enum:?}"), format!("{expected_generic_struct:?}"), ]; assert_eq!(expected_logs, logs.filter_succeeded()); Ok(()) } #[tokio::test] async fn decode_logs_with_no_logs() -> Result<()> { setup_program_test!( Wallets("wallet"), Abigen(Contract( name = "TestContract", project = "e2e/sway/contracts/contract_test" )), Deploy( name = "contract_instance", contract = "TestContract", wallet = "wallet", random_salt = false, ), ); let contract_methods = contract_instance.methods(); let logs = contract_methods .initialize_counter(42) .call() .await? .decode_logs(); assert!(logs.filter_succeeded().is_empty()); Ok(()) } #[tokio::test] async fn multi_call_log_single_contract() -> Result<()> { setup_program_test!( Wallets("wallet"), Abigen(Contract( name = "LogContract", project = "e2e/sway/logs/contract_logs" )), Deploy( name = "contract_instance", contract = "LogContract", wallet = "wallet", random_salt = false, ), ); let contract_methods = contract_instance.methods(); let call_handler_1 = contract_methods.produce_logs_values(); let call_handler_2 = contract_methods.produce_logs_variables(); let multi_call_handler = CallHandler::new_multi_call(wallet.clone()) .add_call(call_handler_1) .add_call(call_handler_2); let expected_logs: Vec<String> = vec![ format!("{:?}", 64u64), format!("{:?}", 32u32), format!("{:?}", 16u16), format!("{:?}", 8u8), format!("{:?}", 64u64), format!( "{:?}", Bits256([ 239, 134, 175, 169, 105, 108, 240, 220, 99, 133, 226, 196, 7, 166, 225, 89, 161, 16, 60, 239, 183, 226, 174, 6, 54, 251, 51, 211, 203, 42, 158, 74, ]) ), format!("{:?}", SizedAsciiString::<4>::new("Fuel".to_string())?), format!("{:?}", [1, 2, 3]), ]; let logs = multi_call_handler.call::<((), ())>().await?.decode_logs(); assert_eq!(logs.filter_succeeded(), expected_logs); Ok(()) } #[tokio::test] async fn multi_call_log_multiple_contracts() -> Result<()> { 
setup_program_test!( Wallets("wallet"), Abigen(Contract( name = "LogContract", project = "e2e/sway/logs/contract_logs" )), Deploy( name = "contract_instance", contract = "LogContract", wallet = "wallet", random_salt = false, ), Deploy( name = "contract_instance2", contract = "LogContract", wallet = "wallet", random_salt = false, ), ); let call_handler_1 = contract_instance.methods().produce_logs_values(); let call_handler_2 = contract_instance2.methods().produce_logs_variables(); let multi_call_handler = CallHandler::new_multi_call(wallet.clone()) .add_call(call_handler_1) .add_call(call_handler_2); let expected_logs: Vec<String> = vec![ format!("{:?}", 64u64), format!("{:?}", 32u32), format!("{:?}", 16u16), format!("{:?}", 8u8), format!("{:?}", 64u64), format!( "{:?}", Bits256([ 239, 134, 175, 169, 105, 108, 240, 220, 99, 133, 226, 196, 7, 166, 225, 89, 161, 16, 60, 239, 183, 226, 174, 6, 54, 251, 51, 211, 203, 42, 158, 74, ]) ), format!("{:?}", SizedAsciiString::<4>::new("Fuel".to_string())?), format!("{:?}", [1, 2, 3]), ]; let logs = multi_call_handler.call::<((), ())>().await?.decode_logs(); assert_eq!(logs.filter_succeeded(), expected_logs); Ok(()) } #[tokio::test] async fn multi_call_contract_with_contract_logs() -> Result<()> { setup_program_test!( Wallets("wallet"), Abigen( Contract(name = "MyContract", project = "e2e/sway/logs/contract_logs"), Contract( name = "ContractCaller", project = "e2e/sway/logs/contract_with_contract_logs" ) ), Deploy( name = "contract_instance", contract = "MyContract", wallet = "wallet", random_salt = false, ), Deploy( name = "contract_caller_instance", contract = "ContractCaller", wallet = "wallet", random_salt = false, ), Deploy( name = "contract_caller_instance2", contract = "ContractCaller", wallet = "wallet", random_salt = false, ), ); let call_handler_1 = contract_caller_instance .methods() .logs_from_external_contract(contract_instance.id()) .with_contracts(&[&contract_instance]); let call_handler_2 = 
contract_caller_instance2 .methods() .logs_from_external_contract(contract_instance.id()) .with_contracts(&[&contract_instance]); let multi_call_handler = CallHandler::new_multi_call(wallet.clone()) .add_call(call_handler_1) .add_call(call_handler_2); let expected_logs: Vec<String> = vec![ format!("{:?}", 64), format!("{:?}", 32), format!("{:?}", 16), format!("{:?}", 8), format!("{:?}", 64), format!("{:?}", 32), format!("{:?}", 16), format!("{:?}", 8), ]; let logs = multi_call_handler.call::<((), ())>().await?.decode_logs(); assert_eq!(logs.filter_succeeded(), expected_logs); Ok(()) } fn assert_revert_containing_msg(msg: &str, error: Error) { let Error::Transaction(Reason::Failure { reason, .. }) = error else { panic!("error does not have the transaction failure variant"); }; assert!( reason.contains(msg), "message: \"{msg}\" not contained in reason: \"{reason}\"" ); } #[tokio::test] async fn revert_logs() -> Result<()> { setup_program_test!( Wallets("wallet"), Abigen(Contract( name = "RevertLogsContract", project = "e2e/sway/logs/contract_revert_logs" )), Deploy( name = "contract_instance", contract = "RevertLogsContract", wallet = "wallet", random_salt = false, ), ); macro_rules! 
reverts_with_msg { ($method:ident, call, $msg:expr) => { let error = contract_instance .methods() .$method() .call() .await .expect_err("should return a revert error"); assert_revert_containing_msg($msg, error); }; ($method:ident, simulate, $msg:expr) => { let error = contract_instance .methods() .$method() .simulate(Execution::realistic()) .await .expect_err("should return a revert error"); assert_revert_containing_msg($msg, error); }; } { reverts_with_msg!(require_primitive, call, "42"); reverts_with_msg!(require_primitive, simulate, "42"); reverts_with_msg!(require_string, call, "fuel"); reverts_with_msg!(require_string, simulate, "fuel"); reverts_with_msg!(require_custom_generic, call, "StructDeeplyNestedGeneric"); reverts_with_msg!( require_custom_generic, simulate, "StructDeeplyNestedGeneric" ); reverts_with_msg!(require_with_additional_logs, call, "64"); reverts_with_msg!(require_with_additional_logs, simulate, "64"); } { reverts_with_msg!(rev_w_log_primitive, call, "42"); reverts_with_msg!(rev_w_log_primitive, simulate, "42"); reverts_with_msg!(rev_w_log_string, call, "fuel"); reverts_with_msg!(rev_w_log_string, simulate, "fuel"); reverts_with_msg!(rev_w_log_custom_generic, call, "StructDeeplyNestedGeneric"); reverts_with_msg!( rev_w_log_custom_generic, simulate, "StructDeeplyNestedGeneric" ); } Ok(()) } #[tokio::test] async fn multi_call_revert_logs_single_contract() -> Result<()> { setup_program_test!( Wallets("wallet"), Abigen(Contract( name = "RevertLogsContract", project = "e2e/sway/logs/contract_revert_logs" )), Deploy( name = "contract_instance", contract = "RevertLogsContract", wallet = "wallet", random_salt = false, ), ); let contract_methods = contract_instance.methods(); // The output of the error depends on the order of the contract // handlers as the script returns the first revert it finds. 
{ let call_handler_1 = contract_methods.require_string(); let call_handler_2 = contract_methods.rev_w_log_custom_generic(); let mut multi_call_handler = CallHandler::new_multi_call(wallet.clone()) .add_call(call_handler_1) .add_call(call_handler_2); let error = multi_call_handler .simulate::<((), ())>(Execution::realistic()) .await .expect_err("should return a revert error"); assert_revert_containing_msg("fuel", error); let error = multi_call_handler .call::<((), ())>() .await .expect_err("should return a revert error"); assert_revert_containing_msg("fuel", error); } { let call_handler_1 = contract_methods.require_custom_generic(); let call_handler_2 = contract_methods.rev_w_log_string(); let mut multi_call_handler = CallHandler::new_multi_call(wallet.clone()) .add_call(call_handler_1) .add_call(call_handler_2); let error = multi_call_handler .simulate::<((), ())>(Execution::realistic()) .await .expect_err("should return a revert error"); assert_revert_containing_msg("StructDeeplyNestedGeneric", error); let error = multi_call_handler .call::<((), ())>() .await .expect_err("should return a revert error"); assert_revert_containing_msg("StructDeeplyNestedGeneric", error); } Ok(()) } #[tokio::test] async fn multi_call_revert_logs_multi_contract() -> Result<()> { setup_program_test!( Wallets("wallet"), Abigen(Contract( name = "RevertLogsContract", project = "e2e/sway/logs/contract_revert_logs" )), Deploy( name = "contract_instance", contract = "RevertLogsContract", wallet = "wallet", random_salt = false, ), Deploy( name = "contract_instance2", contract = "RevertLogsContract", wallet = "wallet", random_salt = false, ), ); let contract_methods = contract_instance.methods(); let contract_methods2 = contract_instance2.methods(); // The output of the error depends on the order of the contract // handlers as the script returns the first revert it finds. 
{ let call_handler_1 = contract_methods.require_string(); let call_handler_2 = contract_methods2.rev_w_log_custom_generic(); let mut multi_call_handler = CallHandler::new_multi_call(wallet.clone()) .add_call(call_handler_1) .add_call(call_handler_2); let error = multi_call_handler .simulate::<((), ())>(Execution::realistic()) .await .expect_err("should return a revert error"); assert_revert_containing_msg("fuel", error); let error = multi_call_handler .call::<((), ())>() .await .expect_err("should return a revert error"); assert_revert_containing_msg("fuel", error); } { let call_handler_1 = contract_methods2.require_custom_generic(); let call_handler_2 = contract_methods.rev_w_log_string(); let mut multi_call_handler = CallHandler::new_multi_call(wallet.clone()) .add_call(call_handler_1) .add_call(call_handler_2); let error = multi_call_handler .simulate::<((), ())>(Execution::realistic()) .await .expect_err("should return a revert error"); assert_revert_containing_msg("StructDeeplyNestedGeneric", error); let error = multi_call_handler .call::<((), ())>() .await .expect_err("should return a revert error"); assert_revert_containing_msg("StructDeeplyNestedGeneric", error); } Ok(()) } #[tokio::test] #[allow(unused_variables)] async fn script_decode_logs() -> Result<()> { // ANCHOR: script_logs abigen!(Script( name = "LogScript", abi = "e2e/sway/logs/script_logs/out/release/script_logs-abi.json" )); let wallet = launch_provider_and_get_wallet().await?; let bin_path = "sway/logs/script_logs/out/release/script_logs.bin"; let instance = LogScript::new(wallet.clone(), bin_path); let response = instance.main().call().await?; let logs = response.decode_logs(); let log_u64 = response.decode_logs_with_type::<u64>()?; // ANCHOR_END: script_logs let l = [1u8, 2u8, 3u8]; let expected_bits256 = Bits256([ 239, 134, 175, 169, 105, 108, 240, 220, 99, 133, 226, 196, 7, 166, 225, 89, 161, 16, 60, 239, 183, 226, 174, 6, 54, 251, 51, 211, 203, 42, 158, 74, ]); let expected_struct = 
TestStruct { field_1: true, field_2: expected_bits256, field_3: 64, }; let expected_enum = TestEnum::VariantTwo; let expected_tuple = (expected_struct.clone(), expected_enum.clone()); let expected_generic_struct = StructWithGeneric { field_1: expected_struct.clone(), field_2: 64, }; let expected_generic_enum = EnumWithGeneric::VariantOne(l); let expected_nested_struct = StructWithNestedGeneric { field_1: expected_generic_struct.clone(), field_2: 64, }; let expected_deeply_nested_struct = StructDeeplyNestedGeneric { field_1: expected_nested_struct.clone(), field_2: 64, }; let expected_logs: Vec<String> = vec![ format!("{:?}", 128u64), format!("{:?}", 32u32), format!("{:?}", 16u16), format!("{:?}", 8u8), format!("{:?}", 64u64), format!("{expected_bits256:?}"), format!("{:?}", SizedAsciiString::<4>::new("Fuel".to_string())?), format!("{:?}", [1, 2, 3]), format!("{expected_struct:?}"), format!("{expected_enum:?}"), format!("{expected_tuple:?}"), format!("{expected_generic_struct:?}"), format!("{expected_generic_enum:?}"), format!("{expected_nested_struct:?}"), format!("{expected_deeply_nested_struct:?}"), ]; assert_eq!(logs.filter_succeeded(), expected_logs); Ok(()) } #[tokio::test] async fn contract_with_contract_logs() -> Result<()> { setup_program_test!( Wallets("wallet"), Abigen( Contract(name = "MyContract", project = "e2e/sway/logs/contract_logs",), Contract( name = "ContractCaller", project = "e2e/sway/logs/contract_with_contract_logs", ) ), Deploy( name = "contract_instance", contract = "MyContract", wallet = "wallet", random_salt = false, ), Deploy( name = "contract_caller_instance", contract = "ContractCaller", wallet = "wallet", random_salt = false, ) ); let expected_logs: Vec<String> = vec![ format!("{:?}", 64), format!("{:?}", 32), format!("{:?}", 16), format!("{:?}", 8), ]; let logs = contract_caller_instance .methods() .logs_from_external_contract(contract_instance.id()) .with_contracts(&[&contract_instance]) .call() .await? 
.decode_logs(); assert_eq!(expected_logs, logs.filter_succeeded()); Ok(()) } #[tokio::test] #[allow(unused_variables)] async fn script_logs_with_contract_logs() -> Result<()> { setup_program_test!( Wallets("wallet"), Abigen( Contract(name = "MyContract", project = "e2e/sway/logs/contract_logs",), Script( name = "LogScript", project = "e2e/sway/logs/script_with_contract_logs" ) ), Deploy( name = "contract_instance", contract = "MyContract", wallet = "wallet", random_salt = false, ), LoadScript( name = "script_instance", script = "LogScript", wallet = "wallet" ) ); let expected_num_contract_logs = 4; let expected_script_logs: Vec<String> = vec![ // Contract logs format!("{:?}", 64), format!("{:?}", 32), format!("{:?}", 16), format!("{:?}", 8), // Script logs format!("{:?}", true), format!("{:?}", 42), format!("{:?}", SizedAsciiString::<4>::new("Fuel".to_string())?), format!("{:?}", [1, 2, 3]), ]; // ANCHOR: instance_to_contract_id let contract_id: ContractId = contract_instance.contract_id(); // ANCHOR_END: instance_to_contract_id // ANCHOR: external_contract_ids let response = script_instance .main(contract_id, MatchEnum::Logs) .with_contract_ids(&[contract_id]) .call() .await?; // ANCHOR_END: external_contract_ids // ANCHOR: external_contract let response = script_instance .main(contract_id, MatchEnum::Logs) .with_contracts(&[&contract_instance]) .call() .await?; // ANCHOR_END: external_contract { let num_contract_logs = response .tx_status .receipts .iter() .filter(|receipt| matches!(receipt, Receipt::LogData { id, .. } | Receipt::Log { id, .. 
} if *id == contract_id)) .count(); assert_eq!(num_contract_logs, expected_num_contract_logs); } { let logs = response.decode_logs(); assert_eq!(logs.filter_succeeded(), expected_script_logs); } Ok(()) } #[tokio::test] async fn script_decode_logs_with_type() -> Result<()> { setup_program_test!( Wallets("wallet"), Abigen(Script( name = "LogScript", project = "e2e/sway/logs/script_logs" )), LoadScript( name = "script_instance", script = "LogScript", wallet = "wallet" ) ); let response = script_instance.main().call().await?; let l = [1u8, 2u8, 3u8]; let expected_bits256 = Bits256([ 239, 134, 175, 169, 105, 108, 240, 220, 99, 133, 226, 196, 7, 166, 225, 89, 161, 16, 60, 239, 183, 226, 174, 6, 54, 251, 51, 211, 203, 42, 158, 74, ]); let expected_struct = TestStruct { field_1: true, field_2: expected_bits256, field_3: 64, }; let expected_enum = TestEnum::VariantTwo; let expected_generic_struct = StructWithGeneric { field_1: expected_struct.clone(), field_2: 64, }; let expected_generic_enum = EnumWithGeneric::VariantOne(l); let expected_nested_struct = StructWithNestedGeneric { field_1: expected_generic_struct.clone(), field_2: 64, }; let expected_deeply_nested_struct = StructDeeplyNestedGeneric { field_1: expected_nested_struct.clone(), field_2: 64, }; let log_u64 = response.decode_logs_with_type::<u64>()?; let log_u32 = response.decode_logs_with_type::<u32>()?; let log_u16 = response.decode_logs_with_type::<u16>()?; let log_u8 = response.decode_logs_with_type::<u8>()?; let log_struct = response.decode_logs_with_type::<TestStruct>()?; let log_enum = response.decode_logs_with_type::<TestEnum>()?; let log_generic_struct = response.decode_logs_with_type::<StructWithGeneric<TestStruct>>()?; let log_generic_enum = response.decode_logs_with_type::<EnumWithGeneric<[_; 3]>>()?; let log_nested_struct = response .decode_logs_with_type::<StructWithNestedGeneric<StructWithGeneric<TestStruct>>>()?; let log_deeply_nested_struct = 
response.decode_logs_with_type::<StructDeeplyNestedGeneric< StructWithNestedGeneric<StructWithGeneric<TestStruct>>, >>()?; // try to retrieve non existent log let log_nonexistent = response.decode_logs_with_type::<bool>()?; assert_eq!(log_u64, vec![128, 64]); assert_eq!(log_u32, vec![32]); assert_eq!(log_u16, vec![16]); assert_eq!(log_u8, vec![8]); assert_eq!(log_struct, vec![expected_struct]); assert_eq!(log_enum, vec![expected_enum]); assert_eq!(log_generic_struct, vec![expected_generic_struct]); assert_eq!(log_generic_enum, vec![expected_generic_enum]); assert_eq!(log_nested_struct, vec![expected_nested_struct]); assert_eq!( log_deeply_nested_struct, vec![expected_deeply_nested_struct] ); assert!(log_nonexistent.is_empty()); Ok(()) } #[tokio::test] async fn script_require_log() -> Result<()> { setup_program_test!( Wallets("wallet"), Abigen(Script( name = "LogScript", project = "e2e/sway/logs/script_revert_logs" )), LoadScript( name = "script_instance", script = "LogScript", wallet = "wallet" ) ); macro_rules! 
reverts_with_msg { ($arg:expr, call, $msg:expr) => { let error = script_instance .main($arg) .call() .await .expect_err("should return a revert error"); assert_revert_containing_msg($msg, error); }; ($arg:expr, simulate, $msg:expr) => { let error = script_instance .main($arg) .simulate(Execution::realistic()) .await .expect_err("should return a revert error"); assert_revert_containing_msg($msg, error); }; } { reverts_with_msg!(MatchEnum::RequirePrimitive, call, "42"); reverts_with_msg!(MatchEnum::RequirePrimitive, simulate, "42"); reverts_with_msg!(MatchEnum::RequireString, call, "fuel"); reverts_with_msg!(MatchEnum::RequireString, simulate, "fuel"); reverts_with_msg!( MatchEnum::RequireCustomGeneric, call, "StructDeeplyNestedGeneric" ); reverts_with_msg!( MatchEnum::RequireCustomGeneric, simulate, "StructDeeplyNestedGeneric" ); reverts_with_msg!(MatchEnum::RequireWithAdditionalLogs, call, "64"); reverts_with_msg!(MatchEnum::RequireWithAdditionalLogs, simulate, "64"); } { reverts_with_msg!(MatchEnum::RevWLogPrimitive, call, "42"); reverts_with_msg!(MatchEnum::RevWLogPrimitive, simulate, "42"); reverts_with_msg!(MatchEnum::RevWLogString, call, "fuel"); reverts_with_msg!(MatchEnum::RevWLogString, simulate, "fuel"); reverts_with_msg!( MatchEnum::RevWLogCustomGeneric, call, "StructDeeplyNestedGeneric" ); reverts_with_msg!( MatchEnum::RevWLogCustomGeneric, simulate, "StructDeeplyNestedGeneric" ); } Ok(()) } #[tokio::test] async fn contract_require_from_contract() -> Result<()> { setup_program_test!( Wallets("wallet"), Abigen( Contract( name = "MyContract", project = "e2e/sway/contracts/lib_contract", ), Contract( name = "ContractCaller", project = "e2e/sway/contracts/lib_contract_caller", ) ), Deploy( name = "contract_instance", contract = "MyContract", wallet = "wallet", random_salt = false, ), Deploy( name = "contract_caller_instance", contract = "ContractCaller", wallet = "wallet", random_salt = false, ) ); let error = contract_caller_instance .methods() 
.require_from_contract(contract_instance.id()) .with_contracts(&[&contract_instance]) .call() .await .expect_err("should return a revert error"); assert_revert_containing_msg("require from contract", error); Ok(()) } #[tokio::test] async fn multi_call_contract_require_from_contract() -> Result<()> { setup_program_test!( Wallets("wallet"), Abigen( Contract( name = "MyContract", project = "e2e/sway/contracts/lib_contract", ), Contract( name = "ContractLogs", project = "e2e/sway/logs/contract_logs", ), Contract( name = "ContractCaller",
rust
Apache-2.0
865e00c295de8b4a0a1ef7ac926c3c8266d5151b
2026-01-04T15:31:59.450823Z
true
FuelLabs/fuels-rs
https://github.com/FuelLabs/fuels-rs/blob/865e00c295de8b4a0a1ef7ac926c3c8266d5151b/e2e/tests/binary_format.rs
e2e/tests/binary_format.rs
#[cfg(test)] mod tests { use std::{convert::TryInto, ops::Range}; use fuels::programs::executable::{Executable, Regular}; const DATA_OFFSET_LOCATION: Range<usize> = 8..16; const CONFIGURABLES_OFFSET_LOCATION: Range<usize> = 16..24; const LEGACY_BINARY_PATH: &str = "../e2e/assets/precompiled_sway/legacy_format_simple_contract.bin"; const NEW_BINARY_PATH: &str = "../e2e/sway/bindings/simple_contract/out/release/simple_contract.bin"; #[test] fn no_configurables_offset_for_old_sway_binaries() { // given let (_, executable) = load(LEGACY_BINARY_PATH); // when let configurables_offset = executable.configurables_offset_in_code().unwrap(); // then assert_eq!(configurables_offset, None); } #[test] fn correct_data_offset_for_old_sway_binaries() { // given let (binary, executable) = load(LEGACY_BINARY_PATH); let expected_data_offset = read_offset(&binary, DATA_OFFSET_LOCATION); // when let data_offset = executable.data_offset_in_code().unwrap(); // then assert_eq!(data_offset, expected_data_offset); } #[test] fn correct_data_offset_for_new_sway_binaries() { // given let (binary, executable) = load(NEW_BINARY_PATH); let expected_data_offset = read_offset(&binary, DATA_OFFSET_LOCATION); // when let data_offset = executable.data_offset_in_code().unwrap(); // then assert_eq!(data_offset, expected_data_offset); } #[test] fn correct_configurables_offset_for_new_sway_binaries() { // given let (binary, executable) = load(NEW_BINARY_PATH); let expected_configurables_offset = read_offset(&binary, CONFIGURABLES_OFFSET_LOCATION); // when let configurables_offset = executable.configurables_offset_in_code(); // then let configurables_offset = configurables_offset .expect("to successfully detect a modern binary is used") .expect("to extract the configurables_offset"); assert_eq!(configurables_offset, expected_configurables_offset); } pub fn read_offset(binary: &[u8], range: Range<usize>) -> usize { assert_eq!(range.clone().count(), 8, "must be a range of 8 B"); let data: [u8; 8] = 
binary[range].try_into().unwrap(); u64::from_be_bytes(data) as usize } fn load(path: &str) -> (Vec<u8>, Executable<Regular>) { let binary = std::fs::read(path).unwrap(); let executable = Executable::from_bytes(binary.clone()); (binary, executable) } }
rust
Apache-2.0
865e00c295de8b4a0a1ef7ac926c3c8266d5151b
2026-01-04T15:31:59.450823Z
false
FuelLabs/fuels-rs
https://github.com/FuelLabs/fuels-rs/blob/865e00c295de8b4a0a1ef7ac926c3c8266d5151b/e2e/tests/providers.rs
e2e/tests/providers.rs
use std::{ops::Add, path::Path}; use chrono::{DateTime, Duration, TimeZone, Utc}; use fuel_asm::RegId; use fuel_tx::SubAssetId; use fuels::{ accounts::{ Account, signers::{fake::FakeSigner, private_key::PrivateKeySigner}, }, client::{PageDirection, PaginationRequest}, prelude::*, tx::{ContractIdExt, Receipt, Witness}, types::{ coin_type::CoinType, message::Message, transaction_builders::{BuildableTransaction, ScriptTransactionBuilder}, tx_status::{Failure, Success, TxStatus}, }, }; use futures::StreamExt; use rand::thread_rng; #[tokio::test] async fn test_provider_launch_and_connect() -> Result<()> { abigen!(Contract( name = "MyContract", abi = "e2e/sway/contracts/contract_test/out/release/contract_test-abi.json" )); let signer = PrivateKeySigner::random(&mut thread_rng()); let coins = setup_single_asset_coins( signer.address(), AssetId::zeroed(), DEFAULT_NUM_COINS, DEFAULT_COIN_AMOUNT, ); let provider = setup_test_provider(coins, vec![], None, None).await?; let wallet = Wallet::new(signer, provider.clone()); let contract_id = Contract::load_from( "sway/contracts/contract_test/out/release/contract_test.bin", LoadConfiguration::default(), )? .deploy_if_not_exists(&wallet, TxPolicies::default()) .await? 
.contract_id; let contract_instance_connected = MyContract::new(contract_id, wallet.clone()); let response = contract_instance_connected .methods() .initialize_counter(42) .call() .await?; assert_eq!(42, response.value); let contract_instance_launched = MyContract::new(contract_id, wallet); let response = contract_instance_launched .methods() .increment_counter(10) .call() .await?; assert_eq!(52, response.value); Ok(()) } #[tokio::test] async fn test_network_error() -> Result<()> { abigen!(Contract( name = "MyContract", abi = "e2e/sway/contracts/contract_test/out/release/contract_test-abi.json" )); let node_config = NodeConfig::default(); let chain_config = ChainConfig::default(); let state_config = StateConfig::default(); let service = FuelService::start(node_config, chain_config, state_config).await?; let provider = Provider::connect(service.bound_address().to_string()).await?; let wallet = Wallet::random(&mut thread_rng(), provider.clone()); // Simulate an unreachable node service.stop().await.unwrap(); let response = Contract::load_from( "sway/contracts/contract_test/out/release/contract_test.bin", LoadConfiguration::default(), )? 
.deploy_if_not_exists(&wallet, TxPolicies::default()) .await; assert!(matches!(response, Err(Error::Provider(_)))); Ok(()) } #[tokio::test] async fn test_input_message() -> Result<()> { let compare_messages = |messages_from_provider: Vec<Message>, used_messages: Vec<Message>| -> bool { std::iter::zip(&used_messages, &messages_from_provider).all(|(a, b)| { a.sender == b.sender && a.recipient == b.recipient && a.nonce == b.nonce && a.amount == b.amount }) }; let signer = PrivateKeySigner::random(&mut thread_rng()); // coin to pay transaction fee let coins = setup_single_asset_coins(signer.address(), AssetId::zeroed(), 1, DEFAULT_COIN_AMOUNT); let messages = vec![setup_single_message( Address::default(), signer.address(), DEFAULT_COIN_AMOUNT, 0.into(), vec![1, 2], )]; let provider = setup_test_provider(coins, messages.clone(), None, None).await?; let wallet = Wallet::new(signer, provider.clone()); setup_program_test!( Abigen(Contract( name = "TestContract", project = "e2e/sway/contracts/contract_test" )), Deploy( name = "contract_instance", contract = "TestContract", wallet = "wallet", random_salt = false, ), ); let spendable_messages = wallet.get_messages().await?; assert!(compare_messages(spendable_messages, messages)); let response = contract_instance .methods() .initialize_counter(42) .call() .await?; assert_eq!(42, response.value); Ok(()) } #[tokio::test] async fn test_input_message_pays_fee() -> Result<()> { let signer = PrivateKeySigner::random(&mut thread_rng()); let messages = setup_single_message( Address::default(), signer.address(), DEFAULT_COIN_AMOUNT, 0.into(), vec![], ); let provider = setup_test_provider(vec![], vec![messages], None, None).await?; let consensus_parameters = provider.consensus_parameters().await?; let base_asset_id = consensus_parameters.base_asset_id(); let wallet = Wallet::new(signer, provider.clone()); abigen!(Contract( name = "MyContract", abi = "e2e/sway/contracts/contract_test/out/release/contract_test-abi.json" )); let 
deploy_response = Contract::load_from( "sway/contracts/contract_test/out/release/contract_test.bin", LoadConfiguration::default(), )? .deploy_if_not_exists(&wallet, TxPolicies::default()) .await?; let contract_instance = MyContract::new(deploy_response.contract_id, wallet.clone()); let call_response = contract_instance .methods() .initialize_counter(42) .call() .await?; assert_eq!(42, call_response.value); let balance = wallet.get_asset_balance(base_asset_id).await?; let deploy_fee = deploy_response.tx_status.unwrap().total_fee; let call_fee = call_response.tx_status.total_fee; assert_eq!( balance, (DEFAULT_COIN_AMOUNT - deploy_fee - call_fee) as u128 ); Ok(()) } #[tokio::test] async fn can_increase_block_height() -> Result<()> { // ANCHOR: use_produce_blocks_to_increase_block_height let wallets = launch_custom_provider_and_get_wallets(WalletsConfig::default(), None, None).await?; let wallet = &wallets[0]; let provider = wallet.provider(); assert_eq!(provider.latest_block_height().await?, 0u32); provider.produce_blocks(3, None).await?; assert_eq!(provider.latest_block_height().await?, 3u32); // ANCHOR_END: use_produce_blocks_to_increase_block_height Ok(()) } // debug builds are slower (20x for `fuel-core-lib`, 4x for a release-fuel-core-binary), makes for // flaky tests #[cfg(not(feature = "fuel-core-lib"))] #[tokio::test] async fn can_set_custom_block_time() -> Result<()> { // ANCHOR: use_produce_blocks_custom_time let block_time = 20u32; // seconds let config = NodeConfig { // This is how you specify the time between blocks block_production: Trigger::Interval { block_time: std::time::Duration::from_secs(block_time.into()), }, ..NodeConfig::default() }; let wallets = launch_custom_provider_and_get_wallets(WalletsConfig::default(), Some(config), None) .await?; let wallet = &wallets[0]; let provider = wallet.provider(); assert_eq!(provider.latest_block_height().await?, 0u32); let origin_block_time = provider.latest_block_time().await?.unwrap(); let blocks_to_produce 
= 3; provider.produce_blocks(blocks_to_produce, None).await?; assert_eq!(provider.latest_block_height().await?, blocks_to_produce); let expected_latest_block_time = origin_block_time .checked_add_signed(Duration::try_seconds((blocks_to_produce * block_time) as i64).unwrap()) .unwrap(); assert_eq!( provider.latest_block_time().await?.unwrap(), expected_latest_block_time ); // ANCHOR_END: use_produce_blocks_custom_time let req = PaginationRequest { cursor: None, results: 10, direction: PageDirection::Forward, }; let blocks: Vec<fuels::types::block::Block> = provider.get_blocks(req).await?.results; assert_eq!(blocks[1].header.time.unwrap().timestamp(), 20); assert_eq!(blocks[2].header.time.unwrap().timestamp(), 40); assert_eq!(blocks[3].header.time.unwrap().timestamp(), 60); Ok(()) } #[tokio::test] async fn can_retrieve_latest_block_time() -> Result<()> { let provider = setup_test_provider(vec![], vec![], None, None).await?; let since_epoch = 1676039910; let latest_timestamp = Utc.timestamp_opt(since_epoch, 0).unwrap(); provider.produce_blocks(1, Some(latest_timestamp)).await?; assert_eq!( provider.latest_block_time().await?.unwrap(), latest_timestamp ); Ok(()) } #[tokio::test] async fn contract_deployment_respects_maturity_and_expiration() -> Result<()> { abigen!(Contract( name = "MyContract", abi = "e2e/sway/contracts/transaction_block_height/out/release/transaction_block_height-abi.json" )); let wallet = launch_provider_and_get_wallet().await?; let provider = wallet.provider().clone(); let maturity = 10; let expiration = 20; let deploy_w_maturity_and_expiration = || { Contract::load_from( "sway/contracts/transaction_block_height/out/release/transaction_block_height.bin", LoadConfiguration::default(), ) .map(|loaded_contract| { loaded_contract.deploy( &wallet, TxPolicies::default() .with_maturity(maturity) .with_expiration(expiration), ) }) }; { let err = deploy_w_maturity_and_expiration()? 
.await .expect_err("maturity not reached"); assert!(err.to_string().contains("TransactionMaturity")); } { provider.produce_blocks(15, None).await?; deploy_w_maturity_and_expiration()? .await .expect("should succeed. Block height between `maturity` and `expiration`"); } { provider.produce_blocks(15, None).await?; let err = deploy_w_maturity_and_expiration()? .await .expect_err("expiration reached"); assert!(err.to_string().contains("TransactionExpiration")); } Ok(()) } #[tokio::test] async fn test_gas_forwarded_defaults_to_tx_limit() -> Result<()> { setup_program_test!( Wallets("wallet"), Abigen(Contract( name = "TestContract", project = "e2e/sway/contracts/contract_test" )), Deploy( name = "contract_instance", contract = "TestContract", wallet = "wallet", random_salt = false, ), ); // The gas used by the script to call a contract and forward remaining gas limit. let gas_used_by_script = 205; let gas_limit = 225_883; let response = contract_instance .methods() .initialize_counter(42) .with_tx_policies(TxPolicies::default().with_script_gas_limit(gas_limit)) .call() .await?; let gas_forwarded = response .tx_status .receipts .iter() .find(|r| matches!(r, Receipt::Call { .. 
})) .unwrap() .gas() .unwrap(); assert_eq!(gas_limit, gas_forwarded + gas_used_by_script); Ok(()) } #[tokio::test] async fn test_amount_and_asset_forwarding() -> Result<()> { setup_program_test!( Wallets("wallet"), Abigen(Contract( name = "TokenContract", project = "e2e/sway/contracts/token_ops" )), Deploy( name = "contract_instance", contract = "TokenContract", wallet = "wallet", random_salt = false, ), ); let contract_id = contract_instance.contract_id(); let contract_methods = contract_instance.methods(); let asset_id = contract_id.asset_id(&SubAssetId::zeroed()); let mut balance_response = contract_methods .get_balance(contract_id, asset_id) .call() .await?; assert_eq!(balance_response.value, 0); contract_methods.mint_coins(5_000_000).call().await?; balance_response = contract_methods .get_balance(contract_id, asset_id) .call() .await?; assert_eq!(balance_response.value, 5_000_000); let tx_policies = TxPolicies::default().with_script_gas_limit(1_000_000); // Forward 1_000_000 coin amount of base asset_id // this is a big number for checking that amount can be a u64 let call_params = CallParameters::default().with_amount(1_000_000); let response = contract_methods .get_msg_amount() .with_tx_policies(tx_policies) .call_params(call_params)? .call() .await?; assert_eq!(response.value, 1_000_000); let call_response = response .tx_status .receipts .iter() .find(|&r| matches!(r, Receipt::Call { .. 
})); assert!(call_response.is_some()); assert_eq!(call_response.unwrap().amount().unwrap(), 1_000_000); assert_eq!( call_response.unwrap().asset_id().unwrap(), &AssetId::zeroed() ); let address = wallet.address(); // withdraw some tokens to wallet contract_methods .transfer(1_000_000, asset_id, address.into()) .with_variable_output_policy(VariableOutputPolicy::Exactly(1)) .call() .await?; let asset_id = AssetId::from(*contract_id); let call_params = CallParameters::default() .with_amount(0) .with_asset_id(asset_id); let tx_policies = TxPolicies::default().with_script_gas_limit(1_000_000); let response = contract_methods .get_msg_amount() .with_tx_policies(tx_policies) .call_params(call_params)? .call() .await?; assert_eq!(response.value, 0); let call_response = response .tx_status .receipts .iter() .find(|&r| matches!(r, Receipt::Call { .. })); assert!(call_response.is_some()); assert_eq!(call_response.unwrap().amount().unwrap(), 0); assert_eq!( call_response.unwrap().asset_id().unwrap(), &AssetId::from(*contract_id) ); Ok(()) } #[tokio::test] async fn test_gas_errors() -> Result<()> { let signer = PrivateKeySigner::random(&mut thread_rng()); let number_of_coins = 1; let amount_per_coin = 1_000_000; let coins = setup_single_asset_coins( signer.address(), AssetId::zeroed(), number_of_coins, amount_per_coin, ); let provider = setup_test_provider(coins.clone(), vec![], None, None).await?; let wallet = Wallet::new(signer, provider.clone()); setup_program_test!( Abigen(Contract( name = "TestContract", project = "e2e/sway/contracts/contract_test" )), Deploy( name = "contract_instance", contract = "TestContract", wallet = "wallet", random_salt = false, ), ); // Test running out of gas. Gas price as `None` will be 0. 
let gas_limit = 42; let contract_instance_call = contract_instance .methods() .initialize_counter(42) // Build the ABI call .with_tx_policies(TxPolicies::default().with_script_gas_limit(gas_limit)); // Test that the call will use more gas than the gas limit let total_gas = contract_instance_call .estimate_transaction_cost(None, None) .await? .total_gas; assert!(total_gas > gas_limit); let response = contract_instance_call .call() .await .expect_err("should error"); let expected = "transaction reverted: OutOfGas"; assert!(response.to_string().starts_with(expected)); // Test for insufficient base asset amount to pay for the transaction fee let response = contract_instance .methods() .initialize_counter(42) // Build the ABI call .with_tx_policies(TxPolicies::default().with_tip(100_000_000_000)) .call() .await .expect_err("should error"); let expected = "Response errors; Validity(InsufficientFeeAmount"; assert!(response.to_string().contains(expected)); Ok(()) } #[tokio::test] async fn test_call_param_gas_errors() -> Result<()> { setup_program_test!( Wallets("wallet"), Abigen(Contract( name = "TestContract", project = "e2e/sway/contracts/contract_test" )), Deploy( name = "contract_instance", contract = "TestContract", wallet = "wallet", random_salt = false, ), ); // Transaction gas_limit is sufficient, call gas_forwarded is too small let contract_methods = contract_instance.methods(); let response = contract_methods .initialize_counter(42) .with_tx_policies(TxPolicies::default().with_script_gas_limit(446000)) .call_params(CallParameters::default().with_gas_forwarded(1))? .call() .await .expect_err("should error"); let expected = "transaction reverted: OutOfGas"; assert!(response.to_string().starts_with(expected)); // Call params gas_forwarded exceeds transaction limit let response = contract_methods .initialize_counter(42) .with_tx_policies(TxPolicies::default().with_script_gas_limit(1)) .call_params(CallParameters::default().with_gas_forwarded(1_000))? 
.call() .await .expect_err("should error"); assert!(response.to_string().contains(expected)); Ok(()) } #[tokio::test] async fn test_get_gas_used() -> Result<()> { setup_program_test!( Wallets("wallet"), Abigen(Contract( name = "TestContract", project = "e2e/sway/contracts/contract_test" )), Deploy( name = "contract_instance", contract = "TestContract", wallet = "wallet", random_salt = false, ), ); let total_gas = contract_instance .methods() .initialize_counter(42) .call() .await? .tx_status .total_gas; assert!(total_gas > 0); Ok(()) } #[tokio::test] async fn test_parse_block_time() -> Result<()> { let signer = PrivateKeySigner::random(&mut thread_rng()); let asset_id = AssetId::zeroed(); let coins = setup_single_asset_coins(signer.address(), asset_id, 1, DEFAULT_COIN_AMOUNT); let provider = setup_test_provider(coins.clone(), vec![], None, None).await?; let wallet = Wallet::new(signer, provider.clone()); let tx_policies = TxPolicies::default().with_script_gas_limit(2000); let wallet_2 = wallet.lock(); let tx_response = wallet .transfer(wallet_2.address(), 100, asset_id, tx_policies) .await?; let tx_response = wallet .try_provider()? .get_transaction_by_id(&tx_response.tx_id) .await? .unwrap(); assert!(tx_response.time.is_some()); let block = wallet .try_provider()? .block_by_height(tx_response.block_height.unwrap()) .await? 
.unwrap(); assert!(block.header.time.is_some()); Ok(()) } #[tokio::test] async fn test_get_spendable_with_exclusion() -> Result<()> { let coin_amount_1 = 1000; let coin_amount_2 = 500; let signer = PrivateKeySigner::random(&mut thread_rng()); let address = signer.address(); let coins = [coin_amount_1, coin_amount_2] .into_iter() .flat_map(|amount| setup_single_asset_coins(address, AssetId::zeroed(), 1, amount)) .collect::<Vec<_>>(); let message_amount = 200; let message = given_a_message(address, message_amount); let coin_1_utxo_id = coins[0].utxo_id; let coin_2_utxo_id = coins[1].utxo_id; let message_nonce = message.nonce; let provider = setup_test_provider(coins, vec![message], None, None).await?; let wallet = Wallet::new(signer, provider.clone()); let requested_amount = coin_amount_1 + coin_amount_2 + message_amount; let consensus_parameters = provider.consensus_parameters().await?; { let resources = wallet .get_spendable_resources( *consensus_parameters.base_asset_id(), requested_amount.into(), None, ) .await .unwrap(); assert_eq!(resources.len(), 3); } { let filter = ResourceFilter { from: wallet.address(), amount: coin_amount_1.into(), excluded_utxos: vec![coin_2_utxo_id], excluded_message_nonces: vec![message_nonce], ..Default::default() }; let resources = provider.get_spendable_resources(filter).await.unwrap(); match resources.as_slice() { [CoinType::Coin(coin)] => { assert_eq!(coin.utxo_id, coin_1_utxo_id); } _ => { panic!("This shouldn't happen!") } } } Ok(()) } fn given_a_message(address: Address, message_amount: u64) -> Message { setup_single_message( Address::default(), address, message_amount, 0.into(), vec![], ) } fn convert_to_datetime(timestamp: u64) -> DateTime<Utc> { let unix = tai64::Tai64(timestamp).to_unix(); DateTime::from_timestamp(unix, 0).unwrap() } /// This test is here in addition to `can_set_custom_block_time` because even though this test /// passed, the Sway `timestamp` function didn't take into account the block time change. 
This /// was fixed and this test is here to demonstrate the fix. #[tokio::test] async fn test_sway_timestamp() -> Result<()> { let block_time = 1u32; // seconds let provider_config = NodeConfig { block_production: Trigger::Interval { block_time: std::time::Duration::from_secs(block_time.into()), }, ..NodeConfig::default() }; let mut wallets = launch_custom_provider_and_get_wallets( WalletsConfig::new(Some(1), Some(1), Some(100)), Some(provider_config), None, ) .await?; let wallet = wallets.pop().unwrap(); let provider = wallet.provider(); setup_program_test!( Abigen(Contract( name = "TestContract", project = "e2e/sway/contracts/block_timestamp" )), Deploy( name = "contract_instance", contract = "TestContract", wallet = "wallet", random_salt = false, ), ); let origin_timestamp = provider.latest_block_time().await?.unwrap(); let methods = contract_instance.methods(); let response = methods.return_timestamp().call().await?; let mut expected_datetime = origin_timestamp.add(Duration::try_seconds(block_time as i64).unwrap()); assert_eq!(convert_to_datetime(response.value), expected_datetime); let blocks_to_produce = 600; provider.produce_blocks(blocks_to_produce, None).await?; let response = methods.return_timestamp().call().await?; // `produce_blocks` call expected_datetime = expected_datetime .add(Duration::try_seconds((block_time * blocks_to_produce) as i64).unwrap()); // method call expected_datetime = expected_datetime.add(Duration::try_seconds(block_time as i64).unwrap()); assert_eq!(convert_to_datetime(response.value), expected_datetime); assert_eq!( provider.latest_block_time().await?.unwrap(), expected_datetime ); Ok(()) } #[cfg(feature = "coin-cache")] async fn create_transfer(wallet: &Wallet, amount: u64, to: Address) -> Result<ScriptTransaction> { let asset_id = AssetId::zeroed(); let inputs = wallet .get_asset_inputs_for_amount(asset_id, amount.into(), None) .await?; let outputs = wallet.get_asset_outputs_for_amount(to, asset_id, amount); let mut tb = 
ScriptTransactionBuilder::prepare_transfer(inputs, outputs, TxPolicies::default()); wallet.adjust_for_fee(&mut tb, amount.into()).await?; wallet.add_witnesses(&mut tb)?; tb.build(wallet.provider()).await } #[cfg(feature = "coin-cache")] #[tokio::test] async fn transactions_with_the_same_utxo() -> Result<()> { use fuels::types::errors::transaction; let wallet_1 = launch_provider_and_get_wallet().await?; let provider = wallet_1.provider(); let wallet_2 = Wallet::random(&mut thread_rng(), provider.clone()); let tx_1 = create_transfer(&wallet_1, 100, wallet_2.address()).await?; let tx_2 = create_transfer(&wallet_1, 101, wallet_2.address()).await?; let _tx_id = provider.send_transaction(tx_1).await?; let res = provider.send_transaction(tx_2).await; let err = res.expect_err("is error"); assert!(matches!( err, Error::Transaction(transaction::Reason::Validation(..)) )); assert!( err.to_string() .contains("was submitted recently in a transaction ") ); Ok(()) } #[cfg(feature = "coin-cache")] #[tokio::test] async fn coin_caching() -> Result<()> { let amount = 1000; let num_coins = 50; let mut wallets = launch_custom_provider_and_get_wallets( WalletsConfig::new(Some(1), Some(num_coins), Some(amount)), Some(NodeConfig::default()), None, ) .await?; let wallet_1 = wallets.pop().unwrap(); let provider = wallet_1.provider(); let wallet_2 = Wallet::random(&mut thread_rng(), provider.clone()); // Consecutively send transfer txs. Without caching, the txs will // end up trying to use the same input coins because 'get_spendable_coins()' // won't filter out recently used coins. 
let num_iterations = 10; let amount_to_send = 100; let mut tx_ids = vec![]; for _ in 0..num_iterations { let tx = create_transfer(&wallet_1, amount_to_send, wallet_2.address()).await?; let tx_id = provider.send_transaction(tx).await?; tx_ids.push(tx_id); } provider.produce_blocks(10, None).await?; // Confirm all txs are settled for tx_id in tx_ids { let status = provider.tx_status(&tx_id).await?; assert!(matches!(status, TxStatus::Success { .. })); } // Verify the transfers were successful assert_eq!( wallet_2.get_asset_balance(&AssetId::zeroed()).await?, (num_iterations * amount_to_send) as u128 ); Ok(()) } #[cfg(feature = "coin-cache")] async fn create_revert_tx(wallet: &Wallet) -> Result<ScriptTransaction> { let script = std::fs::read("sway/scripts/reverting/out/release/reverting.bin")?; let amount = 1u64; let asset_id = AssetId::zeroed(); let inputs = wallet .get_asset_inputs_for_amount(asset_id, amount.into(), None) .await?; let outputs = wallet.get_asset_outputs_for_amount(Address::default(), asset_id, amount); let mut tb = ScriptTransactionBuilder::prepare_transfer(inputs, outputs, TxPolicies::default()) .with_script(script); wallet.adjust_for_fee(&mut tb, amount.into()).await?; wallet.add_witnesses(&mut tb)?; tb.build(wallet.provider()).await } #[cfg(feature = "coin-cache")] #[tokio::test] async fn test_cache_invalidation_on_await() -> Result<()> { let block_time = 1u32; let provider_config = NodeConfig { block_production: Trigger::Interval { block_time: std::time::Duration::from_secs(block_time.into()), }, ..NodeConfig::default() }; // create wallet with 1 coin so that the cache prevents further // spending unless the coin is invalidated from the cache let mut wallets = launch_custom_provider_and_get_wallets( WalletsConfig::new(Some(1), Some(1), Some(100)), Some(provider_config), None, ) .await?; let wallet = wallets.pop().unwrap(); let provider = wallet.provider(); let tx = create_revert_tx(&wallet).await?; // Pause time so that the cache doesn't 
invalidate items based on TTL tokio::time::pause(); // tx inputs should be cached and then invalidated due to the tx failing let tx_status = provider.send_transaction_and_await_commit(tx).await?; assert!(matches!(tx_status, TxStatus::Failure { .. })); let consensus_parameters = provider.consensus_parameters().await?; let coins = wallet .get_spendable_resources(*consensus_parameters.base_asset_id(), 1, None) .await?; assert_eq!(coins.len(), 1); Ok(()) } #[tokio::test] async fn can_fetch_mint_transactions() -> Result<()> { setup_program_test!( Wallets("wallet"), Abigen(Contract( name = "TestContract", project = "e2e/sway/contracts/contract_test" )), Deploy( name = "contract_instance", contract = "TestContract", wallet = "wallet", random_salt = false, ), ); let provider = wallet.provider(); let transactions = provider .get_transactions(PaginationRequest { cursor: None, results: 20, direction: PageDirection::Forward, }) .await? .results; // TODO: remove once (fuels-rs#1093)[https://github.com/FuelLabs/fuels-rs/issues/1093] is in // until then the type is explicitly mentioned to check that we're reexporting it through fuels let _: ::fuels::types::transaction::MintTransaction = transactions .into_iter() .find_map(|tx| match tx.transaction { TransactionType::Mint(tx) => Some(tx), _ => None, }) .expect("Should have had at least one mint transaction"); Ok(()) } #[tokio::test] async fn test_build_with_provider() -> Result<()> { let wallet = launch_provider_and_get_wallet().await?; let provider = wallet.provider(); let receiver = Wallet::random(&mut thread_rng(), provider.clone()); let consensus_parameters = provider.consensus_parameters().await?; let inputs = wallet .get_asset_inputs_for_amount(*consensus_parameters.base_asset_id(), 100, None) .await?; let outputs = wallet.get_asset_outputs_for_amount( receiver.address(), *consensus_parameters.base_asset_id(), 100, ); let mut tb = ScriptTransactionBuilder::prepare_transfer(inputs, outputs, TxPolicies::default()); 
wallet.add_witnesses(&mut tb)?; let tx = tb.build(provider).await?; provider.send_transaction_and_await_commit(tx).await?; let receiver_balance = receiver .get_asset_balance(consensus_parameters.base_asset_id()) .await?; assert_eq!(receiver_balance, 100); Ok(()) } #[tokio::test] async fn send_transaction_and_await_status() -> Result<()> { let wallet = launch_provider_and_get_wallet().await?; let provider = wallet.provider(); let consensus_parameters = provider.consensus_parameters().await?; let inputs = wallet .get_asset_inputs_for_amount(*consensus_parameters.base_asset_id(), 100, None) .await?; let outputs = wallet.get_asset_outputs_for_amount( Address::default(), *consensus_parameters.base_asset_id(), 100, ); // Given let mut tb = ScriptTransactionBuilder::prepare_transfer(inputs, outputs, TxPolicies::default()); wallet.add_witnesses(&mut tb)?; let tx = tb.build(provider).await?; // When let status = provider.send_transaction_and_await_status(tx, true).await?; // Then assert_eq!(status.len(), 3); assert!(status.iter().enumerate().all(|(i, tx_status)| { matches!( (i, tx_status.clone().unwrap()), (0, TxStatus::Submitted) | (1, TxStatus::PreconfirmationSuccess { .. }) | (2, TxStatus::Success { .. }) ) })); Ok(()) } #[tokio::test] async fn send_transaction_and_subscribe_status() -> Result<()> { let config = NodeConfig { block_production: Trigger::Never, ..NodeConfig::default() }; let wallet = launch_custom_provider_and_get_wallets(WalletsConfig::default(), Some(config), None) .await?[0] .clone(); let provider = wallet.provider(); let consensus_parameters = provider.consensus_parameters().await?; let inputs = wallet .get_asset_inputs_for_amount(*consensus_parameters.base_asset_id(), 100, None) .await?; let outputs = wallet.get_asset_outputs_for_amount( Address::default(), *consensus_parameters.base_asset_id(), 100, );
rust
Apache-2.0
865e00c295de8b4a0a1ef7ac926c3c8266d5151b
2026-01-04T15:31:59.450823Z
true
FuelLabs/fuels-rs
https://github.com/FuelLabs/fuels-rs/blob/865e00c295de8b4a0a1ef7ac926c3c8266d5151b/examples/codec/src/lib.rs
examples/codec/src/lib.rs
#[cfg(test)] mod tests { use fuels::{ core::codec::{DecoderConfig, EncoderConfig}, types::errors::Result, }; #[test] fn encoding_a_type() -> Result<()> { //ANCHOR: encoding_example use fuels::{ core::{codec::ABIEncoder, traits::Tokenizable}, macros::Tokenizable, }; #[derive(Tokenizable)] struct MyStruct { field: u64, } let instance = MyStruct { field: 101 }; let _encoded: Vec<u8> = ABIEncoder::default().encode(&[instance.into_token()])?; //ANCHOR_END: encoding_example Ok(()) } #[test] fn encoding_via_macro() -> Result<()> { //ANCHOR: encoding_example_w_macro use fuels::{core::codec::calldata, macros::Tokenizable}; #[derive(Tokenizable)] struct MyStruct { field: u64, } let _: Vec<u8> = calldata!(MyStruct { field: 101 }, MyStruct { field: 102 })?; //ANCHOR_END: encoding_example_w_macro Ok(()) } #[test] fn decoding_example() -> Result<()> { // ANCHOR: decoding_example use fuels::{ core::{ codec::ABIDecoder, traits::{Parameterize, Tokenizable}, }, macros::{Parameterize, Tokenizable}, types::Token, }; #[derive(Parameterize, Tokenizable)] struct MyStruct { field: u64, } let bytes: &[u8] = &[0, 0, 0, 0, 0, 0, 0, 101]; let token: Token = ABIDecoder::default().decode(&MyStruct::param_type(), bytes)?; let _: MyStruct = MyStruct::from_token(token)?; // ANCHOR_END: decoding_example Ok(()) } #[test] fn decoding_example_try_into() -> Result<()> { // ANCHOR: decoding_example_try_into use fuels::macros::{Parameterize, Tokenizable, TryFrom}; #[derive(Parameterize, Tokenizable, TryFrom)] struct MyStruct { field: u64, } let bytes: &[u8] = &[0, 0, 0, 0, 0, 0, 0, 101]; let _: MyStruct = bytes.try_into()?; // ANCHOR_END: decoding_example_try_into Ok(()) } #[test] fn configuring_the_decoder() -> Result<()> { // ANCHOR: configuring_the_decoder use fuels::core::codec::ABIDecoder; ABIDecoder::new(DecoderConfig { max_depth: 5, max_tokens: 100, }); // ANCHOR_END: configuring_the_decoder Ok(()) } #[test] fn configuring_the_encoder() -> Result<()> { // ANCHOR: configuring_the_encoder use 
fuels::core::codec::ABIEncoder; ABIEncoder::new(EncoderConfig { max_depth: 5, max_tokens: 100, }); // ANCHOR_END: configuring_the_encoder Ok(()) } }
rust
Apache-2.0
865e00c295de8b4a0a1ef7ac926c3c8266d5151b
2026-01-04T15:31:59.450823Z
false
FuelLabs/fuels-rs
https://github.com/FuelLabs/fuels-rs/blob/865e00c295de8b4a0a1ef7ac926c3c8266d5151b/examples/wallets/src/lib.rs
examples/wallets/src/lib.rs
#[cfg(test)] mod tests { use fuels::{ accounts::{ keystore::Keystore, signers::{derivation::DEFAULT_DERIVATION_PATH, private_key::PrivateKeySigner}, }, crypto::SecretKey, prelude::*, }; use rand::thread_rng; #[tokio::test] async fn create_random_wallet() -> Result<()> { // ANCHOR: create_random_wallet use fuels::prelude::*; // Use the test helper to setup a test provider. let provider = setup_test_provider(vec![], vec![], None, None).await?; // Create the wallet. let _wallet = Wallet::random(&mut thread_rng(), provider); // ANCHOR_END: create_random_wallet Ok(()) } #[tokio::test] async fn create_wallet_from_secret_key() -> std::result::Result<(), Box<dyn std::error::Error>> { // ANCHOR: create_wallet_from_secret_key use std::str::FromStr; use fuels::{crypto::SecretKey, prelude::*}; // Use the test helper to setup a test provider. let provider = setup_test_provider(vec![], vec![], None, None).await?; // Setup the private key. let secret = SecretKey::from_str( "5f70feeff1f229e4a95e1056e8b4d80d0b24b565674860cc213bdb07127ce1b1", )?; // Create the wallet. let _wallet = Wallet::new(PrivateKeySigner::new(secret), provider); // ANCHOR_END: create_wallet_from_secret_key Ok(()) } #[tokio::test] async fn create_wallet_from_mnemonic() -> Result<()> { // ANCHOR: create_wallet_from_mnemonic use fuels::prelude::*; let phrase = "oblige salon price punch saddle immune slogan rare snap desert retire surprise"; // Use the test helper to setup a test provider. let provider = setup_test_provider(vec![], vec![], None, None).await?; // Create first account from mnemonic phrase. let key = SecretKey::new_from_mnemonic_phrase_with_path(phrase, "m/44'/1179993420'/0'/0/0")?; let signer = PrivateKeySigner::new(key); let _wallet = Wallet::new(signer, provider.clone()); // Or with the default derivation path. 
let key = SecretKey::new_from_mnemonic_phrase_with_path(phrase, DEFAULT_DERIVATION_PATH)?; let signer = PrivateKeySigner::new(key); let wallet = Wallet::new(signer, provider); let expected_address = "f18b6446deb8135544ba60333e5b7522685cd2cf64aa4e4c75df725149850b65"; assert_eq!(wallet.address().to_string(), expected_address); // ANCHOR_END: create_wallet_from_mnemonic Ok(()) } #[tokio::test] async fn create_and_store_mnemonic_key() -> Result<()> { // ANCHOR: create_and_store_mnemonic_key let dir = std::env::temp_dir(); let keystore = Keystore::new(&dir); let phrase = "oblige salon price punch saddle immune slogan rare snap desert retire surprise"; // Create a key from the mnemonic phrase using the default derivation path. let key = SecretKey::new_from_mnemonic_phrase_with_path(phrase, DEFAULT_DERIVATION_PATH)?; let password = "my_master_password"; // Encrypt and store the key on disk. It can be recovered using `Keystore::load_key`. let uuid = keystore.save_key(key, password, thread_rng())?; // Recover key from disk let recovered_key = keystore.load_key(&uuid, password)?; // ANCHOR_END: create_and_store_mnemonic_key assert_eq!(key, recovered_key); Ok(()) } #[tokio::test] async fn wallet_transfer() -> Result<()> { // ANCHOR: wallet_transfer use fuels::prelude::*; // Setup 2 test wallets with 1 coin each. let num_wallets = 2; let coins_per_wallet = 1; let coin_amount = 2; let wallets = launch_custom_provider_and_get_wallets( WalletsConfig::new(Some(num_wallets), Some(coins_per_wallet), Some(coin_amount)), None, None, ) .await?; // Transfer the base asset with amount 1 from wallet 1 to wallet 2. let transfer_amount = 1; let asset_id = Default::default(); let _res = wallets[0] .transfer( wallets[1].address(), transfer_amount, asset_id, TxPolicies::default(), ) .await?; let wallet_2_final_coins = wallets[1].get_coins(AssetId::zeroed()).await?; // Check that wallet 2 now has 2 coins. 
assert_eq!(wallet_2_final_coins.len(), 2); // ANCHOR_END: wallet_transfer Ok(()) } #[tokio::test] async fn wallet_contract_transfer() -> Result<()> { use fuels::prelude::*; use rand::Fill; let mut rng = rand::thread_rng(); let base_asset = AssetConfig { id: AssetId::zeroed(), num_coins: 1, coin_amount: 1000, }; let mut random_asset_id = AssetId::zeroed(); random_asset_id.try_fill(&mut rng).unwrap(); let random_asset = AssetConfig { id: random_asset_id, num_coins: 3, coin_amount: 100, }; let wallet_config = WalletsConfig::new_multiple_assets(1, vec![random_asset, base_asset]); let wallet = launch_custom_provider_and_get_wallets(wallet_config, None, None) .await? .pop() .unwrap(); let contract_id = Contract::load_from( "../../e2e/sway/contracts/contract_test/out/release/contract_test.bin", LoadConfiguration::default(), )? .deploy(&wallet, TxPolicies::default()) .await? .contract_id; // ANCHOR: wallet_contract_transfer // Check the current balance of the contract with id 'contract_id'. let contract_balances = wallet .try_provider()? .get_contract_balances(&contract_id) .await?; assert!(contract_balances.is_empty()); // Transfer an amount of 300 to the contract. let amount = 300; let asset_id = random_asset_id; let _res = wallet .force_transfer_to_contract(contract_id, amount, asset_id, TxPolicies::default()) .await?; // Check that the contract now has 1 coin. let contract_balances = wallet .try_provider()? .get_contract_balances(&contract_id) .await?; assert_eq!(contract_balances.len(), 1); let random_asset_balance = contract_balances.get(&random_asset_id).unwrap(); assert_eq!(*random_asset_balance, 300); // ANCHOR_END: wallet_contract_transfer Ok(()) } #[tokio::test] #[allow(unused_variables)] async fn setup_multiple_wallets() -> Result<()> { // ANCHOR: multiple_wallets_helper use fuels::prelude::*; // This helper launches a local node and provides 10 test wallets linked to it. // The initial balance defaults to 1 coin per wallet with an amount of 1_000_000_000. 
let wallets = launch_custom_provider_and_get_wallets(WalletsConfig::default(), None, None).await?; // ANCHOR_END: multiple_wallets_helper // ANCHOR: setup_5_wallets let num_wallets = 5; let coins_per_wallet = 3; let amount_per_coin = 100; let config = WalletsConfig::new( Some(num_wallets), Some(coins_per_wallet), Some(amount_per_coin), ); // Launches a local node and provides test wallets as specified by the config. let wallets = launch_custom_provider_and_get_wallets(config, None, None).await?; // ANCHOR_END: setup_5_wallets Ok(()) } #[tokio::test] #[allow(unused_variables)] async fn setup_wallet_multiple_assets() -> Result<()> { // ANCHOR: multiple_assets_wallet // ANCHOR: multiple_assets_coins use fuels::prelude::*; let signer = PrivateKeySigner::random(&mut thread_rng()); let num_assets = 5; let coins_per_asset = 10; let amount_per_coin = 15; let (coins, asset_ids) = setup_multiple_assets_coins( signer.address(), num_assets, coins_per_asset, amount_per_coin, ); // ANCHOR_END: multiple_assets_coins let provider = setup_test_provider(coins.clone(), vec![], None, None).await?; let wallet = Wallet::new(signer, provider); // ANCHOR_END: multiple_assets_wallet Ok(()) } #[tokio::test] #[allow(unused_variables)] async fn setup_wallet_custom_assets() -> std::result::Result<(), Box<dyn std::error::Error>> { // ANCHOR: custom_assets_wallet use fuels::prelude::*; use rand::Fill; let mut rng = rand::thread_rng(); let signer = PrivateKeySigner::random(&mut rng); let asset_base = AssetConfig { id: AssetId::zeroed(), num_coins: 2, coin_amount: 4, }; let mut asset_id_1 = AssetId::zeroed(); asset_id_1.try_fill(&mut rng)?; let asset_1 = AssetConfig { id: asset_id_1, num_coins: 6, coin_amount: 8, }; let mut asset_id_2 = AssetId::zeroed(); asset_id_2.try_fill(&mut rng)?; let asset_2 = AssetConfig { id: asset_id_2, num_coins: 10, coin_amount: 12, }; let assets = vec![asset_base, asset_1, asset_2]; let coins = setup_custom_assets_coins(signer.address(), &assets); let provider = 
setup_test_provider(coins, vec![], None, None).await?; let wallet = Wallet::new(signer, provider.clone()); // ANCHOR_END: custom_assets_wallet // ANCHOR: custom_assets_wallet_short let num_wallets = 1; let wallet_config = WalletsConfig::new_multiple_assets(num_wallets, assets); let wallets = launch_custom_provider_and_get_wallets(wallet_config, None, None).await?; // ANCHOR_END: custom_assets_wallet_short // ANCHOR: wallet_to_address let wallet = Wallet::random(&mut rng, provider); let address: Address = wallet.address(); // ANCHOR_END: wallet_to_address Ok(()) } #[tokio::test] #[allow(unused_variables)] async fn get_balances() -> Result<()> { use std::collections::HashMap; use fuels::{ prelude::{DEFAULT_COIN_AMOUNT, DEFAULT_NUM_COINS, launch_provider_and_get_wallet}, types::AssetId, }; let wallet = launch_provider_and_get_wallet().await?; // ANCHOR: get_asset_balance let asset_id = AssetId::zeroed(); let balance: u128 = wallet.get_asset_balance(&asset_id).await?; // ANCHOR_END: get_asset_balance // ANCHOR: get_balances let balances: HashMap<String, u128> = wallet.get_balances().await?; // ANCHOR_END: get_balances // ANCHOR: get_balance_hashmap let asset_balance = balances.get(&asset_id.to_string()).unwrap(); // ANCHOR_END: get_balance_hashmap assert_eq!( *asset_balance, (DEFAULT_COIN_AMOUNT * DEFAULT_NUM_COINS) as u128 ); Ok(()) } #[tokio::test] async fn wallet_transfer_to_base_layer() -> Result<()> { // ANCHOR: wallet_withdraw_to_base use std::str::FromStr; use fuels::prelude::*; let wallets = launch_custom_provider_and_get_wallets( WalletsConfig::new(Some(1), None, None), None, None, ) .await?; let wallet = wallets.first().unwrap(); let amount = 1000; let base_layer_address = Address::from_str( "0x4710162c2e3a95a6faff05139150017c9e38e5e280432d546fae345d6ce6d8fe", )?; // Transfer an amount of 1000 to the specified base layer address. 
let response = wallet .withdraw_to_base_layer(base_layer_address, amount, TxPolicies::default()) .await?; let _block_height = wallet.provider().produce_blocks(1, None).await?; // Retrieve a message proof from the provider. let proof = wallet .try_provider()? .get_message_proof(&response.tx_id, &response.nonce, None, Some(2)) .await?; // Verify the amount and recipient. assert_eq!(proof.amount, amount); assert_eq!(proof.recipient, base_layer_address); // ANCHOR_END: wallet_withdraw_to_base Ok(()) } }
rust
Apache-2.0
865e00c295de8b4a0a1ef7ac926c3c8266d5151b
2026-01-04T15:31:59.450823Z
false
FuelLabs/fuels-rs
https://github.com/FuelLabs/fuels-rs/blob/865e00c295de8b4a0a1ef7ac926c3c8266d5151b/examples/macros/src/lib.rs
examples/macros/src/lib.rs
extern crate alloc; #[cfg(test)] mod tests { use fuels::prelude::*; #[test] fn example_of_abigen_usage() { // ANCHOR: multiple_abigen_program_types abigen!( Contract( name = "ContractA", abi = "e2e/sway/bindings/sharing_types/contract_a/out/release/contract_a-abi.json" ), Contract( name = "ContractB", abi = "e2e/sway/bindings/sharing_types/contract_b/out/release/contract_b-abi.json" ), Script( name = "MyScript", abi = "e2e/sway/scripts/arguments/out/release/arguments-abi.json" ), Predicate( name = "MyPredicateEncoder", abi = "e2e/sway/predicates/basic_predicate/out/release/basic_predicate-abi.json" ), ); // ANCHOR_END: multiple_abigen_program_types } #[test] fn macro_deriving() { // ANCHOR: deriving_traits use fuels::macros::{Parameterize, Tokenizable}; #[derive(Parameterize, Tokenizable)] #[allow(dead_code)] struct MyStruct { field_a: u8, } #[derive(Parameterize, Tokenizable)] #[allow(dead_code)] enum SomeEnum { A(MyStruct), B(Vec<u64>), } // ANCHOR_END: deriving_traits } #[test] fn macro_deriving_extra() { { use fuels::{ core as fuels_core_elsewhere, macros::{Parameterize, Tokenizable}, types as fuels_types_elsewhere, }; // ANCHOR: deriving_traits_paths #[derive(Parameterize, Tokenizable)] #[FuelsCorePath = "fuels_core_elsewhere"] #[FuelsTypesPath = "fuels_types_elsewhere"] #[allow(dead_code)] pub struct SomeStruct { field_a: u64, } // ANCHOR_END: deriving_traits_paths } { // ANCHOR: deriving_traits_nostd use fuels::macros::{Parameterize, Tokenizable}; #[derive(Parameterize, Tokenizable)] #[NoStd] #[allow(dead_code)] pub struct SomeStruct { field_a: u64, } // ANCHOR_END: deriving_traits_nostd } } }
rust
Apache-2.0
865e00c295de8b4a0a1ef7ac926c3c8266d5151b
2026-01-04T15:31:59.450823Z
false
FuelLabs/fuels-rs
https://github.com/FuelLabs/fuels-rs/blob/865e00c295de8b4a0a1ef7ac926c3c8266d5151b/examples/rust_bindings/src/rust_bindings_formatted.rs
examples/rust_bindings/src/rust_bindings_formatted.rs
pub mod abigen_bindings { pub mod my_contract_mod { #[derive(Debug, Clone)] pub struct MyContract<A: ::fuels::accounts::Account> { contract_id: ::fuels::types::ContractId, account: A, log_decoder: ::fuels::core::codec::LogDecoder, encoder_config: ::fuels::core::codec::EncoderConfig, } impl<A: ::fuels::accounts::Account> MyContract<A> { pub fn new(contract_id: ::fuels::types::ContractId, account: A) -> Self { let log_decoder = ::fuels::core::codec::LogDecoder::new( ::fuels::core::codec::log_formatters_lookup(vec![], contract_id.clone().into()), ); let encoder_config = ::fuels::core::codec::EncoderConfig::default(); Self { contract_id, account, log_decoder, encoder_config, } } pub fn contract_id(&self) -> &::fuels::types::ContractId { self.contract_id } pub fn account(&self) -> A { self.account.clone() } pub fn with_account<U: ::fuels::accounts::Account>(self, account: U) -> MyContract<U> { MyContract { contract_id: self.contract_id, account, log_decoder: self.log_decoder, encoder_config: self.encoder_config, } } pub fn with_encoder_config( mut self, encoder_config: ::fuels::core::codec::EncoderConfig, ) -> MyContract<A> { self.encoder_config = encoder_config; self } pub async fn get_balances( &self, ) -> ::fuels::types::errors::Result< ::std::collections::HashMap<::fuels::types::AssetId, u64>, > { ::fuels::accounts::ViewOnlyAccount::try_provider(&self.account)? 
.get_contract_balances(&self.contract_id) .await .map_err(::std::convert::Into::into) } pub fn methods(&self) -> MyContractMethods<A> { MyContractMethods { contract_id: self.contract_id.clone(), account: self.account.clone(), log_decoder: self.log_decoder.clone(), encoder_config: self.encoder_config.clone(), } } } pub struct MyContractMethods<A: ::fuels::accounts::Account> { contract_id: ::fuels::types::ContractId, account: A, log_decoder: ::fuels::core::codec::LogDecoder, encoder_config: ::fuels::core::codec::EncoderConfig, } impl<A: ::fuels::accounts::Account> MyContractMethods<A> { #[doc = " This method will read the counter from storage, increment it"] #[doc = " and write the incremented value to storage"] pub fn increment_counter( &self, value: ::core::primitive::u64, ) -> ::fuels::programs::calls::CallHandler< A, ::fuels::programs::calls::ContractCall, ::core::primitive::u64, > { ::fuels::programs::calls::CallHandler::new_contract_call( self.contract_id.clone(), self.account.clone(), ::fuels::core::codec::encode_fn_selector("increment_counter"), &[::fuels::core::traits::Tokenizable::into_token(value)], self.log_decoder.clone(), false, self.encoder_config.clone(), ) } pub fn initialize_counter( &self, value: ::core::primitive::u64, ) -> ::fuels::programs::calls::CallHandler< A, ::fuels::programs::calls::ContractCall, ::core::primitive::u64, > { ::fuels::programs::calls::CallHandler::new_contract_call( self.contract_id.clone(), self.account.clone(), ::fuels::core::codec::encode_fn_selector("initialize_counter"), &[::fuels::core::traits::Tokenizable::into_token(value)], self.log_decoder.clone(), false, self.encoder_config.clone(), ) } } impl<A: ::fuels::accounts::Account> ::fuels::programs::calls::ContractDependency for MyContract<A> { fn id(&self) -> ::fuels::types::ContractId { self.contract_id.clone() } fn log_decoder(&self) -> ::fuels::core::codec::LogDecoder { self.log_decoder.clone() } } #[derive(Clone, Debug, Default)] pub struct MyContractConfigurables { 
offsets_with_data: ::std::vec::Vec<(u64, ::std::vec::Vec<u8>)>, encoder: ::fuels::core::codec::ABIEncoder, } impl MyContractConfigurables { pub fn new(encoder_config: ::fuels::core::codec::EncoderConfig) -> Self { Self { encoder: ::fuels::core::codec::ABIEncoder::new(encoder_config), ..::std::default::Default::default() } } } impl From<MyContractConfigurables> for ::fuels::core::Configurables { fn from(config: MyContractConfigurables) -> Self { ::fuels::core::Configurables::new(config.offsets_with_data) } } } } pub use abigen_bindings::my_contract_mod::MyContract; pub use abigen_bindings::my_contract_mod::MyContractConfigurables; pub use abigen_bindings::my_contract_mod::MyContractMethods;
rust
Apache-2.0
865e00c295de8b4a0a1ef7ac926c3c8266d5151b
2026-01-04T15:31:59.450823Z
false
FuelLabs/fuels-rs
https://github.com/FuelLabs/fuels-rs/blob/865e00c295de8b4a0a1ef7ac926c3c8266d5151b/examples/rust_bindings/src/lib.rs
examples/rust_bindings/src/lib.rs
#[cfg(test)] mod tests { use fuels::prelude::Result; #[tokio::test] #[allow(unused_variables)] async fn transform_json_to_bindings() -> Result<()> { use fuels::test_helpers::launch_provider_and_get_wallet; let wallet = launch_provider_and_get_wallet().await?; { // ANCHOR: use_abigen use fuels::prelude::*; // Replace with your own JSON abi path (relative to the root of your crate) abigen!(Contract( name = "MyContractName", abi = "examples/rust_bindings/src/abi.json" )); // ANCHOR_END: use_abigen } { // ANCHOR: abigen_with_string use fuels::prelude::*; abigen!(Contract( name = "MyContract", abi = r#" { "programType": "contract", "specVersion": "1", "encodingVersion": "1", "concreteTypes": [ { "concreteTypeId": "1506e6f44c1d6291cdf46395a8e573276a4fa79e8ace3fc891e092ef32d1b0a0", "type": "u64" } ], "functions": [ { "inputs": [ { "name": "value", "concreteTypeId": "1506e6f44c1d6291cdf46395a8e573276a4fa79e8ace3fc891e092ef32d1b0a0" } ], "name": "initialize_counter", "output": "1506e6f44c1d6291cdf46395a8e573276a4fa79e8ace3fc891e092ef32d1b0a0" }, { "inputs": [ { "name": "value", "concreteTypeId": "1506e6f44c1d6291cdf46395a8e573276a4fa79e8ace3fc891e092ef32d1b0a0" } ], "name": "increment_counter", "output": "1506e6f44c1d6291cdf46395a8e573276a4fa79e8ace3fc891e092ef32d1b0a0" } ], "metadataTypes": [] } "# )); // ANCHOR_END: abigen_with_string } Ok(()) } }
rust
Apache-2.0
865e00c295de8b4a0a1ef7ac926c3c8266d5151b
2026-01-04T15:31:59.450823Z
false
FuelLabs/fuels-rs
https://github.com/FuelLabs/fuels-rs/blob/865e00c295de8b4a0a1ef7ac926c3c8266d5151b/examples/cookbook/src/lib.rs
examples/cookbook/src/lib.rs
#[cfg(test)] mod tests { use std::{str::FromStr, time::Duration}; use fuels::{ accounts::{ ViewOnlyAccount, predicate::Predicate, signers::private_key::PrivateKeySigner, wallet::Wallet, }, prelude::Result, test_helpers::{setup_single_asset_coins, setup_test_provider}, tx::ContractIdExt, types::{ Address, AssetId, SubAssetId, transaction::TxPolicies, transaction_builders::{ BuildableTransaction, ScriptTransactionBuilder, TransactionBuilder, }, tx_status::TxStatus, }, }; use rand::thread_rng; #[tokio::test] async fn liquidity() -> Result<()> { use fuels::{ prelude::*, test_helpers::{AssetConfig, WalletsConfig}, }; // ANCHOR: liquidity_abigen abigen!(Contract( name = "MyContract", abi = "e2e/sway/contracts/liquidity_pool/out/release/liquidity_pool-abi.json" )); // ANCHOR_END: liquidity_abigen // ANCHOR: liquidity_wallet let base_asset_id: AssetId = "0x9ae5b658754e096e4d681c548daf46354495a437cc61492599e33fc64dcdc30c".parse()?; let asset_ids = [AssetId::zeroed(), base_asset_id]; let asset_configs = asset_ids .map(|id| AssetConfig { id, num_coins: 1, coin_amount: 1_000_000, }) .into(); let wallet_config = WalletsConfig::new_multiple_assets(1, asset_configs); let wallets = launch_custom_provider_and_get_wallets(wallet_config, None, None).await?; let wallet = &wallets[0]; // ANCHOR_END: liquidity_wallet // ANCHOR: liquidity_deploy let contract_id = Contract::load_from( "../../e2e/sway/contracts/liquidity_pool/out/release/liquidity_pool.bin", LoadConfiguration::default(), )? .deploy(wallet, TxPolicies::default()) .await? .contract_id; let contract_methods = MyContract::new(contract_id, wallet.clone()).methods(); // ANCHOR_END: liquidity_deploy // ANCHOR: liquidity_deposit let deposit_amount = 1_000_000; let call_params = CallParameters::default() .with_amount(deposit_amount) .with_asset_id(base_asset_id); contract_methods .deposit(wallet.address().into()) .call_params(call_params)? 
.with_variable_output_policy(VariableOutputPolicy::Exactly(1)) .call() .await?; // ANCHOR_END: liquidity_deposit // ANCHOR: liquidity_withdraw let lp_asset_id = contract_id.asset_id(&SubAssetId::zeroed()); let lp_token_balance = wallet.get_asset_balance(&lp_asset_id).await?; let call_params = CallParameters::default() .with_amount(lp_token_balance.try_into().unwrap()) .with_asset_id(lp_asset_id); contract_methods .withdraw(wallet.address().into()) .call_params(call_params)? .with_variable_output_policy(VariableOutputPolicy::Exactly(1)) .call() .await?; let base_balance = wallet.get_asset_balance(&base_asset_id).await?; assert_eq!(base_balance, deposit_amount as u128); // ANCHOR_END: liquidity_withdraw Ok(()) } #[tokio::test] async fn custom_chain() -> Result<()> { // ANCHOR: custom_chain_import use fuels::{ prelude::*, tx::{ConsensusParameters, FeeParameters, TxParameters}, }; // ANCHOR_END: custom_chain_import // ANCHOR: custom_chain_consensus let tx_params = TxParameters::default() .with_max_gas_per_tx(1_000) .with_max_inputs(2); let fee_params = FeeParameters::default().with_gas_price_factor(10); let mut consensus_parameters = ConsensusParameters::default(); consensus_parameters.set_tx_params(tx_params); consensus_parameters.set_fee_params(fee_params); let chain_config = ChainConfig { consensus_parameters, ..ChainConfig::default() }; // ANCHOR_END: custom_chain_consensus // ANCHOR: custom_chain_coins let signer = PrivateKeySigner::random(&mut thread_rng()); let coins = setup_single_asset_coins( signer.address(), Default::default(), DEFAULT_NUM_COINS, DEFAULT_COIN_AMOUNT, ); // ANCHOR_END: custom_chain_coins // ANCHOR: custom_chain_provider let node_config = NodeConfig::default(); let _provider = setup_test_provider(coins, vec![], Some(node_config), Some(chain_config)).await?; // ANCHOR_END: custom_chain_provider Ok(()) } #[tokio::test] async fn transfer_multiple() -> Result<()> { use std::str::FromStr; use fuels::prelude::*; // ANCHOR: transfer_multiple_setup 
let wallet_1_signer = PrivateKeySigner::random(&mut thread_rng()); const NUM_ASSETS: u64 = 5; const AMOUNT: u64 = 100_000; const NUM_COINS: u64 = 1; let (coins, _) = setup_multiple_assets_coins(wallet_1_signer.address(), NUM_ASSETS, NUM_COINS, AMOUNT); let provider = setup_test_provider(coins, vec![], None, None).await?; let wallet_1 = Wallet::new(wallet_1_signer, provider.clone()); let wallet_2 = Wallet::random(&mut thread_rng(), provider.clone()); // ANCHOR_END: transfer_multiple_setup // ANCHOR: transfer_multiple_input let balances = wallet_1.get_balances().await?; let consensus_parameters = provider.consensus_parameters().await?; let mut inputs = vec![]; let mut outputs = vec![]; for (id_string, amount) in balances { let id = AssetId::from_str(&id_string)?; let input = wallet_1 .get_asset_inputs_for_amount(id, amount, None) .await?; inputs.extend(input); // we don't transfer the full base asset so we can cover fees let output = if id == *consensus_parameters.base_asset_id() { wallet_1.get_asset_outputs_for_amount(wallet_2.address(), id, (amount / 2) as u64) } else { wallet_1.get_asset_outputs_for_amount(wallet_2.address(), id, amount as u64) }; outputs.extend(output); } // ANCHOR_END: transfer_multiple_input // ANCHOR: transfer_multiple_transaction let mut tb = ScriptTransactionBuilder::prepare_transfer(inputs, outputs, TxPolicies::default()); wallet_1.add_witnesses(&mut tb)?; let tx = tb.build(&provider).await?; provider.send_transaction_and_await_commit(tx).await?; let balances = wallet_2.get_balances().await?; assert_eq!(balances.len(), NUM_ASSETS as usize); for (id, balance) in balances { if id == *consensus_parameters.base_asset_id().to_string() { assert_eq!(balance, (AMOUNT / 2) as u128); } else { assert_eq!(balance, AMOUNT as u128); } } // ANCHOR_END: transfer_multiple_transaction Ok(()) } #[tokio::test] #[cfg(any(not(feature = "fuel-core-lib"), feature = "rocksdb"))] async fn create_or_use_rocksdb() -> Result<()> { use std::path::PathBuf; use 
fuels::prelude::*; // ANCHOR: create_or_use_rocksdb let provider_config = NodeConfig { database_type: DbType::RocksDb(Some(PathBuf::from("/tmp/.spider/db"))), ..NodeConfig::default() }; // ANCHOR_END: create_or_use_rocksdb launch_custom_provider_and_get_wallets(Default::default(), Some(provider_config), None) .await?; Ok(()) } #[tokio::test] async fn custom_transaction() -> Result<()> { let hot_wallet_signer = PrivateKeySigner::random(&mut thread_rng()); let code_path = "../../e2e/sway/predicates/swap/out/release/swap.bin"; let mut predicate = Predicate::load_from(code_path)?; let num_coins = 5; let amount = 1000; let bridged_asset_id = AssetId::from([1u8; 32]); let base_coins = setup_single_asset_coins( hot_wallet_signer.address(), AssetId::zeroed(), num_coins, amount, ); let other_coins = setup_single_asset_coins(predicate.address(), bridged_asset_id, num_coins, amount); let provider = setup_test_provider( base_coins.into_iter().chain(other_coins).collect(), vec![], None, None, ) .await?; provider.produce_blocks(100, None).await?; let hot_wallet = Wallet::new(hot_wallet_signer, provider.clone()); let cold_wallet = Wallet::random(&mut thread_rng(), provider.clone()); predicate.set_provider(provider.clone()); // ANCHOR: custom_tx_receiver let ask_amount = 100; let locked_amount = 500; let bridged_asset_id = AssetId::from([1u8; 32]); let receiver = Address::from_str("09c0b2d1a486c439a87bcba6b46a7a1a23f3897cc83a94521a96da5c23bc58db")?; // ANCHOR_END: custom_tx_receiver // ANCHOR: custom_tx let tb = ScriptTransactionBuilder::default(); // ANCHOR_END: custom_tx // ANCHOR: custom_tx_io_base let consensus_parameters = provider.consensus_parameters().await?; let base_inputs = hot_wallet .get_asset_inputs_for_amount(*consensus_parameters.base_asset_id(), ask_amount, None) .await?; let base_outputs = hot_wallet.get_asset_outputs_for_amount( receiver, *consensus_parameters.base_asset_id(), ask_amount as u64, ); // ANCHOR_END: custom_tx_io_base // ANCHOR: custom_tx_io_other 
let other_asset_inputs = predicate .get_asset_inputs_for_amount(bridged_asset_id, locked_amount, None) .await?; let other_asset_outputs = predicate.get_asset_outputs_for_amount(cold_wallet.address(), bridged_asset_id, 500); // ANCHOR_END: custom_tx_io_other // ANCHOR: custom_tx_io let inputs = base_inputs .into_iter() .chain(other_asset_inputs.into_iter()) .collect(); let outputs = base_outputs .into_iter() .chain(other_asset_outputs.into_iter()) .collect(); let mut tb = tb.with_inputs(inputs).with_outputs(outputs); // ANCHOR_END: custom_tx_io // ANCHOR: custom_tx_add_signer tb.add_signer(hot_wallet.signer().clone())?; // ANCHOR_END: custom_tx_add_signer // ANCHOR: custom_tx_adjust hot_wallet.adjust_for_fee(&mut tb, 100).await?; // ANCHOR_END: custom_tx_adjust // ANCHOR: custom_tx_policies let tx_policies = TxPolicies::default().with_maturity(64).with_expiration(128); let tb = tb.with_tx_policies(tx_policies); // ANCHOR_END: custom_tx_policies // ANCHOR: custom_tx_build let tx = tb.build(&provider).await?; let tx_id = provider.send_transaction(tx).await?; // ANCHOR_END: custom_tx_build tokio::time::sleep(Duration::from_millis(500)).await; // ANCHOR: custom_tx_verify let status = provider.tx_status(&tx_id).await?; assert!(matches!(status, TxStatus::Success { .. })); let balance: u128 = cold_wallet.get_asset_balance(&bridged_asset_id).await?; assert_eq!(balance, locked_amount); // ANCHOR_END: custom_tx_verify Ok(()) } }
rust
Apache-2.0
865e00c295de8b4a0a1ef7ac926c3c8266d5151b
2026-01-04T15:31:59.450823Z
false
FuelLabs/fuels-rs
https://github.com/FuelLabs/fuels-rs/blob/865e00c295de8b4a0a1ef7ac926c3c8266d5151b/examples/debugging/src/lib.rs
examples/debugging/src/lib.rs
#[cfg(test)] mod tests { use std::collections::HashMap; use fuel_abi_types::abi::unified_program::UnifiedProgramABI; use fuels::{ core::codec::ABIDecoder, macros::abigen, types::{SizedAsciiString, errors::Result, param_types::ParamType}, }; #[test] fn encode_fn_selector() { use fuels::core::codec::encode_fn_selector; // ANCHOR: example_fn_selector // fn some_fn_name(arg1: Vec<str[3]>, arg2: u8) let fn_name = "some_fn_name"; let selector = encode_fn_selector(fn_name); assert_eq!( selector, [ 0, 0, 0, 0, 0, 0, 0, 12, 115, 111, 109, 101, 95, 102, 110, 95, 110, 97, 109, 101 ] ); // ANCHOR_END: example_fn_selector } #[test] fn decoded_debug_matches_rust_debug() -> Result<()> { abigen!(Contract( name = "MyContract", abi = "e2e/sway/types/contracts/generics/out/release/generics-abi.json" )); let json_abi_file = "../../e2e/sway/types/contracts/generics/out/release/generics-abi.json"; let abi_file_contents = std::fs::read_to_string(json_abi_file)?; let parsed_abi = UnifiedProgramABI::from_json_abi(&abi_file_contents)?; let type_lookup = parsed_abi .types .into_iter() .map(|decl| (decl.type_id, decl)) .collect::<HashMap<_, _>>(); let get_first_fn_argument = |fn_name: &str| { parsed_abi .functions .iter() .find(|abi_fun| abi_fun.name == fn_name) .expect("should be there") .inputs .first() .expect("should be there") }; let decoder = ABIDecoder::default(); { // simple struct with a single generic parameter let type_application = get_first_fn_argument("struct_w_generic"); let param_type = ParamType::try_from_type_application(type_application, &type_lookup)?; let expected_struct = SimpleGeneric { single_generic_param: 123u64, }; assert_eq!( format!("{expected_struct:?}"), decoder.decode_as_debug_str(&param_type, [0, 0, 0, 0, 0, 0, 0, 123].as_slice())? 
); } { // struct that delegates the generic param internally let type_application = get_first_fn_argument("struct_delegating_generic"); let param_type = ParamType::try_from_type_application(type_application, &type_lookup)?; let expected_struct = PassTheGenericOn { one: SimpleGeneric { single_generic_param: SizedAsciiString::<3>::try_from("abc")?, }, }; assert_eq!( format!("{expected_struct:?}"), decoder.decode_as_debug_str(&param_type, [97, 98, 99].as_slice())? ); } { // enum with generic in variant let type_application = get_first_fn_argument("enum_w_generic"); let param_type = ParamType::try_from_type_application(type_application, &type_lookup)?; let expected_enum = EnumWGeneric::B(10u64); assert_eq!( format!("{expected_enum:?}"), decoder.decode_as_debug_str( &param_type, [0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 10].as_slice() )? ); } { // logged type let logged_type = parsed_abi .logged_types .as_ref() .expect("has logs") .first() .expect("has log"); let param_type = ParamType::try_from_type_application(&logged_type.application, &type_lookup)?; let expected_u8 = 1; assert_eq!( format!("{expected_u8}"), decoder.decode_as_debug_str(&param_type, [1].as_slice())? ); } Ok(()) } }
rust
Apache-2.0
865e00c295de8b4a0a1ef7ac926c3c8266d5151b
2026-01-04T15:31:59.450823Z
false
FuelLabs/fuels-rs
https://github.com/FuelLabs/fuels-rs/blob/865e00c295de8b4a0a1ef7ac926c3c8266d5151b/examples/types/src/lib.rs
examples/types/src/lib.rs
#[cfg(test)] mod tests { use fuels::{ prelude::Result, types::{Bits256, EvmAddress, Identity}, }; #[tokio::test] async fn bytes32() -> Result<()> { // ANCHOR: bytes32 use std::str::FromStr; use fuels::types::Bytes32; // Zeroed Bytes32 let b256 = Bytes32::zeroed(); // Grab the inner `[u8; 32]` from // `Bytes32` by dereferencing (i.e. `*`) it. assert_eq!([0u8; 32], *b256); // From a `[u8; 32]`. // ANCHOR: array_to_bytes32 let my_slice = [1u8; 32]; let b256 = Bytes32::new(my_slice); // ANCHOR_END: array_to_bytes32 assert_eq!([1u8; 32], *b256); // From a hex string. // ANCHOR: hex_string_to_bytes32 let hex_str = "0x0000000000000000000000000000000000000000000000000000000000000000"; let b256 = Bytes32::from_str(hex_str)?; // ANCHOR_END: hex_string_to_bytes32 assert_eq!([0u8; 32], *b256); // ANCHOR_END: bytes32 // ANCHOR: bytes32_format let b256_string = b256.to_string(); let b256_hex_string = format!("{b256:#x}"); // ANCHOR_END: bytes32_format assert_eq!(hex_str[2..], b256_string); assert_eq!(hex_str, b256_hex_string); // ANCHOR: bytes32_to_str let _str_from_bytes32: &str = b256.to_string().as_str(); // ANCHOR_END: bytes32_to_str Ok(()) } #[tokio::test] async fn address() -> Result<()> { // ANCHOR: address use std::str::FromStr; use fuels::types::Address; // Zeroed Bytes32 let address = Address::zeroed(); // Grab the inner `[u8; 32]` from // `Bytes32` by dereferencing (i.e. `*`) it. assert_eq!([0u8; 32], *address); // From a `[u8; 32]`. // ANCHOR: array_to_address let my_slice = [1u8; 32]; let address = Address::new(my_slice); // ANCHOR_END: array_to_address assert_eq!([1u8; 32], *address); // From a string. 
// ANCHOR: hex_string_to_address let hex_str = "0x0000000000000000000000000000000000000000000000000000000000000000"; let address = Address::from_str(hex_str)?; // ANCHOR_END: hex_string_to_address assert_eq!([0u8; 32], *address); // ANCHOR_END: address // ANCHOR: address_to_identity let _identity_from_address = Identity::Address(address); // ANCHOR_END: address_to_identity // ANCHOR: address_to_str let _str_from_address: &str = address.to_string().as_str(); // ANCHOR_END: address_to_str // ANCHOR: address_to_bits256 let bits_256 = Bits256(address.into()); // ANCHOR_END: address_to_bits256 // ANCHOR: b256_to_evm_address let _evm_address = EvmAddress::from(bits_256); // ANCHOR_END: b256_to_evm_address Ok(()) } #[tokio::test] async fn asset_id() -> Result<()> { // ANCHOR: asset_id use std::str::FromStr; use fuels::types::AssetId; // Zeroed Bytes32 let asset_id = AssetId::zeroed(); // Grab the inner `[u8; 32]` from // `Bytes32` by dereferencing (i.e. `*`) it. assert_eq!([0u8; 32], *asset_id); // From a `[u8; 32]`. // ANCHOR: array_to_asset_id let my_slice = [1u8; 32]; let asset_id = AssetId::new(my_slice); // ANCHOR_END: array_to_asset_id assert_eq!([1u8; 32], *asset_id); // From a string. // ANCHOR: string_to_asset_id let hex_str = "0x0000000000000000000000000000000000000000000000000000000000000000"; let asset_id = AssetId::from_str(hex_str)?; // ANCHOR_END: string_to_asset_id assert_eq!([0u8; 32], *asset_id); // ANCHOR_END: asset_id Ok(()) } #[tokio::test] async fn contract_id() -> Result<()> { // ANCHOR: contract_id use std::str::FromStr; use fuels::types::ContractId; // Zeroed Bytes32 let contract_id = ContractId::zeroed(); // Grab the inner `[u8; 32]` from // `Bytes32` by dereferencing (i.e. `*`) it. assert_eq!([0u8; 32], *contract_id); // From a `[u8; 32]`. // ANCHOR: array_to_contract_id let my_slice = [1u8; 32]; let contract_id = ContractId::new(my_slice); // ANCHOR_END: array_to_contract_id assert_eq!([1u8; 32], *contract_id); // From a string. 
// ANCHOR: string_to_contract_id let hex_str = "0x0000000000000000000000000000000000000000000000000000000000000000"; let contract_id = ContractId::from_str(hex_str)?; // ANCHOR_END: string_to_contract_id assert_eq!([0u8; 32], *contract_id); // ANCHOR_END: contract_id // ANCHOR: contract_id_to_identity let _identity_from_contract_id = Identity::ContractId(contract_id); // ANCHOR_END: contract_id_to_identity // ANCHOR: contract_id_to_str let _str_from_contract_id: &str = contract_id.to_string().as_str(); // ANCHOR_END: contract_id_to_str Ok(()) } #[tokio::test] async fn type_conversion() -> Result<()> { // ANCHOR: type_conversion use fuels::types::{AssetId, ContractId}; let contract_id = ContractId::new([1u8; 32]); let asset_id: AssetId = AssetId::new(*contract_id); assert_eq!([1u8; 32], *asset_id); // ANCHOR_END: type_conversion // ANCHOR: asset_id_to_str let _str_from_asset_id: &str = asset_id.to_string().as_str(); // ANCHOR_END: asset_id_to_str // ANCHOR: contract_id_to_bits256 let _contract_id_to_bits_256 = Bits256(contract_id.into()); // ANCHOR_END: contract_id_to_bits256 // ANCHOR: asset_id_to_bits256 let _asset_id_to_bits_256 = Bits256(asset_id.into()); // ANCHOR_END: asset_id_to_bits256 Ok(()) } #[tokio::test] async fn unused_generics() -> Result<()> { use fuels::prelude::*; abigen!(Contract( name = "MyContract", abi = "e2e/sway/types/contracts/generics/out/release/generics-abi.json" )); // ANCHOR: unused_generics_struct assert_eq!( <StructUnusedGeneric<u16, u32>>::new(15), StructUnusedGeneric { field: 15, _unused_generic_0: std::marker::PhantomData, _unused_generic_1: std::marker::PhantomData } ); // ANCHOR_END: unused_generics_struct let my_enum = <EnumUnusedGeneric<u32, u64>>::One(15); // ANCHOR: unused_generics_enum match my_enum { EnumUnusedGeneric::One(_value) => {} EnumUnusedGeneric::IgnoreMe(..) => panic!("Will never receive this variant"), } // ANCHOR_END: unused_generics_enum Ok(()) } }
rust
Apache-2.0
865e00c295de8b4a0a1ef7ac926c3c8266d5151b
2026-01-04T15:31:59.450823Z
false
FuelLabs/fuels-rs
https://github.com/FuelLabs/fuels-rs/blob/865e00c295de8b4a0a1ef7ac926c3c8266d5151b/examples/providers/src/lib.rs
examples/providers/src/lib.rs
#[cfg(test)] mod tests { use std::time::Duration; use fuels::{accounts::signers::private_key::PrivateKeySigner, prelude::Result}; #[ignore = "testnet currently not compatible with the sdk"] #[tokio::test] async fn connect_to_fuel_node() -> Result<()> { // ANCHOR: connect_to_testnet use std::str::FromStr; use fuels::{crypto::SecretKey, prelude::*}; // Create a provider pointing to the testnet. let provider = Provider::connect("testnet.fuel.network").await.unwrap(); // Setup a private key let secret = SecretKey::from_str( "a1447cd75accc6b71a976fd3401a1f6ce318d27ba660b0315ee6ac347bf39568", )?; // Create the wallet let wallet = Wallet::new(PrivateKeySigner::new(secret), provider); // Get the wallet address. Used later with the faucet dbg!(wallet.address().to_string()); // ANCHOR_END: connect_to_testnet let provider = setup_test_provider(vec![], vec![], None, None).await?; let port = provider.url().split(':').next_back().unwrap(); // ANCHOR: local_node_address let _provider = Provider::connect(format!("127.0.0.1:{port}")).await?; // ANCHOR_END: local_node_address Ok(()) } #[tokio::test] async fn query_the_blockchain() -> Result<()> { // ANCHOR: setup_test_blockchain use fuels::prelude::*; // Set up our test blockchain. // Create a random signer // ANCHOR: setup_single_asset let wallet_signer = PrivateKeySigner::random(&mut rand::thread_rng()); // How many coins in our wallet. let number_of_coins = 1; // The amount/value in each coin in our wallet. let amount_per_coin = 3; let coins = setup_single_asset_coins( wallet_signer.address(), AssetId::zeroed(), number_of_coins, amount_per_coin, ); // ANCHOR_END: setup_single_asset // ANCHOR: configure_retry let retry_config = RetryConfig::new(3, Backoff::Fixed(Duration::from_secs(2)))?; let provider = setup_test_provider(coins.clone(), vec![], None, None) .await? 
.with_retry_config(retry_config); // ANCHOR_END: configure_retry // ANCHOR_END: setup_test_blockchain // ANCHOR: get_coins let consensus_parameters = provider.consensus_parameters().await?; let coins = provider .get_coins( &wallet_signer.address(), *consensus_parameters.base_asset_id(), ) .await?; assert_eq!(coins.len(), 1); // ANCHOR_END: get_coins // ANCHOR: get_spendable_resources let filter = ResourceFilter { from: wallet_signer.address(), amount: 1, ..Default::default() }; let spendable_resources = provider.get_spendable_resources(filter).await?; assert_eq!(spendable_resources.len(), 1); // ANCHOR_END: get_spendable_resources // ANCHOR: get_balances let _balances = provider.get_balances(&wallet_signer.address()).await?; // ANCHOR_END: get_balances Ok(()) } }
rust
Apache-2.0
865e00c295de8b4a0a1ef7ac926c3c8266d5151b
2026-01-04T15:31:59.450823Z
false
FuelLabs/fuels-rs
https://github.com/FuelLabs/fuels-rs/blob/865e00c295de8b4a0a1ef7ac926c3c8266d5151b/examples/contracts/src/lib.rs
examples/contracts/src/lib.rs
#[cfg(test)] mod tests { use std::{collections::HashSet, time::Duration}; use fuels::{ accounts::signers::{fake::FakeSigner, private_key::PrivateKeySigner}, core::codec::{ABIFormatter, DecoderConfig, EncoderConfig, encode_fn_selector}, prelude::{LoadConfiguration, NodeConfig, StorageConfiguration}, programs::debug::ScriptType, test_helpers::{ChainConfig, StateConfig}, tx::ContractIdExt, types::{ SubAssetId, errors::{Result, transaction::Reason}, }, }; use rand::{Rng, thread_rng}; #[tokio::test] async fn instantiate_client() -> Result<()> { // ANCHOR: instantiate_client use fuels::prelude::{FuelService, Provider}; // Run the fuel node. let server = FuelService::start( NodeConfig::default(), ChainConfig::default(), StateConfig::default(), ) .await?; // Create a client that will talk to the node created above. let client = Provider::from(server.bound_address()).await?; assert!(client.healthy().await?); // ANCHOR_END: instantiate_client Ok(()) } #[tokio::test] async fn deploy_contract() -> Result<()> { use fuels::prelude::*; // ANCHOR: deploy_contract // This helper will launch a local node and provide a test wallet linked to it let wallet = launch_provider_and_get_wallet().await?; // This will load and deploy your contract binary to the chain so that its ID can // be used to initialize the instance let contract_id = Contract::load_from( "../../e2e/sway/contracts/contract_test/out/release/contract_test.bin", LoadConfiguration::default(), )? .deploy(&wallet, TxPolicies::default()) .await? 
.contract_id; println!("Contract deployed @ {contract_id}"); // ANCHOR_END: deploy_contract Ok(()) } #[tokio::test] async fn setup_program_test_example() -> Result<()> { use fuels::prelude::*; // ANCHOR: deploy_contract_setup_macro_short setup_program_test!( Wallets("wallet"), Abigen(Contract( name = "TestContract", project = "e2e/sway/contracts/contract_test" )), Deploy( name = "contract_instance", contract = "TestContract", wallet = "wallet" ), ); let response = contract_instance .methods() .initialize_counter(42) .call() .await?; assert_eq!(42, response.value); // ANCHOR_END: deploy_contract_setup_macro_short Ok(()) } #[tokio::test] async fn contract_call_cost_estimation() -> Result<()> { use fuels::prelude::*; abigen!(Contract( name = "MyContract", abi = "e2e/sway/contracts/contract_test/out/release/contract_test-abi.json" )); let wallet = launch_provider_and_get_wallet().await?; let contract_id = Contract::load_from( "../../e2e/sway/contracts/contract_test/out/release/contract_test.bin", LoadConfiguration::default(), )? .deploy(&wallet, TxPolicies::default()) .await? 
.contract_id; // ANCHOR: contract_call_cost_estimation let contract_instance = MyContract::new(contract_id, wallet); let tolerance = Some(0.0); let block_horizon = Some(1); let transaction_cost = contract_instance .methods() .initialize_counter(42) // Build the ABI call .estimate_transaction_cost(tolerance, block_horizon) // Get estimated transaction cost .await?; // ANCHOR_END: contract_call_cost_estimation let expected_script_gas = 2340; let expected_total_gas = 8592; assert_eq!(transaction_cost.script_gas, expected_script_gas); assert_eq!(transaction_cost.total_gas, expected_total_gas); Ok(()) } #[tokio::test] async fn deploy_with_parameters() -> std::result::Result<(), Box<dyn std::error::Error>> { use fuels::{prelude::*, tx::StorageSlot, types::Bytes32}; use rand::prelude::{Rng, SeedableRng, StdRng}; let wallet = launch_provider_and_get_wallet().await?; let contract_id_1 = Contract::load_from( "../../e2e/sway/contracts/contract_test/out/release/contract_test.bin", LoadConfiguration::default(), )? .deploy(&wallet, TxPolicies::default()) .await? .contract_id; // ANCHOR: deploy_with_parameters // Optional: Add `Salt` let rng = &mut StdRng::seed_from_u64(2322u64); let salt: [u8; 32] = rng.r#gen(); // Optional: Configure storage let key = Bytes32::from([1u8; 32]); let value = Bytes32::from([2u8; 32]); let storage_slot = StorageSlot::new(key, value); let storage_configuration = StorageConfiguration::default().add_slot_overrides([storage_slot]); let configuration = LoadConfiguration::default() .with_storage_configuration(storage_configuration) .with_salt(salt); // Optional: Configure deployment parameters let tx_policies = TxPolicies::default() .with_tip(1) .with_script_gas_limit(1_000_000) .with_maturity(0) .with_expiration(10_000); let contract_id_2 = Contract::load_from( "../../e2e/sway/contracts/contract_test/out/release/contract_test.bin", configuration, )? .deploy(&wallet, tx_policies) .await? 
.contract_id; println!("Contract deployed @ {contract_id_2}"); // ANCHOR_END: deploy_with_parameters assert_ne!(contract_id_1, contract_id_2); // ANCHOR: use_deployed_contract // This will generate your contract's methods onto `MyContract`. // This means an instance of `MyContract` will have access to all // your contract's methods that are running on-chain! // ANCHOR: abigen_example abigen!(Contract( name = "MyContract", abi = "e2e/sway/contracts/contract_test/out/release/contract_test-abi.json" )); // ANCHOR_END: abigen_example // This is an instance of your contract which you can use to make calls to your functions let contract_instance = MyContract::new(contract_id_2, wallet); let response = contract_instance .methods() .initialize_counter(42) // Build the ABI call .call() // Perform the network call .await?; assert_eq!(42, response.value); let response = contract_instance .methods() .increment_counter(10) .call() .await?; assert_eq!(52, response.value); // ANCHOR_END: use_deployed_contract // ANCHOR: submit_response_contract let response = contract_instance .methods() .initialize_counter(42) .submit() .await?; tokio::time::sleep(Duration::from_millis(500)).await; let value = response.response().await?.value; // ANCHOR_END: submit_response_contract assert_eq!(42, value); Ok(()) } #[tokio::test] async fn deploy_with_multiple_wallets() -> Result<()> { use fuels::prelude::*; abigen!(Contract( name = "MyContract", abi = "e2e/sway/contracts/contract_test/out/release/contract_test-abi.json" )); let wallets = launch_custom_provider_and_get_wallets(WalletsConfig::default(), None, None).await?; let contract_id_1 = Contract::load_from( "../../e2e/sway/contracts/contract_test/out/release/contract_test.bin", LoadConfiguration::default(), )? .deploy(&wallets[0], TxPolicies::default()) .await? 
.contract_id; let contract_instance_1 = MyContract::new(contract_id_1, wallets[0].clone()); let response = contract_instance_1 .methods() .initialize_counter(42) .call() .await?; assert_eq!(42, response.value); let contract_id_2 = Contract::load_from( "../../e2e/sway/contracts/contract_test/out/release/contract_test.bin", LoadConfiguration::default().with_salt([1; 32]), )? .deploy(&wallets[1], TxPolicies::default()) .await? .contract_id; let contract_instance_2 = MyContract::new(contract_id_2, wallets[1].clone()); let response = contract_instance_2 .methods() .initialize_counter(42) // Build the ABI call .call() .await?; assert_eq!(42, response.value); Ok(()) } #[tokio::test] #[allow(unused_variables)] async fn contract_tx_and_call_params() -> Result<()> { use fuels::prelude::*; abigen!(Contract( name = "MyContract", abi = "e2e/sway/contracts/contract_test/out/release/contract_test-abi.json" )); let wallet = launch_provider_and_get_wallet().await?; let contract_id = Contract::load_from( "../../e2e/sway/contracts/contract_test/out/release/contract_test.bin", LoadConfiguration::default(), )? .deploy(&wallet, TxPolicies::default()) .await? .contract_id; // ANCHOR: tx_policies let contract_methods = MyContract::new(contract_id, wallet.clone()).methods(); let tx_policies = TxPolicies::default() .with_tip(1) .with_script_gas_limit(1_000_000) .with_maturity(0) .with_expiration(10_000); let response = contract_methods .initialize_counter(42) // Our contract method .with_tx_policies(tx_policies) // Chain the tx policies .call() // Perform the contract call .await?; // This is an async call, `.await` it. 
// ANCHOR_END: tx_policies // ANCHOR: tx_policies_default let response = contract_methods .initialize_counter(42) .with_tx_policies(TxPolicies::default()) .call() .await?; // ANCHOR_END: tx_policies_default // ANCHOR: call_parameters let contract_methods = MyContract::new(contract_id, wallet.clone()).methods(); let tx_policies = TxPolicies::default(); // Forward 1_000_000 coin amount of base asset_id // this is a big number for checking that amount can be a u64 let call_params = CallParameters::default().with_amount(1_000_000); let response = contract_methods .get_msg_amount() // Our contract method. .with_tx_policies(tx_policies) // Chain the tx policies. .call_params(call_params)? // Chain the call parameters. .call() // Perform the contract call. .await?; // ANCHOR_END: call_parameters // ANCHOR: call_parameters_default let response = contract_methods .initialize_counter(42) .call_params(CallParameters::default())? .call() .await?; // ANCHOR_END: call_parameters_default Ok(()) } #[tokio::test] #[allow(unused_variables)] #[cfg(any(not(feature = "fuel-core-lib"), feature = "rocksdb"))] async fn token_ops_tests() -> Result<()> { use fuels::{prelude::*, types::SubAssetId}; abigen!(Contract( name = "MyContract", abi = "e2e/sway/contracts/token_ops/out/release/token_ops-abi.json" )); let temp_dir = tempfile::tempdir().expect("failed to make tempdir"); let temp_dir_name = temp_dir .path() .file_name() .expect("failed to get file name") .to_string_lossy() .to_string(); let temp_database_path = temp_dir.path().join("db"); let node_config = NodeConfig { starting_gas_price: 1100, database_type: DbType::RocksDb(Some(temp_database_path)), historical_execution: true, ..NodeConfig::default() }; let chain_config = ChainConfig { chain_name: temp_dir_name, ..ChainConfig::default() }; let wallets = launch_custom_provider_and_get_wallets( WalletsConfig::default(), Some(node_config), Some(chain_config), ) .await?; let wallet = wallets.first().expect("is there"); let contract_id = 
Contract::load_from( "../../e2e/sway/contracts/token_ops/out/release/token_ops.bin", LoadConfiguration::default(), )? .deploy_if_not_exists(wallet, TxPolicies::default()) .await? .contract_id; let contract_methods = MyContract::new(contract_id, wallet.clone()).methods(); // ANCHOR: simulate // you would mint 100 coins if the transaction wasn't simulated let counter = contract_methods .mint_coins(100) .simulate(Execution::realistic()) .await?; // ANCHOR_END: simulate { // ANCHOR: simulate_read_state // you don't need any funds to read state let balance = contract_methods .get_balance(contract_id, AssetId::zeroed()) .simulate(Execution::state_read_only()) .await? .value; // ANCHOR_END: simulate_read_state } { let provider = wallet.provider(); provider.produce_blocks(2, None).await?; let block_height = provider.latest_block_height().await?; // ANCHOR: simulate_read_state_at_height let balance = contract_methods .get_balance(contract_id, AssetId::zeroed()) .simulate(Execution::state_read_only().at_height(block_height)) .await? .value; // ANCHOR_END: simulate_read_state_at_height } let response = contract_methods.mint_coins(1_000_000).call().await?; // ANCHOR: variable_outputs let address = wallet.address(); let asset_id = contract_id.asset_id(&SubAssetId::zeroed()); // withdraw some tokens to wallet let response = contract_methods .transfer(1_000_000, asset_id, address.into()) .with_variable_output_policy(VariableOutputPolicy::Exactly(1)) .call() .await?; // ANCHOR_END: variable_outputs Ok(()) } #[tokio::test] #[allow(unused_variables)] async fn dependency_estimation() -> Result<()> { use fuels::prelude::*; abigen!(Contract( name = "MyContract", abi = "e2e/sway/contracts/lib_contract_caller/out/release/lib_contract_caller-abi.json" )); let wallet = launch_provider_and_get_wallet().await?; let called_contract_id: ContractId = Contract::load_from( "../../e2e/sway/contracts/lib_contract/out/release/lib_contract.bin", LoadConfiguration::default(), )? 
.deploy(&wallet, TxPolicies::default()) .await? .contract_id; let bin_path = "../../e2e/sway/contracts/lib_contract_caller/out/release/lib_contract_caller.bin"; let caller_contract_id = Contract::load_from(bin_path, LoadConfiguration::default())? .deploy(&wallet, TxPolicies::default()) .await? .contract_id; let contract_methods = MyContract::new(caller_contract_id, wallet.clone()).methods(); // ANCHOR: dependency_estimation_fail let address = wallet.address(); let amount = 100; let response = contract_methods .mint_then_increment_from_contract(called_contract_id, amount, address.into()) .call() .await; assert!(matches!( response, Err(Error::Transaction(Reason::Failure { .. })) )); // ANCHOR_END: dependency_estimation_fail // ANCHOR: dependency_estimation_manual let response = contract_methods .mint_then_increment_from_contract(called_contract_id, amount, address.into()) .with_variable_output_policy(VariableOutputPolicy::Exactly(1)) .with_contract_ids(&[called_contract_id]) .call() .await?; // ANCHOR_END: dependency_estimation_manual let asset_id = caller_contract_id.asset_id(&SubAssetId::zeroed()); let balance = wallet.get_asset_balance(&asset_id).await?; assert_eq!(balance, amount as u128); // ANCHOR: dependency_estimation let response = contract_methods .mint_then_increment_from_contract(called_contract_id, amount, address.into()) .with_variable_output_policy(VariableOutputPolicy::EstimateMinimum) .determine_missing_contracts() .await? 
.call() .await?; // ANCHOR_END: dependency_estimation let balance = wallet.get_asset_balance(&asset_id).await?; assert_eq!(balance, 2 * amount as u128); Ok(()) } #[tokio::test] #[allow(unused_variables)] async fn get_contract_outputs() -> Result<()> { use fuels::prelude::*; // ANCHOR: deployed_contracts abigen!(Contract( name = "MyContract", // Replace with your contract ABI.json path abi = "e2e/sway/contracts/contract_test/out/release/contract_test-abi.json" )); let wallet_original = launch_provider_and_get_wallet().await?; let wallet = wallet_original.clone(); let contract_id: ContractId = "0x65b6a3d081966040bbccbb7f79ac91b48c635729c59a4c02f15ae7da999b32d3".parse()?; let connected_contract_instance = MyContract::new(contract_id, wallet); // ANCHOR_END: deployed_contracts Ok(()) } #[tokio::test] #[allow(unused_variables)] async fn call_params_gas() -> Result<()> { use fuels::prelude::*; abigen!(Contract( name = "MyContract", abi = "e2e/sway/contracts/contract_test/out/release/contract_test-abi.json" )); let wallet = launch_provider_and_get_wallet().await?; let contract_id = Contract::load_from( "../../e2e/sway/contracts/contract_test/out/release/contract_test.bin", LoadConfiguration::default(), )? .deploy(&wallet, TxPolicies::default()) .await? .contract_id; let contract_methods = MyContract::new(contract_id, wallet.clone()).methods(); // ANCHOR: call_params_gas // Set the transaction `gas_limit` to 1_000_000 and `gas_forwarded` to 4300 to specify that // the contract call transaction may consume up to 1_000_000 gas, while the actual call may // only use 4300 gas let tx_policies = TxPolicies::default().with_script_gas_limit(1_000_000); let call_params = CallParameters::default().with_gas_forwarded(4300); let response = contract_methods .get_msg_amount() // Our contract method. .with_tx_policies(tx_policies) // Chain the tx policies. .call_params(call_params)? // Chain the call parameters. .call() // Perform the contract call. 
.await?; // ANCHOR_END: call_params_gas Ok(()) } #[tokio::test] #[allow(unused_variables)] async fn multi_call_example() -> Result<()> { use fuels::prelude::*; abigen!(Contract( name = "MyContract", abi = "e2e/sway/contracts/contract_test/out/release/contract_test-abi.json" )); let wallet = launch_provider_and_get_wallet().await?; let contract_id = Contract::load_from( "../../e2e/sway/contracts/contract_test/out/release/contract_test.bin", LoadConfiguration::default(), )? .deploy(&wallet, TxPolicies::default()) .await? .contract_id; // ANCHOR: multi_call_prepare let contract_methods = MyContract::new(contract_id, wallet.clone()).methods(); let call_handler_1 = contract_methods.initialize_counter(42); let call_handler_2 = contract_methods.get_array([42; 2]); // ANCHOR_END: multi_call_prepare // ANCHOR: multi_call_build let multi_call_handler = CallHandler::new_multi_call(wallet.clone()) .add_call(call_handler_1) .add_call(call_handler_2) .with_tx_policies(TxPolicies::default()); // ANCHOR_END: multi_call_build let multi_call_handler_tmp = multi_call_handler.clone(); // ANCHOR: multi_call_values let (counter, array): (u64, [u64; 2]) = multi_call_handler.call().await?.value; // ANCHOR_END: multi_call_values let multi_call_handler = multi_call_handler_tmp.clone(); // ANCHOR: multi_contract_call_response let response = multi_call_handler.call::<(u64, [u64; 2])>().await?; // ANCHOR_END: multi_contract_call_response assert_eq!(counter, 42); assert_eq!(array, [42; 2]); let multi_call_handler = multi_call_handler_tmp.clone(); // ANCHOR: submit_response_multicontract let submitted_tx = multi_call_handler.submit().await?; tokio::time::sleep(Duration::from_millis(500)).await; let (counter, array): (u64, [u64; 2]) = submitted_tx.response().await?.value; // ANCHOR_END: submit_response_multicontract assert_eq!(counter, 42); assert_eq!(array, [42; 2]); Ok(()) } #[tokio::test] #[allow(unused_variables)] async fn multi_call_cost_estimation() -> Result<()> { use fuels::prelude::*; 
abigen!(Contract( name = "MyContract", abi = "e2e/sway/contracts/contract_test/out/release/contract_test-abi.json" )); let wallet = launch_provider_and_get_wallet().await?; let contract_id = Contract::load_from( "../../e2e/sway/contracts/contract_test/out/release/contract_test.bin", LoadConfiguration::default(), )? .deploy(&wallet, TxPolicies::default()) .await? .contract_id; let contract_methods = MyContract::new(contract_id, wallet.clone()).methods(); // ANCHOR: multi_call_cost_estimation let call_handler_1 = contract_methods.initialize_counter(42); let call_handler_2 = contract_methods.get_array([42; 2]); let multi_call_handler = CallHandler::new_multi_call(wallet.clone()) .add_call(call_handler_1) .add_call(call_handler_2); let tolerance = Some(0.0); let block_horizon = Some(1); let transaction_cost = multi_call_handler .estimate_transaction_cost(tolerance, block_horizon) // Get estimated transaction cost .await?; // ANCHOR_END: multi_call_cost_estimation let expected_script_gas = 3832; let expected_total_gas = 10_661; assert_eq!(transaction_cost.script_gas, expected_script_gas); assert_eq!(transaction_cost.total_gas, expected_total_gas); Ok(()) } #[tokio::test] #[allow(unused_variables)] async fn connect_wallet() -> Result<()> { use fuels::prelude::*; abigen!(Contract( name = "MyContract", abi = "e2e/sway/contracts/contract_test/out/release/contract_test-abi.json" )); let config = WalletsConfig::new(Some(2), Some(1), Some(DEFAULT_COIN_AMOUNT)); let mut wallets = launch_custom_provider_and_get_wallets(config, None, None).await?; let wallet_1 = wallets.pop().unwrap(); let wallet_2 = wallets.pop().unwrap(); let contract_id = Contract::load_from( "../../e2e/sway/contracts/contract_test/out/release/contract_test.bin", LoadConfiguration::default(), )? .deploy(&wallet_1, TxPolicies::default()) .await? 
.contract_id; // ANCHOR: connect_wallet // Create contract instance with wallet_1 let contract_instance = MyContract::new(contract_id, wallet_1.clone()); // Perform contract call with wallet_2 let response = contract_instance .with_account(wallet_2) // Connect wallet_2 .methods() // Get contract methods .get_msg_amount() // Our contract method .call() // Perform the contract call. .await?; // This is an async call, `.await` for it. // ANCHOR_END: connect_wallet Ok(()) } #[tokio::test] async fn custom_assets_example() -> Result<()> { use fuels::prelude::*; setup_program_test!( Wallets("wallet", "wallet_2"), Abigen(Contract( name = "MyContract", project = "e2e/sway/contracts/contract_test" )), Deploy( name = "contract_instance", contract = "MyContract", wallet = "wallet" ) ); let some_addr: Address = thread_rng().r#gen(); // ANCHOR: add_custom_assets let amount = 1000; let _ = contract_instance .methods() .initialize_counter(42) .add_custom_asset(AssetId::zeroed(), amount, Some(some_addr)) .call() .await?; // ANCHOR_END: add_custom_assets let custom_inputs = vec![]; let custom_outputs = vec![]; // ANCHOR: add_custom_inputs_outputs let _ = contract_instance .methods() .initialize_counter(42) .with_inputs(custom_inputs) .with_outputs(custom_outputs) .add_signer(wallet_2.signer().clone()) .call() .await?; // ANCHOR_END: add_custom_inputs_outputs Ok(()) } #[tokio::test] async fn low_level_call_example() -> Result<()> { use fuels::{core::codec::calldata, prelude::*, types::SizedAsciiString}; setup_program_test!( Wallets("wallet"), Abigen( Contract( name = "MyCallerContract", project = "e2e/sway/contracts/low_level_caller" ), Contract( name = "MyTargetContract", project = "e2e/sway/contracts/contract_test" ), ), Deploy( name = "caller_contract_instance", contract = "MyCallerContract", wallet = "wallet" ), Deploy( name = "target_contract_instance", contract = "MyTargetContract", wallet = "wallet" ), ); // ANCHOR: low_level_call let function_selector = 
encode_fn_selector("set_value_multiple_complex"); let call_data = calldata!( MyStruct { a: true, b: [1, 2, 3], }, SizedAsciiString::<4>::try_from("fuel")? )?; caller_contract_instance .methods() .call_low_level_call( target_contract_instance.id(), Bytes(function_selector), Bytes(call_data), ) .determine_missing_contracts() .await? .call() .await?; // ANCHOR_END: low_level_call let result_uint = target_contract_instance .methods() .read_counter() .call() .await .unwrap() .value; let result_bool = target_contract_instance .methods() .get_bool_value() .call() .await .unwrap() .value; let result_str = target_contract_instance .methods() .get_str_value() .call() .await .unwrap() .value; assert_eq!(result_uint, 2); assert!(result_bool); assert_eq!(result_str, "fuel"); Ok(()) } #[tokio::test] async fn configure_the_return_value_decoder() -> Result<()> { use fuels::prelude::*; setup_program_test!( Wallets("wallet"), Abigen(Contract( name = "MyContract", project = "e2e/sway/contracts/contract_test" )), Deploy( name = "contract_instance", contract = "MyContract", wallet = "wallet" ) ); // ANCHOR: contract_decoder_config let _ = contract_instance .methods() .initialize_counter(42) .with_decoder_config(DecoderConfig { max_depth: 10, max_tokens: 2_000, }) .call() .await?; // ANCHOR_END: contract_decoder_config Ok(()) } #[tokio::test] async fn storage_slots_override() -> Result<()> { { // ANCHOR: storage_slots_override use fuels::{programs::contract::Contract, tx::StorageSlot}; let slot_override = StorageSlot::new([1; 32].into(), [2; 32].into()); let storage_config = StorageConfiguration::default().add_slot_overrides([slot_override]); let load_config = LoadConfiguration::default().with_storage_configuration(storage_config); let _: Result<_> = Contract::load_from("...", load_config); // ANCHOR_END: storage_slots_override } { // ANCHOR: storage_slots_disable_autoload use fuels::programs::contract::Contract; let storage_config = StorageConfiguration::default().with_autoload(false); 
let load_config = LoadConfiguration::default().with_storage_configuration(storage_config); let _: Result<_> = Contract::load_from("...", load_config); // ANCHOR_END: storage_slots_disable_autoload } Ok(()) } #[tokio::test] async fn contract_custom_call() -> Result<()> { use fuels::prelude::*; setup_program_test!( Wallets("wallet"), Abigen(Contract( name = "TestContract", project = "e2e/sway/contracts/contract_test" )), Deploy( name = "contract_instance", contract = "TestContract", wallet = "wallet" ), ); let provider = wallet.provider(); let counter = 42; // ANCHOR: contract_call_tb let call_handler = contract_instance.methods().initialize_counter(counter); let mut tb = call_handler.transaction_builder().await?; // customize the builder... wallet.adjust_for_fee(&mut tb, 0).await?; wallet.add_witnesses(&mut tb)?; let tx = tb.build(provider).await?; let tx_id = provider.send_transaction(tx).await?; tokio::time::sleep(Duration::from_millis(500)).await; let tx_status = provider.tx_status(&tx_id).await?; let response = call_handler.get_response(tx_status)?; assert_eq!(counter, response.value); // ANCHOR_END: contract_call_tb Ok(()) } #[tokio::test] async fn configure_encoder_config() -> Result<()> { use fuels::prelude::*; setup_program_test!( Wallets("wallet"), Abigen(Contract( name = "MyContract", project = "e2e/sway/contracts/contract_test" )), Deploy( name = "contract_instance", contract = "MyContract", wallet = "wallet" ) ); // ANCHOR: contract_encoder_config let _ = contract_instance .with_encoder_config(EncoderConfig { max_depth: 10, max_tokens: 2_000, }) .methods() .initialize_counter(42) .call() .await?;
rust
Apache-2.0
865e00c295de8b4a0a1ef7ac926c3c8266d5151b
2026-01-04T15:31:59.450823Z
true
FuelLabs/fuels-rs
https://github.com/FuelLabs/fuels-rs/blob/865e00c295de8b4a0a1ef7ac926c3c8266d5151b/examples/predicates/src/lib.rs
examples/predicates/src/lib.rs
#[cfg(test)] mod tests { use fuels::{ accounts::{Account, predicate::Predicate, signers::private_key::PrivateKeySigner}, crypto::Message, prelude::*, types::B512, }; use rand::thread_rng; #[tokio::test] async fn predicate_example() -> Result<()> { // ANCHOR: predicate_signers let wallet_signer = PrivateKeySigner::new( "0x862512a2363db2b3a375c0d4bbbd27172180d89f23f2e259bac850ab02619301".parse()?, ); let wallet2_signer = PrivateKeySigner::new( "0x37fa81c84ccd547c30c176b118d5cb892bdb113e8e80141f266519422ef9eefd".parse()?, ); let wallet3_signer = PrivateKeySigner::new( "0x976e5c3fa620092c718d852ca703b6da9e3075b9f2ecb8ed42d9f746bf26aafb".parse()?, ); let receiver_signer = PrivateKeySigner::random(&mut thread_rng()); // ANCHOR_END: predicate_signers // ANCHOR: predicate_coins let asset_id = AssetId::zeroed(); let num_coins = 32; let amount = 64; let initial_balance = amount * num_coins; let all_coins = [ &wallet_signer, &wallet2_signer, &wallet3_signer, &receiver_signer, ] .iter() .flat_map(|signer| setup_single_asset_coins(signer.address(), asset_id, num_coins, amount)) .collect::<Vec<_>>(); let provider = setup_test_provider(all_coins, vec![], None, None).await?; let wallet = Wallet::new(wallet_signer, provider.clone()); let wallet2 = Wallet::new(wallet2_signer, provider.clone()); let wallet3 = Wallet::new(wallet3_signer, provider.clone()); let receiver = Wallet::new(receiver_signer, provider.clone()); // ANCHOR_END: predicate_coins let data_to_sign = Message::new([0; 32]); let signature1: B512 = wallet .signer() .sign(data_to_sign) .await? .as_ref() .try_into()?; let signature2: B512 = wallet2 .signer() .sign(data_to_sign) .await? .as_ref() .try_into()?; let signature3: B512 = wallet3 .signer() .sign(data_to_sign) .await? 
.as_ref() .try_into()?; let signatures = [signature1, signature2, signature3]; // ANCHOR: predicate_load abigen!(Predicate( name = "MyPredicate", abi = "e2e/sway/predicates/signatures/out/release/signatures-abi.json" )); let predicate_data = MyPredicateEncoder::default().encode_data(signatures)?; let code_path = "../../e2e/sway/predicates/signatures/out/release/signatures.bin"; let predicate: Predicate = Predicate::load_from(code_path)? .with_provider(provider) .with_data(predicate_data); // ANCHOR_END: predicate_load // ANCHOR: predicate_receive let amount_to_predicate = 500; wallet .transfer( predicate.address(), amount_to_predicate, asset_id, TxPolicies::default(), ) .await?; let predicate_balance = predicate.get_asset_balance(&asset_id).await?; assert_eq!(predicate_balance, amount_to_predicate as u128); // ANCHOR_END: predicate_receive // ANCHOR: predicate_spend let amount_to_receiver = 300; predicate .transfer( receiver.address(), amount_to_receiver, asset_id, TxPolicies::default(), ) .await?; let receiver_balance_after = receiver.get_asset_balance(&asset_id).await?; assert_eq!( (initial_balance + amount_to_receiver) as u128, receiver_balance_after ); // ANCHOR_END: predicate_spend Ok(()) } #[tokio::test] async fn predicate_data_example() -> Result<()> { // ANCHOR: predicate_data_setup let asset_id = AssetId::zeroed(); let wallets_config = WalletsConfig::new_multiple_assets( 2, vec![AssetConfig { id: asset_id, num_coins: 1, coin_amount: 1_000, }], ); let wallets = &launch_custom_provider_and_get_wallets(wallets_config, None, None).await?; let first_wallet = &wallets[0]; let second_wallet = &wallets[1]; abigen!(Predicate( name = "MyPredicate", abi = "e2e/sway/predicates/basic_predicate/out/release/basic_predicate-abi.json" )); // ANCHOR_END: predicate_data_setup // ANCHOR: with_predicate_data let predicate_data = MyPredicateEncoder::default().encode_data(4096, 4096)?; let code_path = "../../e2e/sway/predicates/basic_predicate/out/release/basic_predicate.bin"; 
let predicate: Predicate = Predicate::load_from(code_path)? .with_provider(first_wallet.provider().clone()) .with_data(predicate_data); // ANCHOR_END: with_predicate_data // ANCHOR: predicate_data_lock_amount // First wallet transfers amount to predicate. first_wallet .transfer(predicate.address(), 500, asset_id, TxPolicies::default()) .await?; // Check predicate balance. let balance = predicate.get_asset_balance(&AssetId::zeroed()).await?; assert_eq!(balance, 500); // ANCHOR_END: predicate_data_lock_amount // ANCHOR: predicate_data_unlock let amount_to_unlock = 300; predicate .transfer( second_wallet.address(), amount_to_unlock, asset_id, TxPolicies::default(), ) .await?; // Second wallet balance is updated. let balance = second_wallet.get_asset_balance(&AssetId::zeroed()).await?; assert_eq!(balance, 1300); // ANCHOR_END: predicate_data_unlock Ok(()) } }
rust
Apache-2.0
865e00c295de8b4a0a1ef7ac926c3c8266d5151b
2026-01-04T15:31:59.450823Z
false
FuelLabs/fuels-rs
https://github.com/FuelLabs/fuels-rs/blob/865e00c295de8b4a0a1ef7ac926c3c8266d5151b/packages/fuels-test-helpers/src/lib.rs
packages/fuels-test-helpers/src/lib.rs
//! Testing helpers/utilities for Fuel SDK. extern crate core; #[cfg(feature = "fuels-accounts")] pub use accounts::*; use fuel_tx::{Bytes32, ConsensusParameters, ContractParameters, TxParameters, UtxoId}; use fuel_types::{AssetId, Nonce}; use fuels_accounts::provider::Provider; use fuels_core::types::{ Address, coin::Coin, errors::Result, message::{Message, MessageStatus}, }; pub use node_types::*; use rand::{Fill, Rng, SeedableRng, rngs::StdRng}; use utils::{into_coin_configs, into_message_configs}; pub use wallets_config::*; mod node_types; #[cfg(not(feature = "fuel-core-lib"))] pub(crate) mod fuel_bin_service; #[cfg(feature = "fuels-accounts")] mod accounts; pub use service::*; mod service; mod utils; mod wallets_config; /// Create a vector of `num_asset`*`coins_per_asset` UTXOs and a vector of the unique corresponding /// asset IDs. `AssetId`. Each UTXO (=coin) contains `amount_per_coin` amount of a random asset. The /// output of this function can be used with `setup_test_provider` to get a client with some /// pre-existing coins, with `num_asset` different asset ids. Note that one of the assets is the /// base asset to pay for gas. 
pub fn setup_multiple_assets_coins( owner: Address, num_asset: u64, coins_per_asset: u64, amount_per_coin: u64, ) -> (Vec<Coin>, Vec<AssetId>) { let mut rng = rand::thread_rng(); // Create `num_asset-1` asset ids so there is `num_asset` in total with the base asset let asset_ids = (0..(num_asset - 1)) .map(|_| { let mut random_asset_id = AssetId::zeroed(); random_asset_id .try_fill(&mut rng) .expect("failed to fill with random data"); random_asset_id }) .chain([AssetId::zeroed()]) .collect::<Vec<AssetId>>(); let coins = asset_ids .iter() .flat_map(|id| setup_single_asset_coins(owner, *id, coins_per_asset, amount_per_coin)) .collect::<Vec<Coin>>(); (coins, asset_ids) } /// Create a vector of UTXOs with the provided AssetIds, num_coins, and amount_per_coin pub fn setup_custom_assets_coins(owner: Address, assets: &[AssetConfig]) -> Vec<Coin> { assets .iter() .flat_map(|asset| { setup_single_asset_coins(owner, asset.id, asset.num_coins, asset.coin_amount) }) .collect::<Vec<Coin>>() } /// Create a vector of `num_coins` UTXOs containing `amount_per_coin` amount of asset `asset_id`. /// The output of this function can be used with `setup_test_provider` to get a client with some /// pre-existing coins, but with only one asset ID. 
pub fn setup_single_asset_coins( owner: Address, asset_id: AssetId, num_coins: u64, amount_per_coin: u64, ) -> Vec<Coin> { let mut rng = rand::thread_rng(); let coins: Vec<Coin> = (1..=num_coins) .map(|_i| { let mut r = Bytes32::zeroed(); r.try_fill(&mut rng) .expect("failed to fill with random data"); let utxo_id = UtxoId::new(r, 0); Coin { owner, utxo_id, amount: amount_per_coin, asset_id, } }) .collect(); coins } pub fn setup_single_message( sender: Address, recipient: Address, amount: u64, nonce: Nonce, data: Vec<u8>, ) -> Message { Message { sender, recipient, nonce, amount, data, da_height: 0, status: MessageStatus::Unspent, } } pub async fn setup_test_provider( coins: Vec<Coin>, messages: Vec<Message>, node_config: Option<NodeConfig>, chain_config: Option<ChainConfig>, ) -> Result<Provider> { let node_config = node_config.unwrap_or_default(); let chain_config = chain_config.unwrap_or_else(testnet_chain_config); let coin_configs = into_coin_configs(coins); let message_configs = into_message_configs(messages); let state_config = StateConfig { coins: coin_configs, messages: message_configs, ..StateConfig::local_testnet() }; let srv = FuelService::start(node_config, chain_config, state_config).await?; let address = srv.bound_address(); tokio::spawn(async move { let _own_the_handle = srv; let () = futures::future::pending().await; }); Provider::from(address).await } // Testnet ChainConfig with increased tx size and contract size limits fn testnet_chain_config() -> ChainConfig { let mut consensus_parameters = ConsensusParameters::default(); let tx_params = TxParameters::default().with_max_size(10_000_000); // on a best effort basis, if we're given an old core we won't fail only because we couldn't // set the limit here let _ = consensus_parameters.set_block_transaction_size_limit(10_000_000); let contract_params = ContractParameters::default().with_contract_max_size(1_000_000); consensus_parameters.set_tx_params(tx_params); 
consensus_parameters.set_contract_params(contract_params); ChainConfig { consensus_parameters, ..ChainConfig::local_testnet() } } pub fn generate_random_salt() -> [u8; 32] { StdRng::from_entropy().r#gen() } #[cfg(test)] mod tests { use std::net::{Ipv4Addr, SocketAddr}; use fuel_tx::{ConsensusParameters, ContractParameters, FeeParameters, TxParameters}; use super::*; #[tokio::test] async fn test_setup_single_asset_coins() -> Result<()> { let mut rng = rand::thread_rng(); let address = rng.r#gen(); let mut asset_id = AssetId::zeroed(); asset_id .try_fill(&mut rng) .expect("failed to fill with random data"); let number_of_coins = 11; let amount_per_coin = 10; let coins = setup_single_asset_coins(address, asset_id, number_of_coins, amount_per_coin); assert_eq!(coins.len() as u64, number_of_coins); for coin in coins { assert_eq!(coin.asset_id, asset_id); assert_eq!(coin.amount, amount_per_coin); assert_eq!(coin.owner, address); } Ok(()) } #[tokio::test] async fn test_setup_multiple_assets_coins() -> Result<()> { let mut rng = rand::thread_rng(); let address = rng.r#gen(); let number_of_assets = 7; let coins_per_asset = 10; let amount_per_coin = 13; let (coins, unique_asset_ids) = setup_multiple_assets_coins( address, number_of_assets, coins_per_asset, amount_per_coin, ); assert_eq!(coins.len() as u64, number_of_assets * coins_per_asset); assert_eq!(unique_asset_ids.len() as u64, number_of_assets); // Check that the wallet has base assets to pay for gas assert!( unique_asset_ids .iter() .any(|&asset_id| asset_id == AssetId::zeroed()) ); for asset_id in unique_asset_ids { let coins_asset_id: Vec<Coin> = coins .clone() .into_iter() .filter(|c| c.asset_id == asset_id) .collect(); assert_eq!(coins_asset_id.len() as u64, coins_per_asset); for coin in coins_asset_id { assert_eq!(coin.owner, address); assert_eq!(coin.amount, amount_per_coin); } } Ok(()) } #[tokio::test] async fn test_setup_custom_assets_coins() -> Result<()> { let mut rng = rand::thread_rng(); let address = 
rng.r#gen(); let asset_base = AssetConfig { id: AssetId::zeroed(), num_coins: 2, coin_amount: 4, }; let mut asset_id_1 = AssetId::zeroed(); asset_id_1 .try_fill(&mut rng) .expect("failed to fill with random data"); let asset_1 = AssetConfig { id: asset_id_1, num_coins: 6, coin_amount: 8, }; let mut asset_id_2 = AssetId::zeroed(); asset_id_2 .try_fill(&mut rng) .expect("failed to fill with random data"); let asset_2 = AssetConfig { id: asset_id_2, num_coins: 10, coin_amount: 12, }; let assets = vec![asset_base, asset_1, asset_2]; let coins = setup_custom_assets_coins(address, &assets); for asset in assets { let coins_asset_id: Vec<Coin> = coins .clone() .into_iter() .filter(|c| c.asset_id == asset.id) .collect(); assert_eq!(coins_asset_id.len() as u64, asset.num_coins); for coin in coins_asset_id { assert_eq!(coin.owner, address); assert_eq!(coin.amount, asset.coin_amount); } } Ok(()) } #[tokio::test] async fn test_setup_test_provider_custom_config() -> Result<()> { let socket = SocketAddr::new(Ipv4Addr::new(127, 0, 0, 1).into(), 4000); let config = NodeConfig { addr: socket, ..NodeConfig::default() }; let provider = setup_test_provider(vec![], vec![], Some(config.clone()), None).await?; let node_info = provider .node_info() .await .expect("Failed to retrieve node info!"); assert_eq!(provider.url(), format!("http://127.0.0.1:4000")); assert_eq!(node_info.utxo_validation, config.utxo_validation); Ok(()) } #[tokio::test] async fn test_setup_test_client_consensus_parameters_config() -> Result<()> { let tx_params = TxParameters::default() .with_max_gas_per_tx(2) .with_max_inputs(58); let fee_params = FeeParameters::default().with_gas_per_byte(2); let contract_params = ContractParameters::default().with_max_storage_slots(83); let mut consensus_parameters = ConsensusParameters::default(); consensus_parameters.set_tx_params(tx_params); consensus_parameters.set_fee_params(fee_params); consensus_parameters.set_contract_params(contract_params); let chain_config = ChainConfig 
{ consensus_parameters: consensus_parameters.clone(), ..ChainConfig::default() }; let provider = setup_test_provider(vec![], vec![], None, Some(chain_config)).await?; let retrieved_parameters = provider.consensus_parameters().await?; assert_eq!(retrieved_parameters, consensus_parameters); Ok(()) } #[tokio::test] async fn test_chain_config_and_consensus_parameters() -> Result<()> { let max_inputs = 123; let gas_per_byte = 456; let mut consensus_parameters = ConsensusParameters::default(); let tx_params = TxParameters::default().with_max_inputs(max_inputs); consensus_parameters.set_tx_params(tx_params); let fee_params = FeeParameters::default().with_gas_per_byte(gas_per_byte); consensus_parameters.set_fee_params(fee_params); let chain_name = "fuel-0".to_string(); let chain_config = ChainConfig { chain_name: chain_name.clone(), consensus_parameters, ..ChainConfig::local_testnet() }; let provider = setup_test_provider(vec![], vec![], None, Some(chain_config)).await?; let chain_info = provider.chain_info().await?; assert_eq!(chain_info.name, chain_name); assert_eq!( chain_info.consensus_parameters.tx_params().max_inputs(), max_inputs ); assert_eq!( chain_info.consensus_parameters.fee_params().gas_per_byte(), gas_per_byte ); Ok(()) } }
rust
Apache-2.0
865e00c295de8b4a0a1ef7ac926c3c8266d5151b
2026-01-04T15:31:59.450823Z
false
FuelLabs/fuels-rs
https://github.com/FuelLabs/fuels-rs/blob/865e00c295de8b4a0a1ef7ac926c3c8266d5151b/packages/fuels-test-helpers/src/node_types.rs
packages/fuels-test-helpers/src/node_types.rs
use std::{ net::{Ipv4Addr, SocketAddr}, path::PathBuf, time::Duration, }; pub use fuel_core_chain_config::{ChainConfig, StateConfig}; #[derive(Clone, Debug)] pub enum Trigger { Instant, Never, Interval { block_time: Duration }, Open { period: Duration }, } #[cfg(feature = "fuel-core-lib")] impl From<Trigger> for fuel_core_poa::Trigger { fn from(value: Trigger) -> Self { match value { Trigger::Instant => fuel_core_poa::Trigger::Instant, Trigger::Never => fuel_core_poa::Trigger::Never, Trigger::Interval { block_time } => fuel_core_poa::Trigger::Interval { block_time }, Trigger::Open { period } => fuel_core_poa::Trigger::Open { period }, } } } #[derive(Clone, Debug)] pub enum DbType { InMemory, RocksDb(Option<PathBuf>), } #[cfg(feature = "fuel-core-lib")] impl From<DbType> for fuel_core::service::DbType { fn from(value: DbType) -> Self { match value { DbType::InMemory => fuel_core::service::DbType::InMemory, DbType::RocksDb(..) => fuel_core::service::DbType::RocksDb, } } } #[derive(Clone, Debug)] pub struct NodeConfig { pub addr: SocketAddr, pub max_database_cache_size: Option<usize>, pub database_type: DbType, pub utxo_validation: bool, pub historical_execution: bool, pub debug: bool, pub block_production: Trigger, pub vm_backtrace: bool, pub silent: bool, pub starting_gas_price: u64, pub graphql_request_body_bytes_limit: u64, } impl Default for NodeConfig { fn default() -> Self { Self { addr: SocketAddr::new(Ipv4Addr::new(127, 0, 0, 1).into(), 0), max_database_cache_size: None, database_type: DbType::InMemory, utxo_validation: true, historical_execution: false, debug: true, block_production: Trigger::Instant, vm_backtrace: false, silent: true, starting_gas_price: 1, graphql_request_body_bytes_limit: u64::MAX, } } }
rust
Apache-2.0
865e00c295de8b4a0a1ef7ac926c3c8266d5151b
2026-01-04T15:31:59.450823Z
false
FuelLabs/fuels-rs
https://github.com/FuelLabs/fuels-rs/blob/865e00c295de8b4a0a1ef7ac926c3c8266d5151b/packages/fuels-test-helpers/src/wallets_config.rs
packages/fuels-test-helpers/src/wallets_config.rs
use fuel_types::AssetId; // These constants define the default number of wallets to be setup, // the number of coins per wallet and the amount per coin pub const DEFAULT_NUM_WALLETS: u64 = 10; pub const DEFAULT_NUM_COINS: u64 = 1; pub const DEFAULT_COIN_AMOUNT: u64 = 1_000_000_000; #[derive(Debug, Clone)] pub struct AssetConfig { pub id: AssetId, pub num_coins: u64, pub coin_amount: u64, } #[derive(Debug)] pub struct WalletsConfig { num_wallets: u64, assets: Vec<AssetConfig>, } impl WalletsConfig { pub fn new(num_wallets: Option<u64>, num_coins: Option<u64>, coin_amount: Option<u64>) -> Self { Self { num_wallets: num_wallets.unwrap_or(DEFAULT_NUM_WALLETS), assets: vec![AssetConfig { id: AssetId::zeroed(), num_coins: num_coins.unwrap_or(DEFAULT_NUM_COINS), coin_amount: coin_amount.unwrap_or(DEFAULT_COIN_AMOUNT), }], } } pub fn new_multiple_assets(num_wallets: u64, assets: Vec<AssetConfig>) -> Self { Self { num_wallets, assets, } } pub fn num_wallets(&self) -> u64 { self.num_wallets } pub fn assets(&self) -> &[AssetConfig] { &self.assets[..] } } impl Default for WalletsConfig { fn default() -> Self { Self { num_wallets: DEFAULT_NUM_WALLETS, assets: vec![AssetConfig { id: AssetId::zeroed(), num_coins: DEFAULT_NUM_COINS, coin_amount: DEFAULT_COIN_AMOUNT, }], } } }
rust
Apache-2.0
865e00c295de8b4a0a1ef7ac926c3c8266d5151b
2026-01-04T15:31:59.450823Z
false
FuelLabs/fuels-rs
https://github.com/FuelLabs/fuels-rs/blob/865e00c295de8b4a0a1ef7ac926c3c8266d5151b/packages/fuels-test-helpers/src/service.rs
packages/fuels-test-helpers/src/service.rs
#[cfg(feature = "fuel-core-lib")] use fuel_core::service::{Config as ServiceConfig, FuelService as CoreFuelService}; use fuel_core_chain_config::{ChainConfig, StateConfig}; use fuel_core_services::State; use fuels_core::types::errors::{Result, error}; use std::net::SocketAddr; use crate::NodeConfig; #[cfg(not(feature = "fuel-core-lib"))] use crate::fuel_bin_service::FuelService as BinFuelService; pub struct FuelService { #[cfg(feature = "fuel-core-lib")] service: CoreFuelService, #[cfg(not(feature = "fuel-core-lib"))] service: BinFuelService, bound_address: SocketAddr, } impl FuelService { pub async fn start( node_config: NodeConfig, chain_config: ChainConfig, state_config: StateConfig, ) -> Result<Self> { #[cfg(feature = "fuel-core-lib")] let service = { let config = Self::service_config(node_config, chain_config, state_config); CoreFuelService::new_node(config) .await .map_err(|err| error!(Other, "{err}"))? }; #[cfg(not(feature = "fuel-core-lib"))] let service = BinFuelService::new_node(node_config, chain_config, state_config).await?; let bound_address = service.bound_address; Ok(FuelService { service, bound_address, }) } pub async fn stop(&self) -> Result<State> { #[cfg(feature = "fuel-core-lib")] let result = self.service.send_stop_signal_and_await_shutdown().await; #[cfg(not(feature = "fuel-core-lib"))] let result = self.service.stop(); result.map_err(|err| error!(Other, "{err}")) } pub fn bound_address(&self) -> SocketAddr { self.bound_address } #[cfg(feature = "fuel-core-lib")] fn service_config( node_config: NodeConfig, chain_config: ChainConfig, state_config: StateConfig, ) -> ServiceConfig { use std::time::Duration; #[cfg(feature = "rocksdb")] use fuel_core::state::rocks_db::{ColumnsPolicy, DatabaseConfig}; use fuel_core::{ combined_database::CombinedDatabaseConfig, fuel_core_graphql_api::ServiceConfig as GraphQLConfig, service::config::GasPriceConfig, }; use fuel_core_chain_config::SnapshotReader; use crate::DbType; let snapshot_reader = 
SnapshotReader::new_in_memory(chain_config, state_config); let combined_db_config = CombinedDatabaseConfig { database_path: match &node_config.database_type { DbType::InMemory => Default::default(), DbType::RocksDb(path) => path.clone().unwrap_or_default(), }, database_type: node_config.database_type.into(), #[cfg(feature = "rocksdb")] database_config: DatabaseConfig { cache_capacity: node_config.max_database_cache_size, max_fds: 512, columns_policy: ColumnsPolicy::Lazy, }, #[cfg(feature = "rocksdb")] state_rewind_policy: fuel_core::state::historical_rocksdb::StateRewindPolicy::RewindFullRange, }; ServiceConfig { graphql_config: GraphQLConfig { addr: node_config.addr, max_queries_depth: 16, max_queries_complexity: 80000, max_queries_recursive_depth: 16, max_queries_resolver_recursive_depth: 1, max_queries_directives: 10, max_concurrent_queries: 1024, required_fuel_block_height_tolerance: 10, required_fuel_block_height_timeout: Duration::from_secs(30), request_body_bytes_limit: 16 * 1024 * 1024, block_subscriptions_queue: 100, query_log_threshold_time: Duration::from_secs(2), api_request_timeout: Duration::from_secs(60), database_batch_size: 100, assemble_tx_dry_run_limit: 3, assemble_tx_estimate_predicates_limit: 5, costs: Default::default(), number_of_threads: 2, }, combined_db_config, snapshot_reader, historical_execution: node_config.historical_execution, utxo_validation: node_config.utxo_validation, debug: node_config.debug, block_production: node_config.block_production.into(), gas_price_config: GasPriceConfig { starting_exec_gas_price: node_config.starting_gas_price, ..GasPriceConfig::local_node() }, ..ServiceConfig::local_node() } } }
rust
Apache-2.0
865e00c295de8b4a0a1ef7ac926c3c8266d5151b
2026-01-04T15:31:59.450823Z
false
FuelLabs/fuels-rs
https://github.com/FuelLabs/fuels-rs/blob/865e00c295de8b4a0a1ef7ac926c3c8266d5151b/packages/fuels-test-helpers/src/fuel_bin_service.rs
packages/fuels-test-helpers/src/fuel_bin_service.rs
use std::{ net::{IpAddr, SocketAddr}, path::PathBuf, time::Duration, }; use fuel_core_chain_config::{ChainConfig, SnapshotWriter, StateConfig}; use fuel_core_client::client::FuelClient; use fuel_core_services::State; use fuel_core_types::blockchain::header::LATEST_STATE_TRANSITION_VERSION; use fuels_core::{error, types::errors::Result as FuelResult}; use portpicker::{is_free, pick_unused_port}; use tempfile::{TempDir, tempdir}; use tokio::{process::Command, spawn, task::JoinHandle, time::sleep}; use crate::node_types::{DbType, NodeConfig, Trigger}; #[derive(Debug)] pub(crate) struct ExtendedConfig { pub node_config: NodeConfig, pub chain_config: ChainConfig, pub state_config: StateConfig, pub snapshot_dir: TempDir, } impl ExtendedConfig { pub fn args_vec(&self) -> fuels_core::types::errors::Result<Vec<String>> { let port = self.node_config.addr.port().to_string(); let mut args = vec![ "run".to_string(), "--ip".to_string(), "127.0.0.1".to_string(), "--port".to_string(), port, "--snapshot".to_string(), self.snapshot_dir .path() .to_str() .expect("Failed to find config file") .to_string(), ]; args.push("--db-type".to_string()); match &self.node_config.database_type { DbType::InMemory => args.push("in-memory".to_string()), DbType::RocksDb(path_to_db) => { args.push("rocks-db".to_string()); let path = path_to_db.as_ref().cloned().unwrap_or_else(|| { PathBuf::from(std::env::var("HOME").expect("HOME env var missing")) .join(".fuel/db") }); args.push("--db-path".to_string()); args.push(path.to_string_lossy().to_string()); } } if let Some(cache_size) = self.node_config.max_database_cache_size { args.push("--max-database-cache-size".to_string()); args.push(cache_size.to_string()); } match self.node_config.block_production { Trigger::Instant => { args.push("--poa-instant=true".to_string()); } Trigger::Never => { args.push("--poa-instant=false".to_string()); } Trigger::Interval { block_time } => { args.push(format!( "--poa-interval-period={}ms", block_time.as_millis() )); } 
Trigger::Open { period } => { args.push(format!("--poa-open-period={}ms", period.as_millis())); } }; let body_limit = self.node_config.graphql_request_body_bytes_limit; args.push(format!("--graphql-request-body-bytes-limit={body_limit}")); // This ensures forward compatibility when running against a newer node with a different native executor version. // If the node detects our older version in the chain configuration, it defaults to using the wasm executor. // However, since we don't include a wasm executor, this would lead to code loading failure and a node crash. // To prevent this, we force the node to use our version number to refer to its native executor. let executor_version = self .chain_config .genesis_state_transition_version .unwrap_or(LATEST_STATE_TRANSITION_VERSION); args.push(format!("--native-executor-version={executor_version}")); args.extend( [ (self.node_config.vm_backtrace, "--vm-backtrace"), (self.node_config.utxo_validation, "--utxo-validation"), ( self.node_config.historical_execution, "--historical-execution", ), (self.node_config.debug, "--debug"), ] .into_iter() .filter(|(flag, _)| *flag) .map(|(_, arg)| arg.to_string()), ); args.push(format!( "--starting-gas-price={}", self.node_config.starting_gas_price )); Ok(args) } pub fn write_temp_snapshot_files(self) -> FuelResult<TempDir> { let writer = SnapshotWriter::json(self.snapshot_dir.path()); writer .write_state_config(self.state_config, &self.chain_config) .map_err(|e| error!(Other, "could not write state config: {}", e))?; Ok(self.snapshot_dir) } } pub struct FuelService { pub bound_address: SocketAddr, handle: JoinHandle<()>, } impl FuelService { pub async fn new_node( node_config: NodeConfig, chain_config: ChainConfig, state_config: StateConfig, ) -> FuelResult<Self> { let requested_port = node_config.addr.port(); let bound_address = match requested_port { 0 => get_socket_address()?, _ if is_free(requested_port) => node_config.addr, _ => { return Err(error!( IO, "could not find a free 
port to start a fuel node" )); } }; let node_config = NodeConfig { addr: bound_address, ..node_config }; let extended_config = ExtendedConfig { node_config, state_config, chain_config, snapshot_dir: tempdir()?, }; let addr = extended_config.node_config.addr; let handle = run_node(extended_config).await?; server_health_check(addr).await?; Ok(FuelService { bound_address, handle, }) } pub fn stop(&self) -> FuelResult<State> { self.handle.abort(); Ok(State::Stopped) } } async fn server_health_check(address: SocketAddr) -> FuelResult<()> { let client = FuelClient::from(address); let mut attempts = 5; let mut healthy = client.health().await.unwrap_or(false); let between_attempts = Duration::from_millis(300); while attempts > 0 && !healthy { healthy = client.health().await.unwrap_or(false); sleep(between_attempts).await; attempts -= 1; } if !healthy { return Err(error!(Other, "could not connect to fuel core server")); } Ok(()) } fn get_socket_address() -> FuelResult<SocketAddr> { let free_port = pick_unused_port().ok_or(error!(Other, "could not pick a free port"))?; let address: IpAddr = "127.0.0.1".parse().expect("is valid ip"); Ok(SocketAddr::new(address, free_port)) } async fn run_node(extended_config: ExtendedConfig) -> FuelResult<JoinHandle<()>> { let args = extended_config.args_vec()?; let tempdir = extended_config.write_temp_snapshot_files()?; let binary_name = "fuel-core"; let paths = which::which_all(binary_name) .map_err(|_| error!(Other, "failed to list `{binary_name}` binaries"))? 
.collect::<Vec<_>>(); let path = paths .first() .ok_or_else(|| error!(Other, "no `{binary_name}` in PATH"))?; if paths.len() > 1 { eprintln!( "found more than one `{binary_name}` binary in PATH, using `{}`", path.display() ); } let mut command = Command::new(path); let running_node = command.args(args).kill_on_drop(true).env_clear().output(); let join_handle = spawn(async move { // ensure drop is not called on the tmp dir and it lives throughout the lifetime of the node let _unused = tempdir; let result = running_node .await .expect("error: could not find `fuel-core` in PATH`"); let stdout = String::from_utf8_lossy(&result.stdout); let stderr = String::from_utf8_lossy(&result.stderr); eprintln!( "the exit status from the fuel binary was: {result:?}, stdout: {stdout}, stderr: {stderr}" ); }); Ok(join_handle) }
rust
Apache-2.0
865e00c295de8b4a0a1ef7ac926c3c8266d5151b
2026-01-04T15:31:59.450823Z
false
FuelLabs/fuels-rs
https://github.com/FuelLabs/fuels-rs/blob/865e00c295de8b4a0a1ef7ac926c3c8266d5151b/packages/fuels-test-helpers/src/utils.rs
packages/fuels-test-helpers/src/utils.rs
use fuel_core_chain_config::{CoinConfig, MessageConfig}; use fuels_core::types::{coin::Coin, message::Message}; pub(crate) fn into_coin_configs(coins: Vec<Coin>) -> Vec<CoinConfig> { coins .into_iter() .map(Into::into) .collect::<Vec<CoinConfig>>() } pub(crate) fn into_message_configs(messages: Vec<Message>) -> Vec<MessageConfig> { messages .into_iter() .map(Into::into) .collect::<Vec<MessageConfig>>() }
rust
Apache-2.0
865e00c295de8b4a0a1ef7ac926c3c8266d5151b
2026-01-04T15:31:59.450823Z
false
FuelLabs/fuels-rs
https://github.com/FuelLabs/fuels-rs/blob/865e00c295de8b4a0a1ef7ac926c3c8266d5151b/packages/fuels-test-helpers/src/accounts.rs
packages/fuels-test-helpers/src/accounts.rs
use std::mem::size_of; use fuel_crypto::SecretKey; use fuels_accounts::{signers::private_key::PrivateKeySigner, wallet::Wallet}; use fuels_core::types::errors::Result; use crate::{ node_types::{ChainConfig, NodeConfig}, setup_custom_assets_coins, setup_test_provider, wallets_config::*, }; /// Launches a local Fuel node, instantiates a provider, and returns a wallet. /// The provider and the wallets are instantiated with the default configs. /// For more configurable options, see the `launch_custom_provider_and_get_wallets` function. /// # Examples /// ``` /// use fuels_test_helpers::launch_provider_and_get_wallet; /// use fuels_accounts::ViewOnlyAccount; /// /// async fn single_wallet() -> Result<(), Box<dyn std::error::Error>> { /// let wallet = launch_provider_and_get_wallet().await?; /// dbg!(wallet.address()); /// Ok(()) /// } /// ``` pub async fn launch_provider_and_get_wallet() -> Result<Wallet> { let mut wallets = launch_custom_provider_and_get_wallets(WalletsConfig::new(Some(1), None, None), None, None) .await?; Ok(wallets.pop().expect("should have one wallet")) } /// Launches a custom node and provider, along with a configurable number of wallets. 
/// /// # Examples /// ``` /// use fuels_test_helpers::launch_custom_provider_and_get_wallets; /// use fuels_test_helpers::WalletsConfig; /// use fuels_accounts::ViewOnlyAccount; /// /// async fn multiple_wallets() -> Result<(), Box<dyn std::error::Error>> { /// let num_wallets = 2; /// let num_coins = 1; /// let amount = 1; /// let config = WalletsConfig::new(Some(num_wallets), Some(num_coins), Some(amount)); /// /// let mut wallets = launch_custom_provider_and_get_wallets(config, None, None).await?; /// let first_wallet = wallets.pop().unwrap(); /// dbg!(first_wallet.address()); /// Ok(()) /// } /// ``` pub async fn launch_custom_provider_and_get_wallets( wallet_config: WalletsConfig, node_config: Option<NodeConfig>, chain_config: Option<ChainConfig>, ) -> Result<Vec<Wallet>> { const SIZE_SECRET_KEY: usize = size_of::<SecretKey>(); const PADDING_BYTES: usize = SIZE_SECRET_KEY - size_of::<u64>(); let signers: Vec<_> = (1..=wallet_config.num_wallets()) .map(|wallet_counter| { let mut secret_key: [u8; SIZE_SECRET_KEY] = [0; SIZE_SECRET_KEY]; secret_key[PADDING_BYTES..].copy_from_slice(&wallet_counter.to_be_bytes()); let key = SecretKey::try_from(secret_key.as_slice()) .expect("This should never happen as we provide a [u8; SIZE_SECRET_KEY] array"); PrivateKeySigner::new(key) }) .collect(); let all_coins = signers .iter() .flat_map(|signer| setup_custom_assets_coins(signer.address(), wallet_config.assets())) .collect::<Vec<_>>(); let provider = setup_test_provider(all_coins, vec![], node_config, chain_config).await?; let wallets = signers .into_iter() .map(|signer| Wallet::new(signer, provider.clone())) .collect::<Vec<_>>(); Ok(wallets) } #[cfg(test)] mod tests { use fuel_core_chain_config::ChainConfig; use fuel_tx::{ConsensusParameters, TxParameters}; use fuel_types::AssetId; use fuels_accounts::ViewOnlyAccount; use fuels_core::types::{coin_type::CoinType, errors::Result}; use rand::Fill; use crate::{AssetConfig, WalletsConfig, 
launch_custom_provider_and_get_wallets}; #[tokio::test] async fn test_wallet_config() -> Result<()> { let num_wallets = 2; let num_coins = 3; let amount = 100; let config = WalletsConfig::new(Some(num_wallets), Some(num_coins), Some(amount)); let wallets = launch_custom_provider_and_get_wallets(config, None, None).await?; let provider = wallets.first().unwrap().try_provider()?; let consensus_parameters = provider.consensus_parameters().await?; assert_eq!(wallets.len(), num_wallets as usize); for wallet in &wallets { let coins = wallet .get_coins(*consensus_parameters.base_asset_id()) .await?; assert_eq!(coins.len(), num_coins as usize); for coin in &coins { assert_eq!(coin.amount, amount); } } Ok(()) } #[tokio::test] async fn test_wallet_config_multiple_assets() -> std::result::Result<(), Box<dyn std::error::Error>> { let mut rng = rand::thread_rng(); let num_wallets = 3; let asset_base = AssetConfig { id: AssetId::zeroed(), num_coins: 2, coin_amount: 4, }; let mut asset_id_1 = AssetId::zeroed(); asset_id_1.try_fill(&mut rng)?; let asset_1 = AssetConfig { id: asset_id_1, num_coins: 6, coin_amount: 8, }; let mut asset_id_2 = AssetId::zeroed(); asset_id_2.try_fill(&mut rng)?; let asset_2 = AssetConfig { id: asset_id_2, num_coins: 10, coin_amount: 12, }; let assets = vec![asset_base, asset_1, asset_2]; let config = WalletsConfig::new_multiple_assets(num_wallets, assets.clone()); let wallets = launch_custom_provider_and_get_wallets(config, None, None).await?; assert_eq!(wallets.len(), num_wallets as usize); for asset in assets { for wallet in &wallets { let resources = wallet .get_spendable_resources( asset.id, (asset.num_coins * asset.coin_amount).into(), None, ) .await?; assert_eq!(resources.len() as u64, asset.num_coins); for resource in resources { assert_eq!(resource.amount(), asset.coin_amount); match resource { CoinType::Coin(coin) => { assert_eq!(coin.owner, wallet.address()) } CoinType::Message(_) => panic!("resources contained messages"), CoinType::Unknown => 
panic!("resources contained unknown coins"), } } } } Ok(()) } #[tokio::test] async fn generated_wallets_are_deterministic() -> Result<()> { let num_wallets = 32; let num_coins = 1; let amount = 100; let config = WalletsConfig::new(Some(num_wallets), Some(num_coins), Some(amount)); let wallets = launch_custom_provider_and_get_wallets(config, None, None).await?; assert_eq!( wallets.get(31).unwrap().address().to_string(), "1c25f74846784d11715dfbe926fc41aca80e005fb782225aa8ad0dadf871cfc5".to_string() ); Ok(()) } #[tokio::test] async fn generated_wallets_with_custom_chain_config() -> Result<()> { let mut consensus_parameters = ConsensusParameters::default(); let block_gas_limit = 10_000_000_000; consensus_parameters.set_block_gas_limit(block_gas_limit); let max_gas_per_tx = 10_000_000_000; let tx_params = TxParameters::default().with_max_gas_per_tx(max_gas_per_tx); consensus_parameters.set_tx_params(tx_params); let chain_config = ChainConfig { consensus_parameters, ..ChainConfig::default() }; let num_wallets = 4; let num_coins = 3; let coin_amount = 2_000_000_000; let wallets = launch_custom_provider_and_get_wallets( WalletsConfig::new(Some(num_wallets), Some(num_coins), Some(coin_amount)), None, Some(chain_config), ) .await?; assert_eq!(wallets.len() as u64, num_wallets); for wallet in wallets.into_iter() { assert_eq!( wallet .try_provider()? .consensus_parameters() .await? .tx_params() .max_gas_per_tx(), max_gas_per_tx ); assert_eq!( wallet.get_coins(AssetId::zeroed()).await?.len() as u64, num_coins ); assert_eq!( *wallet .get_balances() .await? .get("0000000000000000000000000000000000000000000000000000000000000000") .expect("failed to get value"), (num_coins * coin_amount) as u128 ); } Ok(()) } }
rust
Apache-2.0
865e00c295de8b4a0a1ef7ac926c3c8266d5151b
2026-01-04T15:31:59.450823Z
false
FuelLabs/fuels-rs
https://github.com/FuelLabs/fuels-rs/blob/865e00c295de8b4a0a1ef7ac926c3c8266d5151b/packages/fuels/src/lib.rs
packages/fuels/src/lib.rs
//! # Fuel Rust SDK. //! //! ## Quickstart: `prelude` //! //! A prelude is provided which imports all the important data types and traits for you. Use this when you want to quickly bootstrap a new project. //! //! ```no_run //! # #[allow(unused)] //! use fuels::prelude::*; //! ``` //! //! Examples on how you can use the types imported by the prelude can be found in //! the [test suite](https://github.com/FuelLabs/fuels-rs/tree/master/packages/fuels/tests) pub mod tx { pub use fuel_tx::{ ConsensusParameters, ContractIdExt, ContractParameters, FeeParameters, GasCosts, PredicateParameters, Receipt, ScriptExecutionResult, ScriptParameters, StorageSlot, Transaction as FuelTransaction, TxId, TxParameters, TxPointer, UpgradePurpose, UploadSubsection, UtxoId, Witness, consensus_parameters, field, }; } #[cfg(feature = "std")] pub mod client { pub use fuel_core_client::client::{ FuelClient, pagination::{PageDirection, PaginationRequest}, }; } pub mod macros { pub use fuels_macros::*; } pub mod programs { pub use fuels_programs::*; } pub mod core { pub use fuels_core::{Configurable, Configurables, codec, constants, offsets, traits}; } pub mod crypto { pub use fuel_crypto::{Hasher, Message, PublicKey, SecretKey, Signature}; } pub mod accounts { pub use fuels_accounts::*; } pub mod types { pub use fuels_core::types::*; } #[cfg(feature = "std")] pub mod test_helpers { pub use fuels_test_helpers::*; } #[doc(hidden)] pub mod prelude { #[cfg(feature = "std")] pub use super::{ accounts::{ Account, ViewOnlyAccount, predicate::Predicate, provider::*, signers::*, wallet::Wallet, }, core::{ codec::{LogDecoder, LogId, LogResult}, traits::Signer, }, macros::setup_program_test, programs::{ calls::{CallHandler, CallParameters, ContractDependency, Execution}, contract::{Contract, LoadConfiguration, StorageConfiguration}, }, test_helpers::*, types::transaction_builders::*, }; pub use super::{ core::constants::*, macros::abigen, tx::Receipt, types::{ Address, AssetId, Bytes, ContractId, 
RawSlice, Salt, errors::{Error, Result}, transaction::*, }, }; }
rust
Apache-2.0
865e00c295de8b4a0a1ef7ac926c3c8266d5151b
2026-01-04T15:31:59.450823Z
false
FuelLabs/fuels-rs
https://github.com/FuelLabs/fuels-rs/blob/865e00c295de8b4a0a1ef7ac926c3c8266d5151b/packages/fuels-core/src/lib.rs
packages/fuels-core/src/lib.rs
pub mod codec; pub mod traits; pub mod types; mod utils; pub use utils::*; use crate::types::errors::Result; #[derive(Debug, Clone, Default, PartialEq)] pub struct Configurable { /// The offset (in bytes) within the binary where the data is located. pub offset: u64, /// The data related to the configurable. pub data: Vec<u8>, } #[derive(Debug, Clone, Default, PartialEq)] pub struct Configurables { pub offsets_with_data: Vec<Configurable>, } impl Configurables { pub fn new(offsets_with_data: Vec<Configurable>) -> Self { Self { offsets_with_data } } pub fn with_shifted_offsets(self, shift: i64) -> Result<Self> { let new_offsets_with_data = self .offsets_with_data .into_iter() .map(|c| { let new_offset = if shift.is_negative() { c.offset.checked_sub(shift.unsigned_abs()) } else { c.offset.checked_add(shift.unsigned_abs()) }; let new_offset = new_offset.ok_or_else(|| { crate::error!( Other, "Overflow occurred while shifting offset: {} + {shift}", c.offset ) })?; Ok(Configurable { offset: new_offset, data: c.data, }) }) .collect::<Result<Vec<_>>>()?; Ok(Self { offsets_with_data: new_offsets_with_data, }) } pub fn update_constants_in(&self, binary: &mut [u8]) { for c in &self.offsets_with_data { let offset = c.offset as usize; binary[offset..offset + c.data.len()].copy_from_slice(&c.data) } } } impl From<Configurables> for Vec<Configurable> { fn from(config: Configurables) -> Vec<Configurable> { config.offsets_with_data.clone() } } #[cfg(test)] mod tests { use super::*; #[test] fn test_with_shifted_offsets_positive_shift() { let offsets_with_data = vec![Configurable { offset: 10u64, data: vec![1, 2, 3], }]; let configurables = Configurables::new(offsets_with_data.clone()); let shifted_configurables = configurables.with_shifted_offsets(5).unwrap(); let expected_offsets_with_data = vec![Configurable { offset: 15u64, data: vec![1, 2, 3], }]; assert_eq!( shifted_configurables.offsets_with_data, expected_offsets_with_data ); } #[test] fn 
test_with_shifted_offsets_negative_shift() { let offsets_with_data = vec![Configurable { offset: 10u64, data: vec![4, 5, 6], }]; let configurables = Configurables::new(offsets_with_data.clone()); let shifted_configurables = configurables.with_shifted_offsets(-5).unwrap(); let expected_offsets_with_data = vec![Configurable { offset: 5u64, data: vec![4, 5, 6], }]; assert_eq!( shifted_configurables.offsets_with_data, expected_offsets_with_data ); } #[test] fn test_with_shifted_offsets_zero_shift() { let offsets_with_data = vec![Configurable { offset: 20u64, data: vec![7, 8, 9], }]; let configurables = Configurables::new(offsets_with_data.clone()); let shifted_configurables = configurables.with_shifted_offsets(0).unwrap(); let expected_offsets_with_data = offsets_with_data; assert_eq!( shifted_configurables.offsets_with_data, expected_offsets_with_data ); } #[test] fn test_with_shifted_offsets_overflow() { let offsets_with_data = vec![Configurable { offset: u64::MAX - 1, data: vec![1, 2, 3], }]; let configurables = Configurables::new(offsets_with_data); let result = configurables.with_shifted_offsets(10); assert!(result.is_err()); if let Err(e) = result { assert!( e.to_string() .contains("Overflow occurred while shifting offset") ); } } #[test] fn test_with_shifted_offsets_underflow() { let offsets_with_data = vec![Configurable { offset: 5u64, data: vec![4, 5, 6], }]; let configurables = Configurables::new(offsets_with_data); let result = configurables.with_shifted_offsets(-10); assert!(result.is_err()); if let Err(e) = result { assert!( e.to_string() .contains("Overflow occurred while shifting offset") ); } } }
rust
Apache-2.0
865e00c295de8b4a0a1ef7ac926c3c8266d5151b
2026-01-04T15:31:59.450823Z
false
FuelLabs/fuels-rs
https://github.com/FuelLabs/fuels-rs/blob/865e00c295de8b4a0a1ef7ac926c3c8266d5151b/packages/fuels-core/src/codec.rs
packages/fuels-core/src/codec.rs
mod abi_decoder; mod abi_encoder; mod abi_formatter; mod function_selector; mod logs; mod utils; use std::io::Read; pub use abi_decoder::*; pub use abi_encoder::*; pub use abi_formatter::*; pub use function_selector::*; pub use logs::*; use crate::{ traits::{Parameterize, Tokenizable}, types::errors::Result, }; /// Decodes `bytes` into type `T` following the schema defined by T's `Parameterize` impl pub fn try_from_bytes<T>(bytes: impl Read, decoder_config: DecoderConfig) -> Result<T> where T: Parameterize + Tokenizable, { let token = ABIDecoder::new(decoder_config).decode(&T::param_type(), bytes)?; T::from_token(token) } #[cfg(test)] mod tests { use super::*; use crate::{ constants::WORD_SIZE, types::{Address, AsciiString, AssetId, ContractId}, }; #[test] fn convert_all_from_bool_to_u64() -> Result<()> { let bytes = [255; WORD_SIZE]; macro_rules! test_decode { ($($for_type: ident),*) => { $(assert_eq!( try_from_bytes::<$for_type>(bytes.as_slice(), DecoderConfig::default())?, $for_type::MAX );)* }; } assert!(try_from_bytes::<bool>( bytes.as_slice(), DecoderConfig::default() )?); test_decode!(u8, u16, u32, u64); Ok(()) } #[test] fn convert_bytes_into_tuple() -> Result<()> { let tuple_in_bytes = [0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 2]; let the_tuple: (u64, u32) = try_from_bytes(tuple_in_bytes.as_slice(), DecoderConfig::default())?; assert_eq!(the_tuple, (1, 2)); Ok(()) } #[test] fn convert_native_types() -> Result<()> { let bytes = [255; 32]; macro_rules! test_decode { ($($for_type: ident),*) => { $(assert_eq!( try_from_bytes::<$for_type>(bytes.as_slice(), DecoderConfig::default())?, $for_type::new(bytes.as_slice().try_into()?) );)* }; } test_decode!(Address, ContractId, AssetId); Ok(()) } #[test] fn string_slice_is_read_in_total() { // This was a bug where the decoder read more bytes than it reported, causing the next // element to be read incorrectly. 
// given #[derive( fuels_macros::Tokenizable, fuels_macros::Parameterize, Clone, PartialEq, Debug, )] #[FuelsCorePath = "crate"] #[FuelsTypesPath = "crate::types"] struct Test { name: AsciiString, age: u64, } let input = Test { name: AsciiString::new("Alice".to_owned()).unwrap(), age: 42, }; let encoded = ABIEncoder::default() .encode(&[input.clone().into_token()]) .unwrap(); // when let decoded = try_from_bytes::<Test>(encoded.as_slice(), DecoderConfig::default()).unwrap(); // then assert_eq!(decoded, input); } }
rust
Apache-2.0
865e00c295de8b4a0a1ef7ac926c3c8266d5151b
2026-01-04T15:31:59.450823Z
false
FuelLabs/fuels-rs
https://github.com/FuelLabs/fuels-rs/blob/865e00c295de8b4a0a1ef7ac926c3c8266d5151b/packages/fuels-core/src/types.rs
packages/fuels-core/src/types.rs
use fuel_types::bytes::padded_len; pub use fuel_types::{ Address, AssetId, BlockHeight, Bytes4, Bytes8, Bytes32, Bytes64, ChainId, ContractId, MessageId, Nonce, Salt, SubAssetId, Word, }; pub use crate::types::{core::*, method_descriptor::*, token::*, wrappers::*}; use crate::{error, types::errors::Result}; mod core; mod dry_runner; pub mod errors; pub mod param_types; mod token; pub mod transaction_builders; pub mod tx_response; pub mod tx_status; mod wrappers; pub use dry_runner::*; pub mod checksum_address; pub mod method_descriptor; pub type ByteArray = [u8; 8]; pub type Selector = Vec<u8>; /// Converts a u16 to a right aligned array of 8 bytes. pub fn pad_u16(value: u16) -> ByteArray { let mut padded = ByteArray::default(); padded[6..].copy_from_slice(&value.to_be_bytes()); padded } /// Converts a u32 to a right aligned array of 8 bytes. pub fn pad_u32(value: u32) -> ByteArray { let mut padded = [0u8; 8]; padded[4..].copy_from_slice(&value.to_be_bytes()); padded } pub fn pad_string(s: &str) -> Result<Vec<u8>> { let padded_len = padded_len(s.as_bytes()).ok_or_else(|| error!(Codec, "string is too long to be padded"))?; let pad = padded_len - s.len(); let mut padded = s.as_bytes().to_owned(); padded.extend_from_slice(&vec![0; pad]); Ok(padded) }
rust
Apache-2.0
865e00c295de8b4a0a1ef7ac926c3c8266d5151b
2026-01-04T15:31:59.450823Z
false
FuelLabs/fuels-rs
https://github.com/FuelLabs/fuels-rs/blob/865e00c295de8b4a0a1ef7ac926c3c8266d5151b/packages/fuels-core/src/utils.rs
packages/fuels-core/src/utils.rs
pub mod constants; pub mod offsets; use constants::{WITNESS_STATIC_SIZE, WORD_SIZE}; use fuel_tx::Witness; use crate::{error, types::errors::Result}; pub fn checked_round_up_to_word_alignment(bytes_len: usize) -> Result<usize> { let lhs = bytes_len.checked_add(WORD_SIZE - 1).ok_or_else(|| { error!( Codec, "addition overflow while rounding up {bytes_len} bytes to word alignment" ) })?; let rhs = lhs.checked_rem(WORD_SIZE).ok_or_else(|| { error!( Codec, "remainder overflow while rounding up {bytes_len} bytes to word alignment" ) })?; lhs.checked_sub(rhs).ok_or_else(|| { error!( Codec, "subtraction overflow while rounding up {bytes_len} bytes to word alignment" ) }) } pub(crate) fn calculate_witnesses_size<'a, I: IntoIterator<Item = &'a Witness>>( witnesses: I, ) -> usize { witnesses .into_iter() .map(|w| w.as_ref().len() + WITNESS_STATIC_SIZE) .sum() } pub(crate) mod sealed { pub trait Sealed {} } #[cfg(test)] pub(crate) fn to_named<'a, I: IntoIterator<Item = &'a crate::types::param_types::ParamType>>( param_types: I, ) -> Vec<(String, crate::types::param_types::ParamType)> { param_types .into_iter() .map(|pt| ("".to_string(), pt.clone())) .collect() }
rust
Apache-2.0
865e00c295de8b4a0a1ef7ac926c3c8266d5151b
2026-01-04T15:31:59.450823Z
false
FuelLabs/fuels-rs
https://github.com/FuelLabs/fuels-rs/blob/865e00c295de8b4a0a1ef7ac926c3c8266d5151b/packages/fuels-core/src/traits.rs
packages/fuels-core/src/traits.rs
mod parameterize; mod signer; mod tokenizable; pub use parameterize::*; pub use signer::*; pub use tokenizable::*;
rust
Apache-2.0
865e00c295de8b4a0a1ef7ac926c3c8266d5151b
2026-01-04T15:31:59.450823Z
false
FuelLabs/fuels-rs
https://github.com/FuelLabs/fuels-rs/blob/865e00c295de8b4a0a1ef7ac926c3c8266d5151b/packages/fuels-core/src/codec/abi_formatter.rs
packages/fuels-core/src/codec/abi_formatter.rs
use std::{collections::HashMap, io::Read}; use fuel_abi_types::abi::unified_program::UnifiedProgramABI; use itertools::Itertools; use super::{ABIDecoder, DecoderConfig}; use crate::{Result, error, types::param_types::ParamType}; pub struct ABIFormatter { functions: HashMap<String, Vec<ParamType>>, configurables: Vec<(String, ParamType)>, decoder: ABIDecoder, } impl ABIFormatter { pub fn has_fn(&self, fn_name: &str) -> bool { self.functions.contains_key(fn_name) } pub fn with_decoder_config(mut self, config: DecoderConfig) -> Self { self.decoder = ABIDecoder::new(config); self } pub fn from_abi(abi: UnifiedProgramABI) -> Result<Self> { let functions = abi .functions .iter() .map(|fun| (fun.name.clone(), fun.clone())) .collect::<HashMap<_, _>>(); let type_lookup = abi .types .iter() .map(|decl| (decl.type_id, decl.clone())) .collect::<HashMap<_, _>>(); let functions = functions .into_iter() .map(|(name, fun)| { let args = fun .inputs .iter() .map(|type_application| { ParamType::try_from_type_application(type_application, &type_lookup) }) .collect::<Result<Vec<_>>>()?; Ok((name.clone(), args)) }) .collect::<Result<HashMap<_, _>>>()?; let configurables = abi .configurables .into_iter() .flatten() .sorted_by_key(|c| c.offset) .map(|c| { let param_type = ParamType::try_from_type_application(&c.application, &type_lookup)?; Ok((c.name, param_type)) }) .collect::<Result<Vec<_>>>()?; Ok(Self { functions, decoder: ABIDecoder::default(), configurables, }) } pub fn from_json_abi(abi: impl AsRef<str>) -> Result<Self> { let parsed_abi = UnifiedProgramABI::from_json_abi(abi.as_ref())?; Self::from_abi(parsed_abi) } pub fn decode_fn_args<R: Read>(&self, fn_name: &str, data: R) -> Result<Vec<String>> { let args = self .functions .get(fn_name) .ok_or_else(|| error!(Codec, "Function '{}' not found in the ABI", fn_name))?; self.decoder.decode_multiple_as_debug_str(args, data) } pub fn decode_configurables<R: Read>( &self, configurable_data: R, ) -> Result<Vec<(String, String)>> { let 
param_types = self .configurables .iter() .map(|(_, param_type)| param_type) .cloned() .collect::<Vec<_>>(); let decoded = self .decoder .decode_multiple_as_debug_str(&param_types, configurable_data)? .into_iter() .zip(&self.configurables) .map(|(value, (name, _))| (name.clone(), value)) .collect(); Ok(decoded) } } #[cfg(test)] mod tests { use super::*; use crate::types::errors::Error; #[test] fn gracefully_handles_missing_fn() { // given let decoder = ABIFormatter::from_abi(UnifiedProgramABI::default()).unwrap(); // when let err = decoder .decode_fn_args("non_existent_fn", [].as_slice()) .unwrap_err(); // then let Error::Codec(err) = err else { panic!("Expected Codec error, got {:?}", err); }; assert_eq!(err, "Function 'non_existent_fn' not found in the ABI"); } }
rust
Apache-2.0
865e00c295de8b4a0a1ef7ac926c3c8266d5151b
2026-01-04T15:31:59.450823Z
false
FuelLabs/fuels-rs
https://github.com/FuelLabs/fuels-rs/blob/865e00c295de8b4a0a1ef7ac926c3c8266d5151b/packages/fuels-core/src/codec/function_selector.rs
packages/fuels-core/src/codec/function_selector.rs
pub use fuels_code_gen::utils::encode_fn_selector; /// This uses the default `EncoderConfig` configuration. #[macro_export] macro_rules! calldata { ( $($arg: expr),* ) => { ::fuels::core::codec::ABIEncoder::default().encode(&[$(::fuels::core::traits::Tokenizable::into_token($arg)),*]) } } pub use calldata;
rust
Apache-2.0
865e00c295de8b4a0a1ef7ac926c3c8266d5151b
2026-01-04T15:31:59.450823Z
false
FuelLabs/fuels-rs
https://github.com/FuelLabs/fuels-rs/blob/865e00c295de8b4a0a1ef7ac926c3c8266d5151b/packages/fuels-core/src/codec/utils.rs
packages/fuels-core/src/codec/utils.rs
use crate::types::errors::{Result, error}; pub(crate) struct CounterWithLimit { count: usize, max: usize, name: String, direction: CodecDirection, } #[derive(Debug)] pub(crate) enum CodecDirection { Encoding, Decoding, } impl std::fmt::Display for CodecDirection { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { match self { CodecDirection::Encoding => write!(f, "encoding"), CodecDirection::Decoding => write!(f, "decoding"), } } } impl CounterWithLimit { pub(crate) fn new(max: usize, name: impl Into<String>, direction: CodecDirection) -> Self { Self { count: 0, max, direction, name: name.into(), } } pub(crate) fn increase(&mut self) -> Result<()> { self.count += 1; if self.count > self.max { return Err(error!( Codec, "{} limit `{}` reached while {}. Try increasing it", self.name, self.max, self.direction )); } Ok(()) } pub(crate) fn decrease(&mut self) { if self.count > 0 { self.count -= 1; } } }
rust
Apache-2.0
865e00c295de8b4a0a1ef7ac926c3c8266d5151b
2026-01-04T15:31:59.450823Z
false
FuelLabs/fuels-rs
https://github.com/FuelLabs/fuels-rs/blob/865e00c295de8b4a0a1ef7ac926c3c8266d5151b/packages/fuels-core/src/codec/logs.rs
packages/fuels-core/src/codec/logs.rs
use std::{ any::TypeId, collections::{HashMap, HashSet}, fmt::{Debug, Formatter}, iter::FilterMap, }; /// Trait that represents a log with a unique identifier. pub trait Log { /// Returns the unique identifier of the log as a string. const LOG_ID: &'static str; /// Returns the unique identifier of the log as a `u64`. const LOG_ID_U64: u64; } #[derive(Debug, Clone)] pub struct ErrorDetails { pub(crate) pkg: String, pub(crate) file: String, pub(crate) line: u64, pub(crate) column: u64, pub(crate) log_id: Option<String>, pub(crate) msg: Option<String>, } impl ErrorDetails { pub fn new( pkg: String, file: String, line: u64, column: u64, log_id: Option<String>, msg: Option<String>, ) -> Self { Self { pkg, file, line, column, log_id, msg, } } } use fuel_tx::{ContractId, Receipt}; use crate::{ codec::{ABIDecoder, DecoderConfig}, traits::{Parameterize, Tokenizable}, types::errors::{Error, Result, error}, }; #[derive(Clone)] pub struct LogFormatter { formatter: fn(DecoderConfig, &[u8]) -> Result<String>, type_id: TypeId, } impl LogFormatter { pub fn new_log<T: Tokenizable + Parameterize + Debug + 'static>() -> Self { Self { formatter: Self::format_log::<T>, type_id: TypeId::of::<T>(), } } pub fn new_error<T: Tokenizable + Parameterize + std::error::Error + 'static>() -> Self { Self { formatter: Self::format_error::<T>, type_id: TypeId::of::<T>(), } } fn format_log<T: Parameterize + Tokenizable + Debug>( decoder_config: DecoderConfig, bytes: &[u8], ) -> Result<String> { let token = ABIDecoder::new(decoder_config).decode(&T::param_type(), bytes)?; Ok(format!("{:?}", T::from_token(token)?)) } fn format_error<T: Parameterize + Tokenizable + std::error::Error>( decoder_config: DecoderConfig, bytes: &[u8], ) -> Result<String> { let token = ABIDecoder::new(decoder_config).decode(&T::param_type(), bytes)?; Ok(T::from_token(token)?.to_string()) } pub fn can_handle_type<T: Tokenizable + Parameterize + 'static>(&self) -> bool { TypeId::of::<T>() == self.type_id } pub fn format(&self, 
decoder_config: DecoderConfig, bytes: &[u8]) -> Result<String> { (self.formatter)(decoder_config, bytes) } } impl Debug for LogFormatter { fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { f.debug_struct("LogFormatter") .field("type_id", &self.type_id) .finish() } } /// Holds a unique log ID #[derive(Debug, Clone, Default, PartialEq, Eq, Hash)] pub struct LogId(ContractId, String); /// Struct used to pass the log mappings from the Abigen #[derive(Debug, Clone, Default)] pub struct LogDecoder { /// A mapping of LogId and param-type log_formatters: HashMap<LogId, LogFormatter>, error_codes: HashMap<u64, ErrorDetails>, decoder_config: DecoderConfig, } #[derive(Debug)] pub struct LogResult { pub results: Vec<Result<String>>, } impl LogResult { pub fn filter_succeeded(&self) -> Vec<&str> { self.results .iter() .filter_map(|result| result.as_deref().ok()) .collect() } pub fn filter_failed(&self) -> Vec<&Error> { self.results .iter() .filter_map(|result| result.as_ref().err()) .collect() } } impl LogDecoder { pub fn new( log_formatters: HashMap<LogId, LogFormatter>, error_codes: HashMap<u64, ErrorDetails>, ) -> Self { Self { log_formatters, error_codes, decoder_config: Default::default(), } } pub fn get_error_codes(&self, id: &u64) -> Option<&ErrorDetails> { self.error_codes.get(id) } pub fn set_decoder_config(&mut self, decoder_config: DecoderConfig) -> &mut Self { self.decoder_config = decoder_config; self } /// Get all logs results from the given receipts as `Result<String>` pub fn decode_logs(&self, receipts: &[Receipt]) -> LogResult { let results = receipts .iter() .extract_log_id_and_data() .map(|(log_id, data)| self.format_log(&log_id, &data)) .collect(); LogResult { results } } fn format_log(&self, log_id: &LogId, data: &[u8]) -> Result<String> { self.log_formatters .get(log_id) .ok_or_else(|| { error!( Codec, "missing log formatter for log_id: `{:?}`, data: `{:?}`. 
\ Consider adding external contracts using `with_contracts()`", log_id, data ) }) .and_then(|log_formatter| log_formatter.format(self.decoder_config, data)) } pub(crate) fn decode_last_log(&self, receipts: &[Receipt]) -> Result<String> { receipts .iter() .rev() .extract_log_id_and_data() .next() .ok_or_else(|| error!(Codec, "no receipts found for decoding last log")) .and_then(|(log_id, data)| self.format_log(&log_id, &data)) } pub(crate) fn decode_last_two_logs(&self, receipts: &[Receipt]) -> Result<(String, String)> { let res = receipts .iter() .rev() .extract_log_id_and_data() .map(|(log_id, data)| self.format_log(&log_id, &data)) .take(2) .collect::<Result<Vec<_>>>(); match res.as_deref() { Ok([rhs, lhs]) => Ok((lhs.to_string(), rhs.to_string())), Ok(some_slice) => Err(error!( Codec, "expected to have two logs. Found {}", some_slice.len() )), Err(_) => Err(res.expect_err("must be an error")), } } /// Get decoded logs with specific type from the given receipts. /// Note that this method returns the actual type and not a `String` representation. pub fn decode_logs_with_type<T: Tokenizable + Parameterize + 'static>( &self, receipts: &[Receipt], ) -> Result<Vec<T>> { let target_ids: HashSet<LogId> = self .log_formatters .iter() .filter(|(_, log_formatter)| log_formatter.can_handle_type::<T>()) .map(|(log_id, _)| log_id.clone()) .collect(); receipts .iter() .extract_log_id_and_data() .filter_map(|(log_id, bytes)| { target_ids.contains(&log_id).then(|| { let token = ABIDecoder::new(self.decoder_config) .decode(&T::param_type(), bytes.as_slice())?; T::from_token(token) }) }) .collect() } /// Get LogIds and lazy decoders for specific type from a single receipt. 
pub fn decode_logs_lazy<'a, T: Tokenizable + Parameterize + 'static>( &'a self, receipt: &'a Receipt, ) -> impl Iterator<Item = impl FnOnce() -> Result<T>> + 'a { let target_ids: HashSet<&LogId> = self .log_formatters .iter() .filter(|(_, log_formatter)| log_formatter.can_handle_type::<T>()) .map(|(log_id, _)| log_id) .collect(); std::iter::once(receipt).extract_matching_logs_lazy::<T>(target_ids, self.decoder_config) } pub fn merge(&mut self, log_decoder: LogDecoder) { self.log_formatters.extend(log_decoder.log_formatters); self.error_codes.extend(log_decoder.error_codes); } } trait ExtractLogIdData { type Output: Iterator<Item = (LogId, Vec<u8>)>; fn extract_log_id_and_data(self) -> Self::Output; } trait ExtractLogIdLazy { fn extract_matching_logs_lazy<T: Tokenizable + Parameterize + 'static>( self, target_ids: HashSet<&LogId>, decoder_config: DecoderConfig, ) -> impl Iterator<Item = impl FnOnce() -> Result<T>>; } impl<'a, I: Iterator<Item = &'a Receipt>> ExtractLogIdData for I { type Output = FilterMap<Self, fn(&Receipt) -> Option<(LogId, Vec<u8>)>>; fn extract_log_id_and_data(self) -> Self::Output { self.filter_map(|r| match r { Receipt::LogData { rb, data: Some(data), id, .. } => Some((LogId(*id, (*rb).to_string()), data.to_vec())), Receipt::Log { ra, rb, id, .. } => { Some((LogId(*id, (*rb).to_string()), ra.to_be_bytes().to_vec())) } _ => None, }) } } impl<'a, I: Iterator<Item = &'a Receipt>> ExtractLogIdLazy for I { fn extract_matching_logs_lazy<T: Tokenizable + Parameterize + 'static>( self, target_ids: HashSet<&LogId>, decoder_config: DecoderConfig, ) -> impl Iterator<Item = impl FnOnce() -> Result<T>> { self.filter_map(move |r| { let log_id = match r { Receipt::LogData { rb, id, .. } => LogId(*id, (*rb).to_string()), Receipt::Log { rb, id, .. } => LogId(*id, (*rb).to_string()), _ => return None, }; if !target_ids.contains(&log_id) { return None; } enum Data<'a> { LogData(&'a [u8]), LogRa(u64), } let data = match r { Receipt::LogData { data: Some(data), .. 
} => Some(Data::LogData(data.as_slice())), Receipt::Log { ra, .. } => Some(Data::LogRa(*ra)), _ => None, }; data.map(move |data| { move || { let normalized_data = match data { Data::LogData(data) => data, Data::LogRa(ra) => &ra.to_be_bytes(), }; let token = ABIDecoder::new(decoder_config) .decode(&T::param_type(), normalized_data)?; T::from_token(token) } }) }) } } pub fn log_formatters_lookup( log_id_log_formatter_pairs: Vec<(String, LogFormatter)>, contract_id: ContractId, ) -> HashMap<LogId, LogFormatter> { log_id_log_formatter_pairs .into_iter() .map(|(id, log_formatter)| (LogId(contract_id, id), log_formatter)) .collect() }
rust
Apache-2.0
865e00c295de8b4a0a1ef7ac926c3c8266d5151b
2026-01-04T15:31:59.450823Z
false
FuelLabs/fuels-rs
https://github.com/FuelLabs/fuels-rs/blob/865e00c295de8b4a0a1ef7ac926c3c8266d5151b/packages/fuels-core/src/codec/abi_decoder.rs
packages/fuels-core/src/codec/abi_decoder.rs
//! ABI decoding: turns raw byte streams into [`Token`]s according to a
//! [`ParamType`] schema, with configurable resource limits.
mod bounded_decoder;
mod decode_as_debug_str;

use std::io::Read;

use crate::{
    codec::abi_decoder::{
        bounded_decoder::BoundedDecoder, decode_as_debug_str::decode_as_debug_str,
    },
    types::{Token, errors::Result, param_types::ParamType},
};

/// Limits enforced during decoding, guarding against malformed or malicious input.
#[derive(Debug, Clone, Copy)]
pub struct DecoderConfig {
    /// Entering a struct, array, tuple, enum or vector increases the depth. Decoding will fail if
    /// the current depth becomes greater than `max_depth` configured here.
    pub max_depth: usize,
    /// Every decoded Token will increase the token count. Decoding will fail if the current
    /// token count becomes greater than `max_tokens` configured here.
    pub max_tokens: usize,
}

// ANCHOR: default_decoder_config
impl Default for DecoderConfig {
    fn default() -> Self {
        Self {
            max_depth: 45,
            max_tokens: 10_000,
        }
    }
}
// ANCHOR_END: default_decoder_config

/// Decodes bytes into [`Token`]s, applying the limits in [`DecoderConfig`].
#[derive(Default)]
pub struct ABIDecoder {
    pub config: DecoderConfig,
}

impl ABIDecoder {
    pub fn new(config: DecoderConfig) -> Self {
        Self { config }
    }

    /// Decodes `bytes` following the schema described in `param_type` into its respective `Token`.
    ///
    /// # Arguments
    ///
    /// * `param_type`: The `ParamType` of the type we expect is encoded inside `bytes`.
    /// * `bytes`: The bytes to be used in the decoding process.
    /// # Examples
    ///
    /// ```
    /// use fuels_core::codec::ABIDecoder;
    /// use fuels_core::traits::Tokenizable;
    /// use fuels_core::types::param_types::ParamType;
    ///
    /// let decoder = ABIDecoder::default();
    ///
    /// let token = decoder.decode(&ParamType::U64, [0, 0, 0, 0, 0, 0, 0, 7].as_slice()).unwrap();
    ///
    /// assert_eq!(u64::from_token(token).unwrap(), 7u64);
    /// ```
    pub fn decode(&self, param_type: &ParamType, mut bytes: impl Read) -> Result<Token> {
        // A fresh BoundedDecoder per call so the depth/token counters start at zero.
        BoundedDecoder::new(self.config).decode(param_type, &mut bytes)
    }

    /// Same as `decode` but decodes multiple `ParamType`s in one go.
    /// # Examples
    /// ```
    /// use fuels_core::codec::ABIDecoder;
    /// use fuels_core::types::param_types::ParamType;
    /// use fuels_core::types::Token;
    ///
    /// let decoder = ABIDecoder::default();
    /// let data = [7, 8];
    ///
    /// let tokens = decoder.decode_multiple(&[ParamType::U8, ParamType::U8], data.as_slice()).unwrap();
    ///
    /// assert_eq!(tokens, vec![Token::U8(7), Token::U8(8)]);
    /// ```
    pub fn decode_multiple(
        &self,
        param_types: &[ParamType],
        mut bytes: impl Read,
    ) -> Result<Vec<Token>> {
        BoundedDecoder::new(self.config).decode_multiple(param_types, &mut bytes)
    }

    /// Decodes `bytes` following the schema described in `param_type` into its respective debug
    /// string.
    ///
    /// # Arguments
    ///
    /// * `param_type`: The `ParamType` of the type we expect is encoded inside `bytes`.
    /// * `bytes`: The bytes to be used in the decoding process.
    /// # Examples
    ///
    /// ```
    /// use fuels_core::codec::ABIDecoder;
    /// use fuels_core::types::param_types::ParamType;
    ///
    /// let decoder = ABIDecoder::default();
    ///
    /// let debug_string = decoder.decode_as_debug_str(&ParamType::U64, [0, 0, 0, 0, 0, 0, 0, 7].as_slice()).unwrap();
    /// let expected_value = 7u64;
    ///
    /// assert_eq!(debug_string, format!("{expected_value}"));
    /// ```
    pub fn decode_as_debug_str(
        &self,
        param_type: &ParamType,
        mut bytes: impl Read,
    ) -> Result<String> {
        let token = BoundedDecoder::new(self.config).decode(param_type, &mut bytes)?;
        decode_as_debug_str(param_type, &token)
    }

    /// Same as `decode_as_debug_str` but for multiple `ParamType`s in one go.
    pub fn decode_multiple_as_debug_str(
        &self,
        param_types: &[ParamType],
        mut bytes: impl Read,
    ) -> Result<Vec<String>> {
        // NOTE: `token` holds a Vec<Token> here, despite the singular name.
        let token = BoundedDecoder::new(self.config).decode_multiple(param_types, &mut bytes)?;
        token
            .into_iter()
            .zip(param_types)
            .map(|(token, param_type)| decode_as_debug_str(param_type, &token))
            .collect()
    }
}

#[cfg(test)]
mod tests {
    use std::vec;

    use ParamType::*;

    use super::*;
    use crate::{
        constants::WORD_SIZE,
        to_named,
        traits::Parameterize,
        types::{StaticStringToken, U256, errors::Error, param_types::EnumVariants},
    };

    #[test]
    fn decode_multiple_uint() -> Result<()> {
        let types = vec![
            ParamType::U8,
            ParamType::U16,
            ParamType::U32,
            ParamType::U64,
            ParamType::U128,
            ParamType::U256,
        ];
        let data = [
            255, // u8
            255, 255, // u16
            255, 255, 255, 255, // u32
            255, 255, 255, 255, 255, 255, 255, 255, // u64
            255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
            255, // u128
            255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
            255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, // u256
        ];

        let decoded = ABIDecoder::default().decode_multiple(&types, data.as_slice())?;

        let expected = vec![
            Token::U8(u8::MAX),
            Token::U16(u16::MAX),
            Token::U32(u32::MAX),
            Token::U64(u64::MAX),
            Token::U128(u128::MAX),
            Token::U256(U256::MAX),
        ];
        assert_eq!(decoded, expected);

        Ok(())
    }

    #[test]
    fn decode_bool() -> Result<()> {
        let types = vec![ParamType::Bool, ParamType::Bool];
        let data = [1, 0];

        let decoded = ABIDecoder::default().decode_multiple(&types, data.as_slice())?;

        let expected = vec![Token::Bool(true), Token::Bool(false)];
        assert_eq!(decoded, expected);

        Ok(())
    }

    #[test]
    fn decode_b256() -> Result<()> {
        let data = [
            213, 87, 156, 70, 223, 204, 127, 24, 32, 112, 19, 230, 91, 68, 228, 203, 78, 44, 34,
            152, 244, 172, 69, 123, 168, 248, 39, 67, 243, 30, 147, 11,
        ];

        let decoded = ABIDecoder::default().decode(&ParamType::B256, data.as_slice())?;

        assert_eq!(decoded, Token::B256(data));

        Ok(())
    }

    #[test]
    fn decode_string_array() -> Result<()> {
        let types = vec![ParamType::StringArray(23), ParamType::StringArray(5)];
        let data = [
            84, 104, 105, 115, 32, 105, 115, 32, 97, 32, 102, 117, 108, 108, 32, 115, 101, 110,
            116, 101, 110, 99, 101, //This is a full sentence
            72, 101, 108, 108, 111, // Hello
        ];

        let decoded = ABIDecoder::default().decode_multiple(&types, data.as_slice())?;

        let expected = vec![
            Token::StringArray(StaticStringToken::new(
                "This is a full sentence".into(),
                Some(23),
            )),
            Token::StringArray(StaticStringToken::new("Hello".into(), Some(5))),
        ];
        assert_eq!(decoded, expected);

        Ok(())
    }

    #[test]
    fn decode_string_slice() -> Result<()> {
        let data = [
            0, 0, 0, 0, 0, 0, 0, 23, // [length]
            84, 104, 105, 115, 32, 105, 115, 32, 97, 32, 102, 117, 108, 108, 32, 115, 101, 110,
            116, 101, 110, 99, 101, //This is a full sentence
        ];

        let decoded = ABIDecoder::default().decode(&ParamType::StringSlice, data.as_slice())?;

        let expected = Token::StringSlice(StaticStringToken::new(
            "This is a full sentence".into(),
            None,
        ));
        assert_eq!(decoded, expected);

        Ok(())
    }

    #[test]
    fn decode_string() -> Result<()> {
        let data = [
            0, 0, 0, 0, 0, 0, 0, 23, // [length]
            84, 104, 105, 115, 32, 105, 115, 32, 97, 32, 102, 117, 108, 108, 32, 115, 101, 110,
            116, 101, 110, 99, 101, //This is a full sentence
        ];

        let decoded = ABIDecoder::default().decode(&ParamType::String, data.as_slice())?;

        let expected = Token::String("This is a full sentence".to_string());
        assert_eq!(decoded, expected);

        Ok(())
    }

    #[test]
    fn decode_tuple() -> Result<()> {
        let param_type = ParamType::Tuple(vec![ParamType::U32, ParamType::Bool]);
        let data = [
            0, 0, 0, 255, //u32
            1,   //bool
        ];

        let result = ABIDecoder::default().decode(&param_type, data.as_slice())?;

        let expected = Token::Tuple(vec![Token::U32(255), Token::Bool(true)]);
        assert_eq!(result, expected);

        Ok(())
    }

    #[test]
    fn decode_array() -> Result<()> {
        let types = vec![ParamType::Array(Box::new(ParamType::U8), 2)];
        let data = [255, 42];

        let decoded = ABIDecoder::default().decode_multiple(&types, data.as_slice())?;

        let expected = vec![Token::Array(vec![Token::U8(255), Token::U8(42)])];
        assert_eq!(decoded, expected);

        Ok(())
    }

    #[test]
    fn decode_struct() -> Result<()> {
        // struct MyStruct {
        //     foo: u8,
        //     bar: bool,
        // }
        let data = [1, 1];
        let param_type = ParamType::Struct {
            name: "".to_string(),
            fields: to_named(&[ParamType::U8, ParamType::Bool]),
            generics: vec![],
        };

        let decoded = ABIDecoder::default().decode(&param_type, data.as_slice())?;

        let expected = Token::Struct(vec![Token::U8(1), Token::Bool(true)]);
        assert_eq!(decoded, expected);

        Ok(())
    }

    #[test]
    fn decode_bytes() -> Result<()> {
        // 8-byte big-endian length prefix, then the payload.
        let data = [0, 0, 0, 0, 0, 0, 0, 7, 255, 0, 1, 2, 3, 4, 5];

        let decoded = ABIDecoder::default().decode(&ParamType::Bytes, data.as_slice())?;

        let expected = Token::Bytes([255, 0, 1, 2, 3, 4, 5].to_vec());
        assert_eq!(decoded, expected);

        Ok(())
    }

    #[test]
    fn decode_raw_slice() -> Result<()> {
        let data = [0, 0, 0, 0, 0, 0, 0, 7, 255, 0, 1, 2, 3, 4, 5];

        let decoded = ABIDecoder::default().decode(&ParamType::RawSlice, data.as_slice())?;

        let expected = Token::RawSlice([255, 0, 1, 2, 3, 4, 5].to_vec());
        assert_eq!(decoded, expected);

        Ok(())
    }

    #[test]
    fn decode_enum() -> Result<()> {
        // enum MyEnum {
        //     x: u32,
        //     y: bool,
        // }
        let types = to_named(&[ParamType::U32, ParamType::Bool]);
        let inner_enum_types = EnumVariants::new(types)?;
        let types = vec![ParamType::Enum {
            name: "".to_string(),
            enum_variants: inner_enum_types.clone(),
            generics: vec![],
        }];

        let data = [
            0, 0, 0, 0, 0, 0, 0, 0, // discriminant
            0, 0, 0, 42, // u32
        ];

        let decoded = ABIDecoder::default().decode_multiple(&types, data.as_slice())?;

        let expected = vec![Token::Enum(Box::new((0, Token::U32(42), inner_enum_types)))];
        assert_eq!(decoded, expected);

        Ok(())
    }

    #[test]
    fn decode_nested_struct() -> Result<()> {
        // struct Foo {
        //     x: u16,
        //     y: Bar,
        // }
        //
        // struct Bar {
        //     a: bool,
        //     b: u8[2],
        // }
        let fields = to_named(&[
            ParamType::U16,
            ParamType::Struct {
                name: "".to_string(),
                fields: to_named(&[
                    ParamType::Bool,
                    ParamType::Array(Box::new(ParamType::U8), 2),
                ]),
                generics: vec![],
            },
        ]);
        let nested_struct = ParamType::Struct {
            name: "".to_string(),
            fields,
            generics: vec![],
        };

        let data = [0, 10, 1, 1, 2];

        let decoded = ABIDecoder::default().decode(&nested_struct, data.as_slice())?;

        let my_nested_struct = vec![
            Token::U16(10),
            Token::Struct(vec![
                Token::Bool(true),
                Token::Array(vec![Token::U8(1), Token::U8(2)]),
            ]),
        ];

        assert_eq!(decoded, Token::Struct(my_nested_struct));

        Ok(())
    }

    #[test]
    fn decode_comprehensive() -> Result<()> {
        // struct Foo {
        //     x: u16,
        //     y: Bar,
        // }
        //
        // struct Bar {
        //     a: bool,
        //     b: u8[2],
        // }
        // fn: long_function(Foo,u8[2],b256,str[3],str)
        // Parameters
        let fields = to_named(&[
            ParamType::U16,
            ParamType::Struct {
                name: "".to_string(),
                fields: to_named(&[
                    ParamType::Bool,
                    ParamType::Array(Box::new(ParamType::U8), 2),
                ]),
                generics: vec![],
            },
        ]);
        let nested_struct = ParamType::Struct {
            name: "".to_string(),
            fields,
            generics: vec![],
        };

        let u8_arr = ParamType::Array(Box::new(ParamType::U8), 2);
        let b256 = ParamType::B256;

        let types = [nested_struct, u8_arr, b256];

        let bytes = [
            0, 10, // u16
            1, // bool
            1, 2, // array[u8;2]
            1, 2, // array[u8;2]
            213, 87, 156, 70, 223, 204, 127, 24, 32, 112, 19, 230, 91, 68, 228, 203, 78, 44, 34,
            152, 244, 172, 69, 123, 168, 248, 39, 67, 243, 30, 147, 11, // b256
        ];

        let decoded = ABIDecoder::default().decode_multiple(&types, bytes.as_slice())?;

        // Expected tokens
        let foo = Token::Struct(vec![
            Token::U16(10),
            Token::Struct(vec![
                Token::Bool(true),
                Token::Array(vec![Token::U8(1), Token::U8(2)]),
            ]),
        ]);

        let u8_arr = Token::Array(vec![Token::U8(1), Token::U8(2)]);

        let b256 = Token::B256([
            213, 87, 156, 70, 223, 204, 127, 24, 32, 112, 19, 230, 91, 68, 228, 203, 78, 44, 34,
            152, 244, 172, 69, 123, 168, 248, 39, 67, 243, 30, 147, 11,
        ]);

        let expected: Vec<Token> = vec![foo, u8_arr, b256];

        assert_eq!(decoded, expected);

        Ok(())
    }

    #[test]
    fn enums_with_all_unit_variants_are_decoded_from_one_word() -> Result<()> {
        let data = [0, 0, 0, 0, 0, 0, 0, 1];
        let types = to_named(&[ParamType::Unit, ParamType::Unit]);
        let enum_variants = EnumVariants::new(types)?;
        let enum_w_only_units = ParamType::Enum {
            name: "".to_string(),
            enum_variants: enum_variants.clone(),
            generics: vec![],
        };

        let result = ABIDecoder::default().decode(&enum_w_only_units, data.as_slice())?;

        let expected_enum = Token::Enum(Box::new((1, Token::Unit, enum_variants)));
        assert_eq!(result, expected_enum);

        Ok(())
    }

    #[test]
    fn out_of_bounds_discriminant_is_detected() -> Result<()> {
        let data = [0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 2];
        let types = to_named(&[ParamType::U64]);
        let enum_variants = EnumVariants::new(types)?;
        let enum_type = ParamType::Enum {
            name: "".to_string(),
            enum_variants,
            generics: vec![],
        };

        let result = ABIDecoder::default().decode(&enum_type, data.as_slice());

        let error = result.expect_err("should have resulted in an error");

        let expected_msg = "discriminant `1` doesn't point to any variant: ";
        assert!(matches!(error, Error::Other(str) if str.starts_with(expected_msg)));

        Ok(())
    }

    #[test]
    pub fn division_by_zero() {
        let param_type = Vec::<[u16; 0]>::param_type();
        let result = ABIDecoder::default().decode(&param_type, [].as_slice());
        assert!(matches!(result, Err(Error::IO(_))));
    }

    #[test]
    pub fn multiply_overflow_enum() {
        let result = ABIDecoder::default().decode(
            &Enum {
                name: "".to_string(),
                enum_variants: EnumVariants::new(to_named(&[
                    Array(Box::new(Array(Box::new(RawSlice), 8)), usize::MAX),
                    B256,
                    B256,
                    B256,
                    B256,
                    B256,
                    B256,
                    B256,
                    B256,
                    B256,
                    B256,
                ]))
                .unwrap(),
                generics: vec![U16],
            },
            [].as_slice(),
        );

        assert!(matches!(result, Err(Error::IO(_))));
    }

    #[test]
    pub fn multiply_overflow_arith() {
        let mut param_type: ParamType = U16;
        for _ in 0..50 {
            param_type = Array(Box::new(param_type), 8);
        }
        let result = ABIDecoder::default().decode(
            &Enum {
                name: "".to_string(),
                enum_variants: EnumVariants::new(to_named(&[param_type])).unwrap(),
                generics: vec![U16],
            },
            [].as_slice(),
        );
        assert!(matches!(result, Err(Error::IO(_))));
    }

    #[test]
    pub fn capacity_overflow() {
        let result = ABIDecoder::default().decode(
            &Array(Box::new(Array(Box::new(Tuple(vec![])), usize::MAX)), 1),
            [].as_slice(),
        );
        assert!(matches!(result, Err(Error::Codec(_))));
    }

    #[test]
    pub fn stack_overflow() {
        let mut param_type: ParamType = U16;
        for _ in 0..13500 {
            param_type = Vector(Box::new(param_type));
        }
        let result = ABIDecoder::default().decode(&param_type, [].as_slice());
        assert!(matches!(result, Err(Error::IO(_))));
    }

    #[test]
    pub fn capacity_malloc() {
        let param_type = Array(Box::new(U8), usize::MAX);
        let result = ABIDecoder::default().decode(&param_type, [].as_slice());
        assert!(matches!(result, Err(Error::IO(_))));
    }

    #[test]
    fn max_depth_surpassed() {
        const MAX_DEPTH: usize = 2;
        let config = DecoderConfig {
            max_depth: MAX_DEPTH,
            ..Default::default()
        };
        let msg = format!("depth limit `{MAX_DEPTH}` reached while decoding. Try increasing it");
        // for each nested enum so that it may read the discriminant
        let data = [0; MAX_DEPTH * WORD_SIZE];

        [nested_struct, nested_enum, nested_tuple, nested_array]
            .iter()
            .map(|fun| fun(MAX_DEPTH + 1))
            .for_each(|param_type| {
                assert_decoding_failed_w_data(config, &param_type, &msg, data.as_slice());
            })
    }

    #[test]
    fn depth_is_not_reached() {
        const MAX_DEPTH: usize = 3;
        const ACTUAL_DEPTH: usize = MAX_DEPTH - 1;

        // enough data to decode 2*ACTUAL_DEPTH enums (discriminant + u8 = 2*WORD_SIZE)
        let data = [0; 2 * ACTUAL_DEPTH * (WORD_SIZE * 2)];
        let config = DecoderConfig {
            max_depth: MAX_DEPTH,
            ..Default::default()
        };

        [nested_struct, nested_enum, nested_tuple, nested_array]
            .into_iter()
            .map(|fun| fun(ACTUAL_DEPTH))
            .map(|param_type| {
                // Wrapping everything in a structure so that we may check whether the depth is
                // decremented after finishing every struct field.
                ParamType::Struct {
                    name: "".to_string(),
                    fields: to_named(&[param_type.clone(), param_type]),
                    generics: vec![],
                }
            })
            .for_each(|param_type| {
                ABIDecoder::new(config)
                    .decode(&param_type, data.as_slice())
                    .unwrap();
            })
    }

    #[test]
    fn too_many_tokens() {
        let config = DecoderConfig {
            max_tokens: 3,
            ..Default::default()
        };
        {
            let data = [0; 3 * WORD_SIZE];
            let inner_param_types = vec![ParamType::U64; 3];
            for param_type in [
                ParamType::Struct {
                    name: "".to_string(),
                    fields: to_named(&inner_param_types),
                    generics: vec![],
                },
                ParamType::Tuple(inner_param_types.clone()),
                ParamType::Array(Box::new(ParamType::U64), 3),
            ] {
                assert_decoding_failed_w_data(
                    config,
                    &param_type,
                    "token limit `3` reached while decoding. Try increasing it",
                    &data,
                );
            }
        }

        {
            let data = [0, 0, 0, 0, 0, 0, 0, 3, 1, 2, 3];

            assert_decoding_failed_w_data(
                config,
                &ParamType::Vector(Box::new(ParamType::U8)),
                "token limit `3` reached while decoding. Try increasing it",
                &data,
            );
        }
    }

    #[test]
    fn token_count_is_being_reset_between_decodings() {
        // given
        let config = DecoderConfig {
            max_tokens: 3,
            ..Default::default()
        };

        let param_type = ParamType::Array(Box::new(ParamType::StringArray(0)), 2);

        let decoder = ABIDecoder::new(config);
        decoder.decode(&param_type, [].as_slice()).unwrap();

        // when
        let result = decoder.decode(&param_type, [].as_slice());

        // then
        result.expect("element count to be reset");
    }

    // Asserts that decoding `param_type` from `data` fails with a Codec error carrying `msg`.
    fn assert_decoding_failed_w_data(
        config: DecoderConfig,
        param_type: &ParamType,
        msg: &str,
        data: &[u8],
    ) {
        let decoder = ABIDecoder::new(config);

        let err = decoder.decode(param_type, data);

        let Err(Error::Codec(actual_msg)) = err else {
            panic!("expected a `Codec` error. Got: `{err:?}`");
        };
        assert_eq!(actual_msg, msg);
    }

    // Builds a struct nested `depth` levels deep.
    fn nested_struct(depth: usize) -> ParamType {
        let fields = if depth == 1 {
            vec![]
        } else {
            to_named(&[nested_struct(depth - 1)])
        };

        ParamType::Struct {
            name: "".to_string(),
            fields,
            generics: vec![],
        }
    }

    // Builds an enum nested `depth` levels deep.
    fn nested_enum(depth: usize) -> ParamType {
        let fields = if depth == 1 {
            to_named(&[ParamType::U8])
        } else {
            to_named(&[nested_enum(depth - 1)])
        };

        ParamType::Enum {
            name: "".to_string(),
            enum_variants: EnumVariants::new(fields).unwrap(),
            generics: vec![],
        }
    }

    // Builds a single-element array nested `depth` levels deep.
    fn nested_array(depth: usize) -> ParamType {
        let field = if depth == 1 {
            ParamType::U8
        } else {
            nested_array(depth - 1)
        };

        ParamType::Array(Box::new(field), 1)
    }

    // Builds a tuple nested `depth` levels deep.
    fn nested_tuple(depth: usize) -> ParamType {
        let fields = if depth == 1 {
            vec![ParamType::U8]
        } else {
            vec![nested_tuple(depth - 1)]
        };

        ParamType::Tuple(fields)
    }
}
rust
Apache-2.0
865e00c295de8b4a0a1ef7ac926c3c8266d5151b
2026-01-04T15:31:59.450823Z
false
FuelLabs/fuels-rs
https://github.com/FuelLabs/fuels-rs/blob/865e00c295de8b4a0a1ef7ac926c3c8266d5151b/packages/fuels-core/src/codec/abi_encoder.rs
packages/fuels-core/src/codec/abi_encoder.rs
//! ABI encoding: turns [`Token`]s into the byte layout defined by the Fuel ABI
//! spec, with configurable resource limits.
mod bounded_encoder;

use std::default::Default;

use crate::{
    codec::abi_encoder::bounded_encoder::BoundedEncoder,
    types::{Token, errors::Result},
};

/// Limits enforced during encoding, guarding against runaway recursion/size.
#[derive(Debug, Clone, Copy)]
pub struct EncoderConfig {
    /// Entering a struct, array, tuple, enum or vector increases the depth. Encoding will fail if
    /// the current depth becomes greater than `max_depth` configured here.
    pub max_depth: usize,
    /// Every encoded argument will increase the token count. Encoding will fail if the current
    /// token count becomes greater than `max_tokens` configured here.
    pub max_tokens: usize,
}

// ANCHOR: default_encoder_config
impl Default for EncoderConfig {
    fn default() -> Self {
        Self {
            max_depth: 45,
            max_tokens: 10_000,
        }
    }
}
// ANCHOR_END: default_encoder_config

/// Encodes [`Token`]s into bytes, applying the limits in [`EncoderConfig`].
#[derive(Default, Clone, Debug)]
pub struct ABIEncoder {
    pub config: EncoderConfig,
}

impl ABIEncoder {
    pub fn new(config: EncoderConfig) -> Self {
        Self { config }
    }

    /// Encodes `Token`s following the ABI specs defined
    /// [here](https://github.com/FuelLabs/fuel-specs/blob/master/specs/protocol/abi.md)
    pub fn encode(&self, tokens: &[Token]) -> Result<Vec<u8>> {
        // A fresh BoundedEncoder per call so the depth/token counters start at zero.
        BoundedEncoder::new(self.config).encode(tokens)
    }
}

#[cfg(test)]
mod tests {
    use std::slice;

    use super::*;
    use crate::{
        to_named,
        types::{
            StaticStringToken, U256,
            errors::Error,
            param_types::{EnumVariants, ParamType},
        },
    };

    #[test]
    fn encode_multiple_uint() -> Result<()> {
        let tokens = [
            Token::U8(u8::MAX),
            Token::U16(u16::MAX),
            Token::U32(u32::MAX),
            Token::U64(u64::MAX),
            Token::U128(u128::MAX),
            Token::U256(U256::MAX),
        ];

        let result = ABIEncoder::default().encode(&tokens)?;

        let expected = [
            255, // u8
            255, 255, // u16
            255, 255, 255, 255, // u32
            255, 255, 255, 255, 255, 255, 255, 255, // u64
            255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
            255, // u128
            255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
            255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, // u256
        ];

        assert_eq!(result, expected);

        Ok(())
    }

    #[test]
    fn encode_bool() -> Result<()> {
        let token = Token::Bool(true);

        let result = ABIEncoder::default().encode(&[token])?;

        let expected = [1];

        assert_eq!(result, expected);

        Ok(())
    }

    #[test]
    fn encode_b256() -> Result<()> {
        let data = [
            213, 87, 156, 70, 223, 204, 127, 24, 32, 112, 19, 230, 91, 68, 228, 203, 78, 44, 34,
            152, 244, 172, 69, 123, 168, 248, 39, 67, 243, 30, 147, 11,
        ];
        let token = Token::B256(data);

        let result = ABIEncoder::default().encode(&[token])?;

        assert_eq!(result, data);

        Ok(())
    }

    #[test]
    fn encode_bytes() -> Result<()> {
        let token = Token::Bytes([255, 0, 1, 2, 3, 4, 5].to_vec());

        let result = ABIEncoder::default().encode(&[token])?;

        let expected = [
            0, 0, 0, 0, 0, 0, 0, 7, // len
            255, 0, 1, 2, 3, 4, 5, // data
        ];

        assert_eq!(result, expected);

        Ok(())
    }

    #[test]
    fn encode_string() -> Result<()> {
        let token = Token::String("This is a full sentence".to_string());

        let result = ABIEncoder::default().encode(&[token])?;

        let expected = [
            0, 0, 0, 0, 0, 0, 0, 23, // len
            84, 104, 105, 115, 32, 105, 115, 32, 97, 32, 102, 117, 108, 108, 32, 115, 101, 110,
            116, 101, 110, 99, 101, //This is a full sentence
        ];

        assert_eq!(result, expected);

        Ok(())
    }

    #[test]
    fn encode_raw_slice() -> Result<()> {
        let token = Token::RawSlice([255, 0, 1, 2, 3, 4, 5].to_vec());

        let result = ABIEncoder::default().encode(&[token])?;

        let expected = [
            0, 0, 0, 0, 0, 0, 0, 7, // len
            255, 0, 1, 2, 3, 4, 5, // data
        ];

        assert_eq!(result, expected);

        Ok(())
    }

    #[test]
    fn encode_string_array() -> Result<()> {
        // Fixed-size strings are encoded without a length prefix.
        let token = Token::StringArray(StaticStringToken::new(
            "This is a full sentence".into(),
            Some(23),
        ));

        let result = ABIEncoder::default().encode(&[token])?;

        let expected = [
            84, 104, 105, 115, 32, 105, 115, 32, 97, 32, 102, 117, 108, 108, 32, 115, 101, 110,
            116, 101, 110, 99, 101, //This is a full sentence
        ];

        assert_eq!(result, expected);

        Ok(())
    }

    #[test]
    fn encode_string_slice() -> Result<()> {
        let token = Token::StringSlice(StaticStringToken::new(
            "This is a full sentence".into(),
            None,
        ));

        let result = ABIEncoder::default().encode(&[token])?;

        let expected = [
            0, 0, 0, 0, 0, 0, 0, 23, // len
            84, 104, 105, 115, 32, 105, 115, 32, 97, 32, 102, 117, 108, 108, 32, 115, 101, 110,
            116, 101, 110, 99, 101, //This is a full sentence
        ];

        assert_eq!(result, expected);

        Ok(())
    }

    #[test]
    fn encode_tuple() -> Result<()> {
        let token = Token::Tuple(vec![Token::U32(255), Token::Bool(true)]);

        let result = ABIEncoder::default().encode(&[token])?;

        let expected = [
            0, 0, 0, 255, //u32
            1,   //bool
        ];

        assert_eq!(result, expected);

        Ok(())
    }

    #[test]
    fn encode_array() -> Result<()> {
        // NOTE(review): this test builds a `Token::Tuple`, not a `Token::Array`.
        // Both encode to the same bytes, but the token kind looks unintended
        // for a test named `encode_array` — confirm against upstream intent.
        let token = Token::Tuple(vec![Token::U32(255), Token::U32(128)]);

        let result = ABIEncoder::default().encode(&[token])?;

        let expected = [
            0, 0, 0, 255, //u32
            0, 0, 0, 128, //u32
        ];

        assert_eq!(result, expected);

        Ok(())
    }

    #[test]
    fn encode_enum_with_deeply_nested_types() -> Result<()> {
        /*
        enum DeeperEnum {
            v1: bool,
            v2: str[10]
        }
        */
        let types = to_named(&[ParamType::Bool, ParamType::StringArray(10)]);
        let deeper_enum_variants = EnumVariants::new(types)?;
        let deeper_enum_token =
            Token::StringArray(StaticStringToken::new("0123456789".into(), Some(10)));

        /*
        struct StructA {
            some_enum: DeeperEnum
            some_number: u32
        }
        */
        let fields = to_named(&[
            ParamType::Enum {
                name: "".to_string(),
                enum_variants: deeper_enum_variants.clone(),
                generics: vec![],
            },
            ParamType::Bool,
        ]);
        let struct_a_type = ParamType::Struct {
            name: "".to_string(),
            fields,
            generics: vec![],
        };

        // NOTE(review): the second field is declared `ParamType::Bool` above but a
        // `Token::U32` is supplied here; the encoder doesn't cross-check tokens
        // against param types, so this passes — presumably unintended; confirm.
        let struct_a_token = Token::Struct(vec![
            Token::Enum(Box::new((1, deeper_enum_token, deeper_enum_variants))),
            Token::U32(11332),
        ]);

        /*
        enum TopLevelEnum {
            v1: StructA,
            v2: bool,
            v3: u64
        }
        */
        let types = to_named(&[struct_a_type, ParamType::Bool, ParamType::U64]);
        let top_level_enum_variants = EnumVariants::new(types)?;
        let top_level_enum_token =
            Token::Enum(Box::new((0, struct_a_token, top_level_enum_variants)));

        let result = ABIEncoder::default().encode(slice::from_ref(&top_level_enum_token))?;

        let expected = [
            0, 0, 0, 0, 0, 0, 0, 0, // TopLevelEnum::v1 discriminant
            0, 0, 0, 0, 0, 0, 0, 1, // DeeperEnum::v2 discriminant
            48, 49, 50, 51, 52, 53, 54, 55, 56, 57, // str[10]
            0, 0, 44, 68, // StructA.some_number
        ];

        assert_eq!(result, expected);

        Ok(())
    }

    #[test]
    fn encode_nested_structs() -> Result<()> {
        let token = Token::Struct(vec![
            Token::U16(10),
            Token::Struct(vec![
                Token::Bool(true),
                Token::Array(vec![Token::U8(1), Token::U8(2)]),
            ]),
        ]);

        let result = ABIEncoder::default().encode(&[token])?;

        let expected = [
            0, 10, // u16
            1,  // bool
            1, 2, // [u8, u8]
        ];

        assert_eq!(result, expected);

        Ok(())
    }

    #[test]
    fn encode_comprehensive() -> Result<()> {
        let foo = Token::Struct(vec![
            Token::U16(10),
            Token::Struct(vec![
                Token::Bool(true),
                Token::Array(vec![Token::U8(1), Token::U8(2)]),
            ]),
        ]);
        let arr_u8 = Token::Array(vec![Token::U8(1), Token::U8(2)]);
        let b256 = Token::B256([255; 32]);
        let str_arr = Token::StringArray(StaticStringToken::new(
            "This is a full sentence".into(),
            Some(23),
        ));
        let tokens = vec![foo, arr_u8, b256, str_arr];

        let result = ABIEncoder::default().encode(&tokens)?;

        let expected = [
            0, 10, // foo.x == 10u16
            1,  // foo.y.a == true
            1,  // foo.y.b.0 == 1u8
            2,  // foo.y.b.1 == 2u8
            1,  // u8[2].0 == 1u8
            2,  // u8[2].0 == 2u8
            255, 255, 255, 255, 255, 255, 255, 255, // b256
            255, 255, 255, 255, 255, 255, 255, 255, // b256
            255, 255, 255, 255, 255, 255, 255, 255, // b256
            255, 255, 255, 255, 255, 255, 255, 255, // b256
            84, 104, 105, 115, 32, 105, 115, 32, 97, 32, 102, 117, 108, 108, 32, 115, 101, 110,
            116, 101, 110, 99, 101, // str[23]
        ];

        assert_eq!(result, expected);

        Ok(())
    }

    #[test]
    fn enums_with_only_unit_variants_are_encoded_in_one_word() -> Result<()> {
        let expected = [0, 0, 0, 0, 0, 0, 0, 1];

        let types = to_named(&[ParamType::Unit, ParamType::Unit]);
        let enum_selector = Box::new((1, Token::Unit, EnumVariants::new(types)?));

        let actual = ABIEncoder::default().encode(&[Token::Enum(enum_selector)])?;

        assert_eq!(actual, expected);

        Ok(())
    }

    #[test]
    fn vec_in_enum() -> Result<()> {
        // arrange
        let types = to_named(&[ParamType::B256, ParamType::Vector(Box::new(ParamType::U64))]);
        let variants = EnumVariants::new(types)?;
        let selector = (1, Token::Vector(vec![Token::U64(5)]), variants);
        let token = Token::Enum(Box::new(selector));

        // act
        let result = ABIEncoder::default().encode(&[token])?;

        // assert
        let expected = [
            0, 0, 0, 0, 0, 0, 0, 1, // enum discriminant
            0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 5, // vec[len, u64]
        ];

        assert_eq!(result, expected);

        Ok(())
    }

    #[test]
    fn enum_in_vec() -> Result<()> {
        // arrange
        let types = to_named(&[ParamType::B256, ParamType::U8]);
        let variants = EnumVariants::new(types)?;
        let selector = (1, Token::U8(8), variants);
        let enum_token = Token::Enum(Box::new(selector));

        let vec_token = Token::Vector(vec![enum_token]);

        // act
        let result = ABIEncoder::default().encode(&[vec_token])?;

        // assert
        let expected = [
            0, 0, 0, 0, 0, 0, 0, 1, // vec len
            0, 0, 0, 0, 0, 0, 0, 1, 8, // enum discriminant and u8 value
        ];

        assert_eq!(result, expected);

        Ok(())
    }

    #[test]
    fn vec_in_struct() -> Result<()> {
        // arrange
        let token = Token::Struct(vec![Token::Vector(vec![Token::U64(5)]), Token::U8(9)]);

        // act
        let result = ABIEncoder::default().encode(&[token])?;

        // assert
        let expected = [
            0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 5, // vec[len, u64]
            9, // u8
        ];

        assert_eq!(result, expected);

        Ok(())
    }

    #[test]
    fn vec_in_vec() -> Result<()> {
        // arrange
        let token = Token::Vector(vec![Token::Vector(vec![Token::U8(5), Token::U8(6)])]);

        // act
        let result = ABIEncoder::default().encode(&[token])?;

        // assert
        let expected = [
            0, 0, 0, 0, 0, 0, 0, 1, // vec1 len
            0, 0, 0, 0, 0, 0, 0, 2, 5, 6, // vec2 [len, u8, u8]
        ];

        assert_eq!(result, expected);

        Ok(())
    }

    #[test]
    fn max_depth_surpassed() {
        const MAX_DEPTH: usize = 2;
        let config = EncoderConfig {
            max_depth: MAX_DEPTH,
            ..Default::default()
        };
        let msg = "depth limit `2` reached while encoding. Try increasing it".to_string();

        [nested_struct, nested_enum, nested_tuple, nested_array]
            .iter()
            .map(|fun| fun(MAX_DEPTH + 1))
            .for_each(|token| {
                assert_encoding_failed(config, token, &msg);
            });
    }

    // Asserts that encoding `token` fails with a Codec error carrying `msg`.
    fn assert_encoding_failed(config: EncoderConfig, token: Token, msg: &str) {
        let encoder = ABIEncoder::new(config);

        let err = encoder.encode(&[token]);

        let Err(Error::Codec(actual_msg)) = err else {
            panic!("expected a Codec error. Got: `{err:?}`");
        };
        assert_eq!(actual_msg, msg);
    }

    // Builds a struct token nested `depth` levels deep.
    fn nested_struct(depth: usize) -> Token {
        let fields = if depth == 1 {
            vec![Token::U8(255), Token::String("bloopblip".to_string())]
        } else {
            vec![nested_struct(depth - 1)]
        };

        Token::Struct(fields)
    }

    // Builds an enum token nested `depth` levels deep.
    fn nested_enum(depth: usize) -> Token {
        if depth == 0 {
            return Token::U8(255);
        }

        let inner_enum = nested_enum(depth - 1);

        // Create a basic EnumSelector for the current level (the `EnumVariants` is not
        // actually accurate but it's not used for encoding)
        let selector = (
            0u64,
            inner_enum,
            EnumVariants::new(to_named(&[ParamType::U64])).unwrap(),
        );

        Token::Enum(Box::new(selector))
    }

    // Builds an array token nested `depth` levels deep.
    fn nested_array(depth: usize) -> Token {
        if depth == 1 {
            Token::Array(vec![Token::U8(255)])
        } else {
            Token::Array(vec![nested_array(depth - 1)])
        }
    }

    // Builds a tuple token nested `depth` levels deep.
    fn nested_tuple(depth: usize) -> Token {
        let fields = if depth == 1 {
            vec![Token::U8(255), Token::String("bloopblip".to_string())]
        } else {
            vec![nested_tuple(depth - 1)]
        };

        Token::Tuple(fields)
    }
}
rust
Apache-2.0
865e00c295de8b4a0a1ef7ac926c3c8266d5151b
2026-01-04T15:31:59.450823Z
false
FuelLabs/fuels-rs
https://github.com/FuelLabs/fuels-rs/blob/865e00c295de8b4a0a1ef7ac926c3c8266d5151b/packages/fuels-core/src/codec/abi_encoder/bounded_encoder.rs
packages/fuels-core/src/codec/abi_encoder/bounded_encoder.rs
use crate::{
    codec::{
        EncoderConfig,
        utils::{CodecDirection, CounterWithLimit},
    },
    types::{EnumSelector, StaticStringToken, Token, U256, errors::Result},
};

/// Recursively encodes [`Token`]s into bytes while enforcing the depth and
/// token-count budgets taken from an [`EncoderConfig`].
pub(crate) struct BoundedEncoder {
    depth_tracker: CounterWithLimit,
    token_tracker: CounterWithLimit,
}

impl BoundedEncoder {
    /// Builds an encoder whose counters start at zero with the limits from `config`.
    pub(crate) fn new(config: EncoderConfig) -> Self {
        Self {
            depth_tracker: CounterWithLimit::new(
                config.max_depth,
                "depth",
                CodecDirection::Encoding,
            ),
            token_tracker: CounterWithLimit::new(
                config.max_tokens,
                "token",
                CodecDirection::Encoding,
            ),
        }
    }

    /// Encodes every token in `tokens`, concatenating their byte representations.
    pub fn encode(&mut self, tokens: &[Token]) -> Result<Vec<u8>> {
        let mut buffer = vec![];
        for token in tokens {
            buffer.extend(self.encode_token(token)?);
        }
        Ok(buffer)
    }

    /// Runs `encoder` with the depth counter bumped, restoring the counter
    /// afterwards even when encoding fails.
    fn run_w_depth_tracking(
        &mut self,
        encoder: impl FnOnce(&mut Self) -> Result<Vec<u8>>,
    ) -> Result<Vec<u8>> {
        self.depth_tracker.increase()?;

        let outcome = encoder(self);

        self.depth_tracker.decrease();

        outcome
    }

    /// Encodes a single token, charging it against the token budget first.
    fn encode_token(&mut self, token: &Token) -> Result<Vec<u8>> {
        self.token_tracker.increase()?;

        Ok(match token {
            Token::Unit => vec![],
            Token::Bool(flag) => vec![u8::from(*flag)],
            Token::U8(value) => vec![*value],
            Token::U16(value) => value.to_be_bytes().to_vec(),
            Token::U32(value) => value.to_be_bytes().to_vec(),
            Token::U64(value) => value.to_be_bytes().to_vec(),
            Token::U128(value) => value.to_be_bytes().to_vec(),
            Token::U256(value) => Self::encode_u256(*value),
            Token::B256(bits) => bits.to_vec(),
            Token::Bytes(bytes) => Self::encode_bytes(bytes.to_vec())?,
            Token::String(text) => Self::encode_bytes(text.clone().into_bytes())?,
            Token::RawSlice(bytes) => Self::encode_bytes(bytes.clone())?,
            Token::StringArray(text) => Self::encode_string_array(text)?,
            Token::StringSlice(text) => Self::encode_string_slice(text)?,
            // Compound tokens recurse, so they are charged against the depth budget.
            Token::Tuple(items) => self.run_w_depth_tracking(|ctx| ctx.encode(items))?,
            Token::Array(items) => self.run_w_depth_tracking(|ctx| ctx.encode(items))?,
            Token::Vector(items) => self.run_w_depth_tracking(|ctx| ctx.encode_vector(items))?,
            Token::Struct(fields) => self.run_w_depth_tracking(|ctx| ctx.encode(fields))?,
            Token::Enum(selector) => self.run_w_depth_tracking(|ctx| ctx.encode_enum(selector))?,
        })
    }

    /// Big-endian, 32-byte representation of a `U256`.
    fn encode_u256(value: U256) -> Vec<u8> {
        let mut buffer = [0u8; 32];
        value.to_big_endian(&mut buffer);
        buffer.to_vec()
    }

    /// Length-prefixed byte encoding: `[len as u64 BE] ++ data`.
    fn encode_bytes(data: Vec<u8>) -> Result<Vec<u8>> {
        let mut encoded = Self::encode_length(data.len() as u64);
        encoded.extend(data);
        Ok(encoded)
    }

    /// Fixed-size strings carry no length prefix — raw UTF-8 bytes only.
    fn encode_string_array(text: &StaticStringToken) -> Result<Vec<u8>> {
        Ok(text.get_encodable_str()?.as_bytes().to_vec())
    }

    /// String slices are length-prefixed like `Bytes`.
    fn encode_string_slice(text: &StaticStringToken) -> Result<Vec<u8>> {
        Self::encode_bytes(text.get_encodable_str()?.as_bytes().to_vec())
    }

    /// Vectors encode as element count followed by the concatenated elements.
    fn encode_vector(&mut self, items: &[Token]) -> Result<Vec<u8>> {
        let payload = self.encode(items)?;
        let mut encoded = Self::encode_length(items.len() as u64);
        encoded.extend(payload);
        Ok(encoded)
    }

    /// Enums encode as the variant discriminant followed by the inner token.
    fn encode_enum(&mut self, selector: &EnumSelector) -> Result<Vec<u8>> {
        let (discriminant, inner_token, _) = selector;

        let mut encoded = Self::encode_discriminant(*discriminant);
        encoded.extend(self.encode_token(inner_token)?);

        Ok(encoded)
    }

    fn encode_length(len: u64) -> Vec<u8> {
        len.to_be_bytes().to_vec()
    }

    fn encode_discriminant(discriminant: u64) -> Vec<u8> {
        discriminant.to_be_bytes().to_vec()
    }
}
rust
Apache-2.0
865e00c295de8b4a0a1ef7ac926c3c8266d5151b
2026-01-04T15:31:59.450823Z
false
FuelLabs/fuels-rs
https://github.com/FuelLabs/fuels-rs/blob/865e00c295de8b4a0a1ef7ac926c3c8266d5151b/packages/fuels-core/src/codec/abi_decoder/bounded_decoder.rs
packages/fuels-core/src/codec/abi_decoder/bounded_decoder.rs
use crate::{
    codec::{
        DecoderConfig,
        utils::{CodecDirection, CounterWithLimit},
    },
    types::{
        StaticStringToken, Token, U256,
        errors::{Result, error},
        param_types::{EnumVariants, NamedParamType, ParamType},
    },
};
use std::iter::repeat_n;
use std::{io::Read, str};

/// Is used to decode bytes into `Token`s from which types implementing `Tokenizable` can be
/// instantiated. Implements decoding limits to control resource usage.
pub(crate) struct BoundedDecoder {
    // Bounds recursion into nested container types (tuples, structs, ...).
    depth_tracker: CounterWithLimit,
    // Bounds the total number of tokens produced by this decoder instance.
    token_tracker: CounterWithLimit,
}

impl BoundedDecoder {
    /// Creates a decoder whose depth and token limits come from `config`.
    pub(crate) fn new(config: DecoderConfig) -> Self {
        let depth_tracker =
            CounterWithLimit::new(config.max_depth, "depth", CodecDirection::Decoding);
        let token_tracker =
            CounterWithLimit::new(config.max_tokens, "token", CodecDirection::Decoding);
        Self {
            depth_tracker,
            token_tracker,
        }
    }

    /// Decodes a single value of type `param_type` from `bytes`.
    pub(crate) fn decode<R: Read>(
        &mut self,
        param_type: &ParamType,
        bytes: &mut R,
    ) -> Result<Token> {
        self.decode_param(param_type, bytes)
    }

    /// Decodes one value per entry of `param_types`, consumed in order from `bytes`.
    pub(crate) fn decode_multiple<R: Read>(
        &mut self,
        param_types: &[ParamType],
        bytes: &mut R,
    ) -> Result<Vec<Token>> {
        self.decode_params(param_types, bytes)
    }

    /// Runs `decoder` with the depth counter raised for its duration; the
    /// counter is lowered again even when `decoder` returns an error.
    fn run_w_depth_tracking(
        &mut self,
        decoder: impl FnOnce(&mut Self) -> Result<Token>,
    ) -> Result<Token> {
        self.depth_tracker.increase()?;
        let res = decoder(self);
        self.depth_tracker.decrease();
        res
    }

    /// Decodes one token. Every call counts against the token limit;
    /// container types additionally count against the depth limit while
    /// their contents are decoded.
    fn decode_param<R: Read>(&mut self, param_type: &ParamType, bytes: &mut R) -> Result<Token> {
        self.token_tracker.increase()?;
        match param_type {
            ParamType::Unit => Ok(Token::Unit),
            // Any nonzero byte reads as `true`.
            ParamType::Bool => decode(bytes, |[value]| Token::Bool(value != 0)),
            ParamType::U8 => decode(bytes, |[value]| Token::U8(value)),
            // Multi-byte integers are read big-endian.
            ParamType::U16 => decode(bytes, |value| Token::U16(u16::from_be_bytes(value))),
            ParamType::U32 => decode(bytes, |value| Token::U32(u32::from_be_bytes(value))),
            ParamType::U64 => decode(bytes, |value| Token::U64(u64::from_be_bytes(value))),
            ParamType::U128 => decode(bytes, |value| Token::U128(u128::from_be_bytes(value))),
            ParamType::U256 => decode(bytes, |value| Token::U256(U256::from(value))),
            ParamType::B256 => decode(bytes, Token::B256),
            ParamType::Bytes => Ok(Token::Bytes(decode_slice(bytes)?)),
            ParamType::String => Self::decode_std_string(bytes),
            ParamType::RawSlice => Ok(Token::RawSlice(decode_slice(bytes)?)),
            ParamType::StringArray(length) => Self::decode_string_array(bytes, *length),
            ParamType::StringSlice => Self::decode_string_slice(bytes),
            // Container types recurse, so wrap them in depth tracking.
            ParamType::Tuple(param_types) => {
                self.run_w_depth_tracking(|ctx| ctx.decode_tuple(param_types, bytes))
            }
            ParamType::Array(param_type, length) => {
                self.run_w_depth_tracking(|ctx| ctx.decode_array(param_type, bytes, *length))
            }
            ParamType::Vector(param_type) => {
                self.run_w_depth_tracking(|ctx| ctx.decode_vector(param_type, bytes))
            }
            ParamType::Struct { fields, .. } => {
                self.run_w_depth_tracking(|ctx| ctx.decode_struct(fields, bytes))
            }
            ParamType::Enum { enum_variants, .. } => {
                self.run_w_depth_tracking(|ctx| ctx.decode_enum(enum_variants, bytes))
            }
        }
    }

    /// Decodes a length-prefixed UTF-8 string.
    fn decode_std_string<R: Read>(bytes: &mut R) -> Result<Token> {
        let data = decode_slice(bytes)?;
        let string = str::from_utf8(&data)?.to_string();
        Ok(Token::String(string))
    }

    /// Decodes a fixed-size string — `length` comes from the type, so there
    /// is no length prefix in the byte stream.
    fn decode_string_array<R: Read>(bytes: &mut R, length: usize) -> Result<Token> {
        let data = decode_sized(bytes, length)?;
        let decoded = str::from_utf8(&data)?.to_string();
        Ok(Token::StringArray(StaticStringToken::new(
            decoded,
            Some(length),
        )))
    }

    /// Decodes a length-prefixed string slice.
    fn decode_string_slice<R: Read>(bytes: &mut R) -> Result<Token> {
        let data = decode_slice(bytes)?;
        let decoded = str::from_utf8(&data)?.to_string();
        Ok(Token::StringSlice(StaticStringToken::new(decoded, None)))
    }

    /// Decodes each tuple element in order.
    fn decode_tuple<R: Read>(&mut self, param_types: &[ParamType], bytes: &mut R) -> Result<Token> {
        Ok(Token::Tuple(self.decode_params(param_types, bytes)?))
    }

    /// Decodes `length` consecutive elements of the same type.
    fn decode_array<R: Read>(
        &mut self,
        param_type: &ParamType,
        bytes: &mut R,
        length: usize,
    ) -> Result<Token> {
        Ok(Token::Array(
            self.decode_params(repeat_n(param_type, length), bytes)?,
        ))
    }

    /// Decodes a vector: element count prefix, then that many elements.
    fn decode_vector<R: Read>(&mut self, param_type: &ParamType, bytes: &mut R) -> Result<Token> {
        let length = decode_len(bytes)?;
        Ok(Token::Vector(
            self.decode_params(repeat_n(param_type, length), bytes)?,
        ))
    }

    /// Decodes each struct field in declaration order (field names are not
    /// part of the encoding).
    fn decode_struct<R: Read>(
        &mut self,
        fields: &[NamedParamType],
        bytes: &mut R,
    ) -> Result<Token> {
        Ok(Token::Struct(
            self.decode_params(fields.iter().map(|(_, pt)| pt), bytes)?,
        ))
    }

    /// Decodes an enum: the u64 discriminant selects the variant, then the
    /// variant's payload is decoded.
    fn decode_enum<R: Read>(
        &mut self,
        enum_variants: &EnumVariants,
        bytes: &mut R,
    ) -> Result<Token> {
        let discriminant = decode(bytes, u64::from_be_bytes)?;
        let (_, selected_variant) = enum_variants.select_variant(discriminant)?;
        let decoded = self.decode_param(selected_variant, bytes)?;
        Ok(Token::Enum(Box::new((
            discriminant,
            decoded,
            enum_variants.clone(),
        ))))
    }

    /// Decodes one token per param type, consumed in order from `bytes`.
    fn decode_params<'a, R: Read>(
        &mut self,
        param_types: impl IntoIterator<Item = &'a ParamType>,
        bytes: &mut R,
    ) -> Result<Vec<Token>> {
        let mut tokens = vec![];
        for param_type in param_types {
            tokens.push(self.decode_param(param_type, bytes)?);
        }
        Ok(tokens)
    }
}

/// Decodes a fixed-size array of bytes using a converter function.
fn decode<const SIZE: usize, R: Read, Out>(
    bytes: &mut R,
    f: impl FnOnce([u8; SIZE]) -> Out,
) -> Result<Out> {
    let mut buffer = [0u8; SIZE];
    bytes.read_exact(&mut buffer)?;
    Ok(f(buffer))
}

/// Reads a byte array with known size.
fn decode_sized<R: Read>(bytes: &mut R, len: usize) -> Result<Vec<u8>> {
    let mut data = vec![0; len];
    bytes.read_exact(&mut data)?;
    Ok(data)
}

/// Decodes a length prefix.
fn decode_len<R: Read>(bytes: &mut R) -> Result<usize> {
    let len_u64 = decode(bytes, u64::from_be_bytes)?;
    let len: usize = len_u64
        .try_into()
        .map_err(|_| error!(Other, "could not convert `u64` to `usize`"))?;
    Ok(len)
}

/// Decodes a size-prefixed slice.
// NOTE(review): `len` is taken from the input before reading, so a malicious
// length prefix drives a large allocation here — the token/depth limits do not
// bound it. Worth confirming upstream callers cap input size.
fn decode_slice<R: Read>(bytes: &mut R) -> Result<Vec<u8>> {
    let len = decode_len(bytes)?;
    let mut data = vec![0; len];
    bytes.read_exact(&mut data)?;
    Ok(data)
}
rust
Apache-2.0
865e00c295de8b4a0a1ef7ac926c3c8266d5151b
2026-01-04T15:31:59.450823Z
false
FuelLabs/fuels-rs
https://github.com/FuelLabs/fuels-rs/blob/865e00c295de8b4a0a1ef7ac926c3c8266d5151b/packages/fuels-core/src/codec/abi_decoder/decode_as_debug_str.rs
packages/fuels-core/src/codec/abi_decoder/decode_as_debug_str.rs
use std::iter::zip;

use crate::types::{
    Token,
    errors::{Result, error},
    param_types::ParamType,
};

/// Renders every token in `tokens` as `inner_type` and joins the pieces with `join_str`.
fn inner_types_debug(tokens: &[Token], inner_type: &ParamType, join_str: &str) -> Result<String> {
    let inner_types_log = tokens
        .iter()
        .map(|token| decode_as_debug_str(inner_type, token))
        .collect::<Result<Vec<_>>>()?
        .join(join_str);
    Ok(inner_types_log)
}

/// Produces a `Debug`-style string for `token`, using `param_type` to pick
/// the rendering (the two must describe the same value).
///
/// # Errors
/// Fails when `param_type` and `token` do not match, or when an enum
/// discriminant selects no variant.
pub(crate) fn decode_as_debug_str(param_type: &ParamType, token: &Token) -> Result<String> {
    let result = match (param_type, token) {
        (ParamType::Unit, Token::Unit) => "()".to_string(),
        (ParamType::Bool, Token::Bool(val)) => val.to_string(),
        (ParamType::U8, Token::U8(val)) => val.to_string(),
        (ParamType::U16, Token::U16(val)) => val.to_string(),
        (ParamType::U32, Token::U32(val)) => val.to_string(),
        (ParamType::U64, Token::U64(val)) => val.to_string(),
        (ParamType::U128, Token::U128(val)) => val.to_string(),
        (ParamType::U256, Token::U256(val)) => val.to_string(),
        (ParamType::B256, Token::B256(val)) => {
            format!("Bits256({val:?})")
        }
        (ParamType::Bytes, Token::Bytes(val)) => {
            format!("Bytes({val:?})")
        }
        (ParamType::String, Token::String(val)) => val.clone(),
        (ParamType::RawSlice, Token::RawSlice(val)) => {
            format!("RawSlice({val:?})")
        }
        (ParamType::StringArray(..), Token::StringArray(str_token)) => {
            format!("SizedAsciiString {{ data: \"{}\" }}", str_token.data)
        }
        (ParamType::StringSlice, Token::StringSlice(str_token)) => {
            format!("AsciiString {{ data: \"{}\" }}", str_token.data)
        }
        // Tuples pair each element type with its token positionally.
        (ParamType::Tuple(types), Token::Tuple(tokens)) => {
            let elements = zip(types, tokens)
                .map(|(ptype, token)| decode_as_debug_str(ptype, token))
                .collect::<Result<Vec<_>>>()?
                .join(", ");
            format!("({elements})")
        }
        (ParamType::Array(inner_type, _), Token::Array(tokens)) => {
            let elements = inner_types_debug(tokens, inner_type, ", ")?;
            format!("[{elements}]")
        }
        (ParamType::Vector(inner_type), Token::Vector(tokens)) => {
            let elements = inner_types_debug(tokens, inner_type, ", ")?;
            format!("[{elements}]")
        }
        // Structs render as `Name { field: value, ... }`.
        (ParamType::Struct { name, fields, .. }, Token::Struct(field_tokens)) => {
            let fields = zip(fields, field_tokens)
                .map(|((field_name, param_type), token)| -> Result<_> {
                    Ok(format!(
                        "{field_name}: {}",
                        decode_as_debug_str(param_type, token)?
                    ))
                })
                .collect::<Result<Vec<_>>>()?
                .join(", ");
            format!("{name} {{ {fields} }}")
        }
        // Enums render as `Variant(payload)`; unit payloads drop the parens.
        (ParamType::Enum { .. }, Token::Enum(selector)) => {
            let (discriminant, token, variants) = selector.as_ref();
            let (variant_name, variant_param_type) = variants.select_variant(*discriminant)?;
            let variant_str = decode_as_debug_str(variant_param_type, token)?;
            let variant_str = if variant_str == "()" {
                "".into()
            } else {
                format!("({variant_str})")
            };
            format!("{variant_name}{variant_str}")
        }
        _ => {
            return Err(error!(
                Codec,
                "could not decode debug from param type: `{param_type:?}` and token: `{token:?}`"
            ));
        }
    };

    Ok(result)
}

#[cfg(test)]
mod tests {
    use crate::{
        codec::ABIDecoder,
        traits::Parameterize,
        types::{
            AsciiString, Bits256, Bytes, EvmAddress, RawSlice, SizedAsciiString, U256,
            errors::Result,
        },
    };

    // Checks that decoding raw bytes as a debug string matches the `Debug`
    // (or `Display`, for `String`) output of the corresponding Rust value.
    #[test]
    fn param_type_decode_debug() -> Result<()> {
        let decoder = ABIDecoder::default();
        {
            assert_eq!(
                format!("{:?}", true),
                decoder.decode_as_debug_str(&bool::param_type(), [1].as_slice())?
            );
            assert_eq!(
                format!("{:?}", 128u8),
                decoder.decode_as_debug_str(&u8::param_type(), [128].as_slice())?
            );
            assert_eq!(
                format!("{:?}", 256u16),
                decoder.decode_as_debug_str(&u16::param_type(), [1, 0].as_slice())?
            );
            assert_eq!(
                format!("{:?}", 512u32),
                decoder.decode_as_debug_str(&u32::param_type(), [0, 0, 2, 0].as_slice())?
            );
            assert_eq!(
                format!("{:?}", 1024u64),
                decoder
                    .decode_as_debug_str(&u64::param_type(), [0, 0, 0, 0, 0, 0, 4, 0].as_slice())?
            );
            assert_eq!(
                format!("{:?}", 1024u128),
                decoder.decode_as_debug_str(
                    &u128::param_type(),
                    [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 0].as_slice()
                )?
            );
            assert_eq!(
                format!("{:?}", U256::from(2048)),
                decoder.decode_as_debug_str(
                    &U256::param_type(),
                    [
                        0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
                        0, 0, 0, 0, 0, 8, 0
                    ]
                    .as_slice()
                )?
            );
        }
        {
            let bytes = [
                239, 134, 175, 169, 105, 108, 240, 220, 99, 133, 226, 196, 7, 166, 225, 89, 161,
                16, 60, 239, 183, 226, 174, 6, 54, 251, 51, 211, 203, 42, 158, 74,
            ];
            let bits256 = Bits256(bytes);

            assert_eq!(
                format!("{bits256:?}"),
                decoder.decode_as_debug_str(
                    &Bits256::param_type(),
                    [
                        239, 134, 175, 169, 105, 108, 240, 220, 99, 133, 226, 196, 7, 166, 225,
                        89, 161, 16, 60, 239, 183, 226, 174, 6, 54, 251, 51, 211, 203, 42, 158, 74
                    ]
                    .as_slice()
                )?
            );
            assert_eq!(
                format!("{:?}", Bytes(bytes.to_vec())),
                decoder.decode_as_debug_str(
                    &Bytes::param_type(),
                    [
                        0, 0, 0, 0, 0, 0, 0, 32, 239, 134, 175, 169, 105, 108, 240, 220, 99, 133,
                        226, 196, 7, 166, 225, 89, 161, 16, 60, 239, 183, 226, 174, 6, 54, 251,
                        51, 211, 203, 42, 158, 74
                    ]
                    .as_slice()
                )?
            );
            assert_eq!(
                format!("{:?}", RawSlice(bytes.to_vec())),
                decoder.decode_as_debug_str(
                    &RawSlice::param_type(),
                    [
                        0, 0, 0, 0, 0, 0, 0, 32, 239, 134, 175, 169, 105, 108, 240, 220, 99, 133,
                        226, 196, 7, 166, 225, 89, 161, 16, 60, 239, 183, 226, 174, 6, 54, 251,
                        51, 211, 203, 42, 158, 74
                    ]
                    .as_slice()
                )?
            );
            assert_eq!(
                format!("{:?}", EvmAddress::from(bits256)),
                decoder.decode_as_debug_str(
                    &EvmAddress::param_type(),
                    [
                        0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 7, 166, 225, 89, 161, 16, 60, 239,
                        183, 226, 174, 6, 54, 251, 51, 211, 203, 42, 158, 74
                    ]
                    .as_slice()
                )?
            );
        }
        {
            assert_eq!(
                format!("{:?}", AsciiString::new("Fuel".to_string())?),
                decoder.decode_as_debug_str(
                    &AsciiString::param_type(),
                    [0, 0, 0, 0, 0, 0, 0, 4, 70, 117, 101, 108].as_slice()
                )?
            );
            assert_eq!(
                format!("{:?}", SizedAsciiString::<4>::new("Fuel".to_string())?),
                decoder.decode_as_debug_str(
                    &SizedAsciiString::<4>::param_type(),
                    [70, 117, 101, 108, 0, 0, 0, 0].as_slice()
                )?
            );
            assert_eq!(
                format!("{}", "Fuel"),
                decoder.decode_as_debug_str(
                    &String::param_type(),
                    [0, 0, 0, 0, 0, 0, 0, 4, 70, 117, 101, 108].as_slice()
                )?
            );
        }
        {
            assert_eq!(
                format!("{:?}", (1, 2)),
                decoder.decode_as_debug_str(&<(u8, u8)>::param_type(), [1, 2].as_slice())?
            );
            assert_eq!(
                format!("{:?}", [3, 4]),
                decoder.decode_as_debug_str(
                    &<[u64; 2]>::param_type(),
                    [0, 0, 0, 0, 0, 0, 0, 3, 0, 0, 0, 0, 0, 0, 0, 4].as_slice()
                )?
            );
        }
        {
            assert_eq!(
                format!("{:?}", Some(42)),
                decoder.decode_as_debug_str(
                    &<Option<u64>>::param_type(),
                    [0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 42].as_slice()
                )?
            );
            assert_eq!(
                format!("{:?}", Err::<u64, u64>(42u64)),
                decoder.decode_as_debug_str(
                    &<std::result::Result<u64, u64>>::param_type(),
                    [0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 42].as_slice()
                )?
            );
        }

        Ok(())
    }
}
rust
Apache-2.0
865e00c295de8b4a0a1ef7ac926c3c8266d5151b
2026-01-04T15:31:59.450823Z
false
FuelLabs/fuels-rs
https://github.com/FuelLabs/fuels-rs/blob/865e00c295de8b4a0a1ef7ac926c3c8266d5151b/packages/fuels-core/src/utils/offsets.rs
packages/fuels-core/src/utils/offsets.rs
use fuel_asm::Instruction; use fuel_tx::{ConsensusParameters, field::Script}; use fuel_types::bytes::padded_len_usize; use crate::{error, types::errors::Result}; /// Gets the base offset for a script or a predicate. The offset depends on the `max_inputs` /// field of the `ConsensusParameters` and the static offset. pub fn base_offset_script(consensus_parameters: &ConsensusParameters) -> usize { consensus_parameters.tx_params().tx_offset() + fuel_tx::Script::script_offset_static() } /// Calculates the length of the script based on the number of contract calls it /// has to make and returns the offset at which the script data begins pub fn call_script_data_offset( consensus_parameters: &ConsensusParameters, calls_instructions_len: usize, ) -> Result<usize> { // Instruction::SIZE is a placeholder for the RET instruction which is added later for returning // from the script. This doesn't happen in the predicate. let opcode_len = Instruction::SIZE; let padded_len = padded_len_usize(calls_instructions_len + opcode_len).ok_or_else(|| { error!( Other, "call script data len overflow: {calls_instructions_len}" ) })?; Ok(base_offset_script(consensus_parameters) + padded_len) }
rust
Apache-2.0
865e00c295de8b4a0a1ef7ac926c3c8266d5151b
2026-01-04T15:31:59.450823Z
false
FuelLabs/fuels-rs
https://github.com/FuelLabs/fuels-rs/blob/865e00c295de8b4a0a1ef7ac926c3c8266d5151b/packages/fuels-core/src/utils/constants.rs
packages/fuels-core/src/utils/constants.rs
use fuel_tx::Word;

// Byte width of an enum discriminant in the ABI encoding.
pub const ENUM_DISCRIMINANT_BYTE_WIDTH: usize = 8;
// Size in bytes of a VM word, derived from `fuel_tx::Word`.
pub const WORD_SIZE: usize = core::mem::size_of::<Word>();

// ANCHOR: default_call_parameters
pub const DEFAULT_CALL_PARAMS_AMOUNT: u64 = 0;
// ANCHOR_END: default_call_parameters

// Relative tolerance applied when estimating gas (20%).
pub const DEFAULT_GAS_ESTIMATION_TOLERANCE: f64 = 0.2;
// Block horizon used for gas estimation queries.
pub const DEFAULT_GAS_ESTIMATION_BLOCK_HORIZON: u32 = 5;

// The size of a signature inside a transaction `Witness`
pub const WITNESS_STATIC_SIZE: usize = 8;
const SIGNATURE_SIZE: usize = 64;
// Total witness size for a signature: static overhead plus the signature itself.
pub const SIGNATURE_WITNESS_SIZE: usize = WITNESS_STATIC_SIZE + SIGNATURE_SIZE;
rust
Apache-2.0
865e00c295de8b4a0a1ef7ac926c3c8266d5151b
2026-01-04T15:31:59.450823Z
false
FuelLabs/fuels-rs
https://github.com/FuelLabs/fuels-rs/blob/865e00c295de8b4a0a1ef7ac926c3c8266d5151b/packages/fuels-core/src/types/errors.rs
packages/fuels-core/src/types/errors.rs
/// Transaction-specific error reasons.
pub mod transaction {
    #[cfg(feature = "std")]
    use std::sync::Arc;

    #[cfg(not(feature = "std"))]
    use alloc::sync::Arc;

    /// Why a transaction failed; rendered as part of `Error::Transaction`.
    #[derive(thiserror::Error, Debug, Clone)]
    pub enum Reason {
        #[error("builder: {0}")]
        Builder(String),
        #[error("validation: {0}")]
        Validation(String),
        #[error("squeezedOut: {0}")]
        SqueezedOut(String),
        // `Arc` keeps the receipts cheaply cloneable (Reason derives Clone).
        #[error("reverted: {reason}, receipts: {receipts:?}")]
        Failure {
            reason: String,
            revert_id: Option<u64>,
            receipts: Arc<Vec<fuel_tx::Receipt>>,
        },
        #[error(": {0}")]
        Other(String),
    }

    impl Reason {
        /// Prepends `context` to the reason's message, keeping the variant.
        pub(crate) fn context(self, context: impl std::fmt::Display) -> Self {
            match self {
                Reason::Builder(msg) => Reason::Builder(format!("{context}: {msg}")),
                Reason::Validation(msg) => Reason::Validation(format!("{context}: {msg}")),
                Reason::SqueezedOut(msg) => Reason::SqueezedOut(format!("{context}: {msg}")),
                Reason::Failure {
                    reason,
                    revert_id,
                    receipts,
                } => Reason::Failure {
                    reason: format!("{context}: {reason}"),
                    revert_id,
                    receipts,
                },
                Reason::Other(msg) => Reason::Other(format!("{context}: {msg}")),
            }
        }
    }
}

use std::fmt::Display;

use crate::sealed::Sealed;

/// The crate-wide error type.
#[derive(thiserror::Error, Debug, Clone)]
pub enum Error {
    #[error("io: {0}")]
    IO(String),
    #[error("codec: {0}")]
    Codec(String),
    #[error("transaction {0}")]
    Transaction(transaction::Reason),
    #[error("provider: {0}")]
    Provider(String),
    #[error("{0}")]
    Other(String),
}

impl From<std::io::Error> for Error {
    fn from(value: std::io::Error) -> Self {
        Self::IO(value.to_string())
    }
}

impl Error {
    /// Prepends `context` to the error's message, keeping the variant.
    pub(crate) fn context(self, context: impl Display) -> Self {
        match self {
            Error::IO(msg) => Error::IO(format!("{context}: {msg}")),
            Error::Codec(msg) => Error::Codec(format!("{context}: {msg}")),
            Error::Transaction(reason) => Error::Transaction(reason.context(context)),
            Error::Provider(msg) => Error::Provider(format!("{context}: {msg}")),
            Error::Other(msg) => Error::Other(format!("{context}: {msg}")),
        }
    }
}

pub type Result<T> = std::result::Result<T, Error>;

/// Provides `context` and `with_context` to `Result`.
///
/// # Examples
/// ```
/// use fuels_core::types::errors::{Context, Error, Result};
///
/// let res_with_context: Result<()> =
///     Err(Error::Other("some error".to_owned())).context("some context");
///
/// let res_with_context: Result<()> =
///     Err(Error::Other("some error".to_owned())).with_context(|| "some context");
/// ```
pub trait Context<T>: Sealed {
    fn context<C>(self, context: C) -> Result<T>
    where
        C: Display + Send + Sync + 'static;

    fn with_context<C, F>(self, f: F) -> Result<T>
    where
        C: Display + Send + Sync + 'static,
        F: FnOnce() -> C;
}

// Sealing prevents downstream crates from implementing `Context`.
impl<T> Sealed for Result<T> {}

impl<T> Context<T> for Result<T> {
    /// Wrap the error value with additional context
    fn context<C>(self, context: C) -> Result<T>
    where
        C: Display + Send + Sync + 'static,
    {
        self.map_err(|e| e.context(context))
    }

    /// Wrap the error value with additional context that is evaluated lazily
    fn with_context<C, F>(self, context: F) -> Result<T>
    where
        C: Display + Send + Sync + 'static,
        F: FnOnce() -> C,
    {
        self.context(context())
    }
}

/// This macro can only be used for `Error` variants that have a `String` field.
/// Those are: `IO`, `Codec`, `Provider`, `Other`.
#[macro_export]
macro_rules! error {
    ($err_variant:ident, $fmt_str: literal $(,$arg: expr)*) => {
        $crate::types::errors::Error::$err_variant(format!($fmt_str,$($arg),*))
    }
}
pub use error;

/// This macro can only be used for `Error::Transaction` variants that have a `String` field.
/// Those are: `Builder`, `Validation`, `SqueezedOut`, `Other`.
#[macro_export]
macro_rules! error_transaction {
    ($err_variant:ident, $fmt_str: literal $(,$arg: expr)*) => {
        $crate::types::errors::Error::Transaction(
            $crate::types::errors::transaction::Reason::$err_variant(format!($fmt_str,$($arg),*)))
    }
}
pub use error_transaction;

impl From<fuel_vm::checked_transaction::CheckError> for Error {
    fn from(err: fuel_vm::checked_transaction::CheckError) -> Error {
        error_transaction!(Validation, "{err:?}")
    }
}

impl From<fuel_tx::ValidityError> for Error {
    fn from(err: fuel_tx::ValidityError) -> Error {
        error_transaction!(Validation, "{err:?}")
    }
}

// Generates `From<$err_type>` impls that stringify into the given variant.
macro_rules! impl_error_from {
    ($err_variant:ident, $err_type:ty ) => {
        impl From<$err_type> for $crate::types::errors::Error {
            fn from(err: $err_type) -> $crate::types::errors::Error {
                $crate::types::errors::Error::$err_variant(err.to_string())
            }
        }
    };
}

impl_error_from!(Other, &'static str);
impl_error_from!(Other, fuel_crypto::Error);
impl_error_from!(Other, serde_json::Error);
impl_error_from!(Other, hex::FromHexError);
impl_error_from!(Other, std::array::TryFromSliceError);
impl_error_from!(Other, std::str::Utf8Error);
impl_error_from!(Other, fuel_abi_types::error::Error);

#[cfg(test)]
mod tests {
    use super::*;

    // Eager `context` must prefix the message for plain and transaction errors.
    #[test]
    fn result_context() {
        {
            let res_with_context: Result<()> =
                Err(error!(Provider, "some error")).context("some context");

            assert_eq!(
                res_with_context.unwrap_err().to_string(),
                "provider: some context: some error",
            );
        }
        {
            let res_with_context: Result<()> =
                Err(error_transaction!(Builder, "some error")).context("some context");

            assert_eq!(
                res_with_context.unwrap_err().to_string(),
                "transaction builder: some context: some error"
            );
        }
    }

    // Lazy `with_context` must behave the same as the eager form.
    #[test]
    fn result_with_context() {
        {
            let res_with_context: Result<()> =
                Err(error!(Other, "some error")).with_context(|| "some context");

            assert_eq!(
                res_with_context.unwrap_err().to_string(),
                "some context: some error",
            );
        }
        {
            let res_with_context: Result<()> =
                Err(error_transaction!(Validation, "some error")).with_context(|| "some context");

            assert_eq!(
                res_with_context.unwrap_err().to_string(),
                "transaction validation: some context: some error"
            );
        }
    }
}
rust
Apache-2.0
865e00c295de8b4a0a1ef7ac926c3c8266d5151b
2026-01-04T15:31:59.450823Z
false
FuelLabs/fuels-rs
https://github.com/FuelLabs/fuels-rs/blob/865e00c295de8b4a0a1ef7ac926c3c8266d5151b/packages/fuels-core/src/types/param_types.rs
packages/fuels-core/src/types/param_types.rs
// Parameter-type support: `from_type_application` builds `ParamType`s from ABI
// type applications; `param_type` holds the `ParamType` definition itself.
mod from_type_application;
mod param_type;

pub use param_type::*;
rust
Apache-2.0
865e00c295de8b4a0a1ef7ac926c3c8266d5151b
2026-01-04T15:31:59.450823Z
false